entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "note\nclient = new Evernote.Client({ consumerKey: 'grobgl', consumerSecret: '259b25f020752d99', sandbox: tr",
"end": 248,
"score": 0.9398221373558044,
"start": 242,
"tag": "KEY",
"value": "grobgl"
},
{
"context": ".Client({ consumerKey: 'grobgl', consumerSecret: '259b25... | lib/evernote-client.coffee | dannysmith/n1-evernote | 7 | {Utils, React, FocusedContentStore} = require 'nylas-exports'
LocalStorage = require 'localStorage'
BrowserWindow = require('electron').remote.BrowserWindow
Evernote = require('evernote').Evernote
client = new Evernote.Client({ consumerKey: 'grobgl', consumerSecret: '259b25f020752d99', sandbox: true })
callbackUrl = 'http://evernote.callback'
class EvernoteClient
instance = null
get: () ->
return new Promise (resolve, reject) ->
if instance
resolve instance
else
instance = new _EvernoteClient
instance.init (err) ->
if err
reject err
else
resolve instance
class _EvernoteClient
init: ->
oauthAccessToken = localStorage.getItem('evernote_token')
if oauthAccessToken and oauthAccessToken != ''
@_client = new Evernote.Client({token: oauthAccessToken});
else
@_loginToEvernote (err, oauthAccessToken) =>
if err
@_client = null
else
@_client = new Evernote.Client({token: oauthAccessToken})
makeNote: (noteTitle, noteContent, parentNotebook, callback) =>
if !@_client
callback new Error('Client not defined')
ourNote = new Evernote.Note
ourNote.title = noteTitle
ourNote.content = noteContent
# parentNotebook is optional; if omitted, default notebook is used
if parentNotebook and parentNotebook.guid
ourNote.notebookGuid = parentNotebook.guid
noteStore = @_client.getNoteStore()
noteStore.createNote ourNote, callback
_loginToEvernote: (callback) ->
# callback with oauthAccessToken
client.getRequestToken callbackUrl, (error, oauthToken, oauthTokenSecret, results) =>
authorizeUrl = client.getAuthorizeUrl oauthToken
authWindow = new BrowserWindow
width: 800,
height: 600,
show: false,
resizable: false,
'node-integration': false,
'always-on-top': true,
'skip-taskbar': true,
frame: false,
'standard-window': false
authWindow.loadUrl authorizeUrl
authWindow.show()
# authWindow.webContents.on 'will-navigate', (event, url) =>
# @_handleCallback url, oauthToken, oauthTokenSecret, authWindow, callback
authWindow.webContents.on 'did-get-redirect-request', (event, oldUrl, newUrl) =>
@_handleCallback newUrl, oauthToken, oauthTokenSecret, authWindow, callback
authWindow.on 'close', () =>
alert 'Could not log in (window closed)'
callback(new Error('Window closed'))
_handleCallback: (url, oauthToken, oauthTokenSecret, authWindow, callback) ->
console.log '_handleCallback: ' + url
# Only proceed if callback url is called by Evernote authenticator
if url.substring(0, callbackUrl.length) == callbackUrl
authWindow.destroy()
# Read token from callback URL
rawOauthVerifier = /oauth_verifier=([^&]*)/.exec(url) or null
oauthVerifier = if rawOauthVerifier and rawOauthVerifier.length > 1 then rawOauthVerifier[1] else null
if oauthVerifier
client.getAccessToken oauthToken, oauthTokenSecret, oauthVerifier, (error, oauthAccessToken, oauthAccessTokenSecret, results) ->
localStorage.setItem 'evernote_token', oauthAccessToken
callback null, oauthAccessToken
else
callback(new Error('Could not get access token'))
# # Returns Evernote client instance. Logs in user if user is not logged in yet.
# get: (callback) ->
# if @_client
# return callback(null, @_client)
#
# oauthAccessToken = localStorage.getItem('evernote_token')
# if oauthAccessToken and oauthAccessToken != ''
# @_client = new Evernote.Client({token: oauthAccessToken});
# callback(null, @_client)
# else
# @_loginToEvernote (err, oauthAccessToken) =>
# if err
# @_client = null
# callback(err)
# else
# @_client = new Evernote.Client({token: oauthAccessToken})
# callback(null, @_client)
#
# makeNote: (noteTitle, noteBody, parentNotebook, callback) ->
# if !@_client
# callback new Error('Client not defined')
#
# nBody = '<?xml version=\"1.0\" encoding=\"UTF-8\"?>' +
# '<!DOCTYPE en-note SYSTEM "http://xml.evernote.com/pub/enml2.dtd">' +
# noteBody +
# '<en-note></en-note>'
# ourNote = new Evernote.Note
# ourNote.title = noteTitle
# ourNote.content = nBody
#
# # parentNotebook is optional; if omitted, default notebook is used
# if parentNotebook and parentNotebook.guid
# ourNote.notebookGuid = parentNotebook.guid
#
# noteStore = client.getNoteStore()
# noteStore.createNote ourNote, callback
#
# _loginToEvernote: (callback) ->
# # callback with oauthAccessToken
# client.getRequestToken callbackUrl, (error, oauthToken, oauthTokenSecret, results) =>
# authorizeUrl = client.getAuthorizeUrl oauthToken
# authWindow = new BrowserWindow
# width: 800,
# height: 600,
# show: false,
# resizable: false,
# 'node-integration': false,
# 'always-on-top': true,
# 'skip-taskbar': true,
# frame: false,
# 'standard-window': false
# authWindow.loadUrl authorizeUrl
# authWindow.show()
# # authWindow.webContents.on 'will-navigate', (event, url) =>
# # @_handleCallback url, oauthToken, oauthTokenSecret, authWindow, callback
#
# authWindow.webContents.on 'did-get-redirect-request', (event, oldUrl, newUrl) =>
# @_handleCallback newUrl, oauthToken, oauthTokenSecret, authWindow, callback
#
# authWindow.on 'close', () =>
# alert 'Could not log in (window closed)'
# callback(new Error('Window closed'))
#
#
# _handleCallback: (url, oauthToken, oauthTokenSecret, authWindow, callback) ->
# console.log '_handleCallback: ' + url
# # Only proceed if callback url is called by Evernote authenticator
# if url.substring(0, callbackUrl.length) == callbackUrl
# authWindow.destroy()
#
# # Read token from callback URL
# rawOauthVerifier = /oauth_verifier=([^&]*)/.exec(url) or null
# oauthVerifier = if rawOauthVerifier and rawOauthVerifier.length > 1 then rawOauthVerifier[1] else null
#
# if oauthVerifier
# client.getAccessToken oauthToken, oauthTokenSecret, oauthVerifier, (error, oauthAccessToken, oauthAccessTokenSecret, results) ->
# localStorage.setItem 'evernote_token', oauthAccessToken
# callback null, oauthAccessToken
# else
# callback(new Error('Could not get access token'))
module.exports = new EvernoteClient()
| 179270 | {Utils, React, FocusedContentStore} = require 'nylas-exports'
LocalStorage = require 'localStorage'
BrowserWindow = require('electron').remote.BrowserWindow
Evernote = require('evernote').Evernote
client = new Evernote.Client({ consumerKey: '<KEY>', consumerSecret: '<KEY>', sandbox: true })
callbackUrl = 'http://evernote.callback'
class EvernoteClient
instance = null
get: () ->
return new Promise (resolve, reject) ->
if instance
resolve instance
else
instance = new _EvernoteClient
instance.init (err) ->
if err
reject err
else
resolve instance
class _EvernoteClient
init: ->
oauthAccessToken = localStorage.getItem('evernote_token')
if oauthAccessToken and oauthAccessToken != ''
@_client = new Evernote.Client({token: oauthAccessToken});
else
@_loginToEvernote (err, oauthAccessToken) =>
if err
@_client = null
else
@_client = new Evernote.Client({token: oauthAccessToken})
makeNote: (noteTitle, noteContent, parentNotebook, callback) =>
if !@_client
callback new Error('Client not defined')
ourNote = new Evernote.Note
ourNote.title = noteTitle
ourNote.content = noteContent
# parentNotebook is optional; if omitted, default notebook is used
if parentNotebook and parentNotebook.guid
ourNote.notebookGuid = parentNotebook.guid
noteStore = @_client.getNoteStore()
noteStore.createNote ourNote, callback
_loginToEvernote: (callback) ->
# callback with oauthAccessToken
client.getRequestToken callbackUrl, (error, oauthToken, oauthTokenSecret, results) =>
authorizeUrl = client.getAuthorizeUrl oauthToken
authWindow = new BrowserWindow
width: 800,
height: 600,
show: false,
resizable: false,
'node-integration': false,
'always-on-top': true,
'skip-taskbar': true,
frame: false,
'standard-window': false
authWindow.loadUrl authorizeUrl
authWindow.show()
# authWindow.webContents.on 'will-navigate', (event, url) =>
# @_handleCallback url, oauthToken, oauthTokenSecret, authWindow, callback
authWindow.webContents.on 'did-get-redirect-request', (event, oldUrl, newUrl) =>
@_handleCallback newUrl, oauthToken, oauthTokenSecret, authWindow, callback
authWindow.on 'close', () =>
alert 'Could not log in (window closed)'
callback(new Error('Window closed'))
_handleCallback: (url, oauthToken, oauthTokenSecret, authWindow, callback) ->
console.log '_handleCallback: ' + url
# Only proceed if callback url is called by Evernote authenticator
if url.substring(0, callbackUrl.length) == callbackUrl
authWindow.destroy()
# Read token from callback URL
rawOauthVerifier = /oauth_verifier=([^&]*)/.exec(url) or null
oauthVerifier = if rawOauthVerifier and rawOauthVerifier.length > 1 then rawOauthVerifier[1] else null
if oauthVerifier
client.getAccessToken oauthToken, oauthTokenSecret, oauthVerifier, (error, oauthAccessToken, oauthAccessTokenSecret, results) ->
localStorage.setItem 'evernote_token', oauthAccessToken
callback null, oauthAccessToken
else
callback(new Error('Could not get access token'))
# # Returns Evernote client instance. Logs in user if user is not logged in yet.
# get: (callback) ->
# if @_client
# return callback(null, @_client)
#
# oauthAccessToken = localStorage.getItem('evernote_token')
# if oauthAccessToken and oauthAccessToken != ''
# @_client = new Evernote.Client({token: oauthAccessToken});
# callback(null, @_client)
# else
# @_loginToEvernote (err, oauthAccessToken) =>
# if err
# @_client = null
# callback(err)
# else
# @_client = new Evernote.Client({token: oauthAccessToken})
# callback(null, @_client)
#
# makeNote: (noteTitle, noteBody, parentNotebook, callback) ->
# if !@_client
# callback new Error('Client not defined')
#
# nBody = '<?xml version=\"1.0\" encoding=\"UTF-8\"?>' +
# '<!DOCTYPE en-note SYSTEM "http://xml.evernote.com/pub/enml2.dtd">' +
# noteBody +
# '<en-note></en-note>'
# ourNote = new Evernote.Note
# ourNote.title = noteTitle
# ourNote.content = nBody
#
# # parentNotebook is optional; if omitted, default notebook is used
# if parentNotebook and parentNotebook.guid
# ourNote.notebookGuid = parentNotebook.guid
#
# noteStore = client.getNoteStore()
# noteStore.createNote ourNote, callback
#
# _loginToEvernote: (callback) ->
# # callback with oauthAccessToken
# client.getRequestToken callbackUrl, (error, oauthToken, oauthTokenSecret, results) =>
# authorizeUrl = client.getAuthorizeUrl oauthToken
# authWindow = new BrowserWindow
# width: 800,
# height: 600,
# show: false,
# resizable: false,
# 'node-integration': false,
# 'always-on-top': true,
# 'skip-taskbar': true,
# frame: false,
# 'standard-window': false
# authWindow.loadUrl authorizeUrl
# authWindow.show()
# # authWindow.webContents.on 'will-navigate', (event, url) =>
# # @_handleCallback url, oauthToken, oauthTokenSecret, authWindow, callback
#
# authWindow.webContents.on 'did-get-redirect-request', (event, oldUrl, newUrl) =>
# @_handleCallback newUrl, oauthToken, oauthTokenSecret, authWindow, callback
#
# authWindow.on 'close', () =>
# alert 'Could not log in (window closed)'
# callback(new Error('Window closed'))
#
#
# _handleCallback: (url, oauthToken, oauthTokenSecret, authWindow, callback) ->
# console.log '_handleCallback: ' + url
# # Only proceed if callback url is called by Evernote authenticator
# if url.substring(0, callbackUrl.length) == callbackUrl
# authWindow.destroy()
#
# # Read token from callback URL
# rawOauthVerifier = /oauth_verifier=([^&]*)/.exec(url) or null
# oauthVerifier = if rawOauthVerifier and rawOauthVerifier.length > 1 then rawOauthVerifier[1] else null
#
# if oauthVerifier
# client.getAccessToken oauthToken, oauthTokenSecret, oauthVerifier, (error, oauthAccessToken, oauthAccessTokenSecret, results) ->
# localStorage.setItem 'evernote_token', oauthAccessToken
# callback null, oauthAccessToken
# else
# callback(new Error('Could not get access token'))
module.exports = new EvernoteClient()
| true | {Utils, React, FocusedContentStore} = require 'nylas-exports'
LocalStorage = require 'localStorage'
BrowserWindow = require('electron').remote.BrowserWindow
Evernote = require('evernote').Evernote
client = new Evernote.Client({ consumerKey: 'PI:KEY:<KEY>END_PI', consumerSecret: 'PI:KEY:<KEY>END_PI', sandbox: true })
callbackUrl = 'http://evernote.callback'
class EvernoteClient
instance = null
get: () ->
return new Promise (resolve, reject) ->
if instance
resolve instance
else
instance = new _EvernoteClient
instance.init (err) ->
if err
reject err
else
resolve instance
class _EvernoteClient
init: ->
oauthAccessToken = localStorage.getItem('evernote_token')
if oauthAccessToken and oauthAccessToken != ''
@_client = new Evernote.Client({token: oauthAccessToken});
else
@_loginToEvernote (err, oauthAccessToken) =>
if err
@_client = null
else
@_client = new Evernote.Client({token: oauthAccessToken})
makeNote: (noteTitle, noteContent, parentNotebook, callback) =>
if !@_client
callback new Error('Client not defined')
ourNote = new Evernote.Note
ourNote.title = noteTitle
ourNote.content = noteContent
# parentNotebook is optional; if omitted, default notebook is used
if parentNotebook and parentNotebook.guid
ourNote.notebookGuid = parentNotebook.guid
noteStore = @_client.getNoteStore()
noteStore.createNote ourNote, callback
_loginToEvernote: (callback) ->
# callback with oauthAccessToken
client.getRequestToken callbackUrl, (error, oauthToken, oauthTokenSecret, results) =>
authorizeUrl = client.getAuthorizeUrl oauthToken
authWindow = new BrowserWindow
width: 800,
height: 600,
show: false,
resizable: false,
'node-integration': false,
'always-on-top': true,
'skip-taskbar': true,
frame: false,
'standard-window': false
authWindow.loadUrl authorizeUrl
authWindow.show()
# authWindow.webContents.on 'will-navigate', (event, url) =>
# @_handleCallback url, oauthToken, oauthTokenSecret, authWindow, callback
authWindow.webContents.on 'did-get-redirect-request', (event, oldUrl, newUrl) =>
@_handleCallback newUrl, oauthToken, oauthTokenSecret, authWindow, callback
authWindow.on 'close', () =>
alert 'Could not log in (window closed)'
callback(new Error('Window closed'))
_handleCallback: (url, oauthToken, oauthTokenSecret, authWindow, callback) ->
console.log '_handleCallback: ' + url
# Only proceed if callback url is called by Evernote authenticator
if url.substring(0, callbackUrl.length) == callbackUrl
authWindow.destroy()
# Read token from callback URL
rawOauthVerifier = /oauth_verifier=([^&]*)/.exec(url) or null
oauthVerifier = if rawOauthVerifier and rawOauthVerifier.length > 1 then rawOauthVerifier[1] else null
if oauthVerifier
client.getAccessToken oauthToken, oauthTokenSecret, oauthVerifier, (error, oauthAccessToken, oauthAccessTokenSecret, results) ->
localStorage.setItem 'evernote_token', oauthAccessToken
callback null, oauthAccessToken
else
callback(new Error('Could not get access token'))
# # Returns Evernote client instance. Logs in user if user is not logged in yet.
# get: (callback) ->
# if @_client
# return callback(null, @_client)
#
# oauthAccessToken = localStorage.getItem('evernote_token')
# if oauthAccessToken and oauthAccessToken != ''
# @_client = new Evernote.Client({token: oauthAccessToken});
# callback(null, @_client)
# else
# @_loginToEvernote (err, oauthAccessToken) =>
# if err
# @_client = null
# callback(err)
# else
# @_client = new Evernote.Client({token: oauthAccessToken})
# callback(null, @_client)
#
# makeNote: (noteTitle, noteBody, parentNotebook, callback) ->
# if !@_client
# callback new Error('Client not defined')
#
# nBody = '<?xml version=\"1.0\" encoding=\"UTF-8\"?>' +
# '<!DOCTYPE en-note SYSTEM "http://xml.evernote.com/pub/enml2.dtd">' +
# noteBody +
# '<en-note></en-note>'
# ourNote = new Evernote.Note
# ourNote.title = noteTitle
# ourNote.content = nBody
#
# # parentNotebook is optional; if omitted, default notebook is used
# if parentNotebook and parentNotebook.guid
# ourNote.notebookGuid = parentNotebook.guid
#
# noteStore = client.getNoteStore()
# noteStore.createNote ourNote, callback
#
# _loginToEvernote: (callback) ->
# # callback with oauthAccessToken
# client.getRequestToken callbackUrl, (error, oauthToken, oauthTokenSecret, results) =>
# authorizeUrl = client.getAuthorizeUrl oauthToken
# authWindow = new BrowserWindow
# width: 800,
# height: 600,
# show: false,
# resizable: false,
# 'node-integration': false,
# 'always-on-top': true,
# 'skip-taskbar': true,
# frame: false,
# 'standard-window': false
# authWindow.loadUrl authorizeUrl
# authWindow.show()
# # authWindow.webContents.on 'will-navigate', (event, url) =>
# # @_handleCallback url, oauthToken, oauthTokenSecret, authWindow, callback
#
# authWindow.webContents.on 'did-get-redirect-request', (event, oldUrl, newUrl) =>
# @_handleCallback newUrl, oauthToken, oauthTokenSecret, authWindow, callback
#
# authWindow.on 'close', () =>
# alert 'Could not log in (window closed)'
# callback(new Error('Window closed'))
#
#
# _handleCallback: (url, oauthToken, oauthTokenSecret, authWindow, callback) ->
# console.log '_handleCallback: ' + url
# # Only proceed if callback url is called by Evernote authenticator
# if url.substring(0, callbackUrl.length) == callbackUrl
# authWindow.destroy()
#
# # Read token from callback URL
# rawOauthVerifier = /oauth_verifier=([^&]*)/.exec(url) or null
# oauthVerifier = if rawOauthVerifier and rawOauthVerifier.length > 1 then rawOauthVerifier[1] else null
#
# if oauthVerifier
# client.getAccessToken oauthToken, oauthTokenSecret, oauthVerifier, (error, oauthAccessToken, oauthAccessTokenSecret, results) ->
# localStorage.setItem 'evernote_token', oauthAccessToken
# callback null, oauthAccessToken
# else
# callback(new Error('Could not get access token'))
module.exports = new EvernoteClient()
|
[
{
"context": "ice and styled\n\n @license the unlicense\n @author jens alexander ewald, 2011-2014, lea.io\n @version 0.2.0\n @dependson ",
"end": 163,
"score": 0.9998171925544739,
"start": 143,
"tag": "NAME",
"value": "jens alexander ewald"
}
] | src/filer.coffee | jens-a-e/textinput.js | 0 | ###!
Filer.js – helps you to retrieve data from files, you select via a dialog
... but nice and styled
@license the unlicense
@author jens alexander ewald, 2011-2014, lea.io
@version 0.2.0
@dependson FancyFileReader.js
!###
$ ->
$.fn.filer = (callback,filetypes,@encoding,@binary) ->
@addClass "filer"
# does it work?
unless FancyFileReader? and FancyFileReader.supported?
@addClass "unsupported"
console.error "FILE OBJECTS NOT SUPPORTED"
return @
# init:
@bind "selectstart", (event) -> event.preventDefault?()
accept = filetypes ? @data('accept').split(',')
keeper = $(@).parent()
keeper.remove('.filerhelper')
input = $('<input type="file" accept="'+accept+'" style="position:absolute; height:0!important; width:0!important; z-index:-9999!important;" class="filerhelper">')
input = $(input).appendTo(keeper)
@.click (event) =>
event.stopPropagation?()
event.preventDefault?()
$(@).removeClass('success loaded')
input.click()
return false
# ================================
# = Instantiate a FancyFileReader =
# ================================
@reader = new FancyFileReader()
# ===========================================
# = The internal callback for styling state =
# ===========================================
internal_cb =
success: (event) =>
$(@).addClass("success")
always: (event) =>
# remove all classes with on** before we enter a fresh read
$(@)
.removeClass (index, klass) ->
result = []
f = null
r = /on\w+/gi
# search globally
while f = r.exec(klass)
result.push f[0]
result.map((el) -> $.trim(el)).join(" ")
.addClass("on#{event.type}")
#### END INTERNAL CALLBACKS
# first bind some styling callback
@reader.bind internal_cb
# then bind the given callbacks
@reader.bind callback
# ===================================
# = This is where the magic happens =
# ===================================
input.bind "change", (event) =>
event.stopPropagation?()
event.preventDefault?()
[file,] = event.target.files
return false unless file
# and read it
@reader.setDebug $(@).data("debug")
# data-encoding attribute:
encoding = $(@).data('encoding') ? @encoding
binary = $(@).data('binary') ? !!@binary
if $(@).data('accept')
@reader.setAllowedFileTypes $(@).data('accept')
# call start callbacks
callback.start?()
# and read it defered
setTimeout (=>@reader.read(file,{encoding,binary})) , 1
# Fixed bug in chrome, would not read the same file again
input.val("").change();
# return nothing
return false
| 222374 | ###!
Filer.js – helps you to retrieve data from files, you select via a dialog
... but nice and styled
@license the unlicense
@author <NAME>, 2011-2014, lea.io
@version 0.2.0
@dependson FancyFileReader.js
!###
$ ->
$.fn.filer = (callback,filetypes,@encoding,@binary) ->
@addClass "filer"
# does it work?
unless FancyFileReader? and FancyFileReader.supported?
@addClass "unsupported"
console.error "FILE OBJECTS NOT SUPPORTED"
return @
# init:
@bind "selectstart", (event) -> event.preventDefault?()
accept = filetypes ? @data('accept').split(',')
keeper = $(@).parent()
keeper.remove('.filerhelper')
input = $('<input type="file" accept="'+accept+'" style="position:absolute; height:0!important; width:0!important; z-index:-9999!important;" class="filerhelper">')
input = $(input).appendTo(keeper)
@.click (event) =>
event.stopPropagation?()
event.preventDefault?()
$(@).removeClass('success loaded')
input.click()
return false
# ================================
# = Instantiate a FancyFileReader =
# ================================
@reader = new FancyFileReader()
# ===========================================
# = The internal callback for styling state =
# ===========================================
internal_cb =
success: (event) =>
$(@).addClass("success")
always: (event) =>
# remove all classes with on** before we enter a fresh read
$(@)
.removeClass (index, klass) ->
result = []
f = null
r = /on\w+/gi
# search globally
while f = r.exec(klass)
result.push f[0]
result.map((el) -> $.trim(el)).join(" ")
.addClass("on#{event.type}")
#### END INTERNAL CALLBACKS
# first bind some styling callback
@reader.bind internal_cb
# then bind the given callbacks
@reader.bind callback
# ===================================
# = This is where the magic happens =
# ===================================
input.bind "change", (event) =>
event.stopPropagation?()
event.preventDefault?()
[file,] = event.target.files
return false unless file
# and read it
@reader.setDebug $(@).data("debug")
# data-encoding attribute:
encoding = $(@).data('encoding') ? @encoding
binary = $(@).data('binary') ? !!@binary
if $(@).data('accept')
@reader.setAllowedFileTypes $(@).data('accept')
# call start callbacks
callback.start?()
# and read it defered
setTimeout (=>@reader.read(file,{encoding,binary})) , 1
# Fixed bug in chrome, would not read the same file again
input.val("").change();
# return nothing
return false
| true | ###!
Filer.js – helps you to retrieve data from files, you select via a dialog
... but nice and styled
@license the unlicense
@author PI:NAME:<NAME>END_PI, 2011-2014, lea.io
@version 0.2.0
@dependson FancyFileReader.js
!###
$ ->
$.fn.filer = (callback,filetypes,@encoding,@binary) ->
@addClass "filer"
# does it work?
unless FancyFileReader? and FancyFileReader.supported?
@addClass "unsupported"
console.error "FILE OBJECTS NOT SUPPORTED"
return @
# init:
@bind "selectstart", (event) -> event.preventDefault?()
accept = filetypes ? @data('accept').split(',')
keeper = $(@).parent()
keeper.remove('.filerhelper')
input = $('<input type="file" accept="'+accept+'" style="position:absolute; height:0!important; width:0!important; z-index:-9999!important;" class="filerhelper">')
input = $(input).appendTo(keeper)
@.click (event) =>
event.stopPropagation?()
event.preventDefault?()
$(@).removeClass('success loaded')
input.click()
return false
# ================================
# = Instantiate a FancyFileReader =
# ================================
@reader = new FancyFileReader()
# ===========================================
# = The internal callback for styling state =
# ===========================================
internal_cb =
success: (event) =>
$(@).addClass("success")
always: (event) =>
# remove all classes with on** before we enter a fresh read
$(@)
.removeClass (index, klass) ->
result = []
f = null
r = /on\w+/gi
# search globally
while f = r.exec(klass)
result.push f[0]
result.map((el) -> $.trim(el)).join(" ")
.addClass("on#{event.type}")
#### END INTERNAL CALLBACKS
# first bind some styling callback
@reader.bind internal_cb
# then bind the given callbacks
@reader.bind callback
# ===================================
# = This is where the magic happens =
# ===================================
input.bind "change", (event) =>
event.stopPropagation?()
event.preventDefault?()
[file,] = event.target.files
return false unless file
# and read it
@reader.setDebug $(@).data("debug")
# data-encoding attribute:
encoding = $(@).data('encoding') ? @encoding
binary = $(@).data('binary') ? !!@binary
if $(@).data('accept')
@reader.setAllowedFileTypes $(@).data('accept')
# call start callbacks
callback.start?()
# and read it defered
setTimeout (=>@reader.read(file,{encoding,binary})) , 1
# Fixed bug in chrome, would not read the same file again
input.val("").change();
# return nothing
return false
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999158978462219,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/_classes/twitch-player.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @TwitchPlayer
constructor: (@turbolinksReload) ->
@playerDivs = document.getElementsByClassName('js-twitch-player')
addEventListener 'turbolinks:load', @startAll
initializeEmbed: =>
@turbolinksReload.load 'https://player.twitch.tv/js/embed/v1.js', @startAll
startAll: =>
return if @playerDivs.length == 0
if !Twitch?
@initializeEmbed()
else
@start(div) for div in @playerDivs
start: (div) =>
return if div.dataset.twitchPlayerStarted
div.dataset.twitchPlayerStarted = true
options =
width: '100%'
height: '100%'
channel: div.dataset.channel
player = new Twitch.Player(div.id, options)
player.addEventListener Twitch.Player.PLAY, => @openPlayer(div)
noCookieDiv: (playerDivId) =>
document.querySelector(".js-twitch-player--no-cookie[data-player-id='#{playerDivId}']")
openPlayer: (div) =>
return unless div.classList.contains 'hidden'
div.classList.remove 'hidden'
Fade.out @noCookieDiv(div.id)
| 190742 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @TwitchPlayer
constructor: (@turbolinksReload) ->
@playerDivs = document.getElementsByClassName('js-twitch-player')
addEventListener 'turbolinks:load', @startAll
initializeEmbed: =>
@turbolinksReload.load 'https://player.twitch.tv/js/embed/v1.js', @startAll
startAll: =>
return if @playerDivs.length == 0
if !Twitch?
@initializeEmbed()
else
@start(div) for div in @playerDivs
start: (div) =>
return if div.dataset.twitchPlayerStarted
div.dataset.twitchPlayerStarted = true
options =
width: '100%'
height: '100%'
channel: div.dataset.channel
player = new Twitch.Player(div.id, options)
player.addEventListener Twitch.Player.PLAY, => @openPlayer(div)
noCookieDiv: (playerDivId) =>
document.querySelector(".js-twitch-player--no-cookie[data-player-id='#{playerDivId}']")
openPlayer: (div) =>
return unless div.classList.contains 'hidden'
div.classList.remove 'hidden'
Fade.out @noCookieDiv(div.id)
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @TwitchPlayer
constructor: (@turbolinksReload) ->
@playerDivs = document.getElementsByClassName('js-twitch-player')
addEventListener 'turbolinks:load', @startAll
initializeEmbed: =>
@turbolinksReload.load 'https://player.twitch.tv/js/embed/v1.js', @startAll
startAll: =>
return if @playerDivs.length == 0
if !Twitch?
@initializeEmbed()
else
@start(div) for div in @playerDivs
start: (div) =>
return if div.dataset.twitchPlayerStarted
div.dataset.twitchPlayerStarted = true
options =
width: '100%'
height: '100%'
channel: div.dataset.channel
player = new Twitch.Player(div.id, options)
player.addEventListener Twitch.Player.PLAY, => @openPlayer(div)
noCookieDiv: (playerDivId) =>
document.querySelector(".js-twitch-player--no-cookie[data-player-id='#{playerDivId}']")
openPlayer: (div) =>
return unless div.classList.contains 'hidden'
div.classList.remove 'hidden'
Fade.out @noCookieDiv(div.id)
|
[
{
"context": ".clear()\n .then ->\n dbfs.write '/My Documents/Alain Deschênes', 'Alain Deschênes'\n .then ->\n\n dbfs.",
"end": 69,
"score": 0.970417320728302,
"start": 54,
"tag": "NAME",
"value": "Alain Deschênes"
},
{
"context": "\n dbfs.write '/My Documents/Alai... | tests/dbfs.coffee | kornalius/termos | 0 | dbfs.clear()
.then ->
dbfs.write '/My Documents/Alain Deschênes', 'Alain Deschênes'
.then ->
dbfs.write '/My Documents/Mélissa Dubé', 'Mélissa Dubé'
.then ->
dbfs.read '/My Documents/Mélissa Dubé'
.then (doc) -> console.log doc
.catch (err) -> console.log err
dbfs.stats '/My Documents/Mélissa Dubé'
.then (stats) -> console.log stats
.catch (err) -> console.log err
dbfs.files '/My Documents'
.then (files) -> console.log files
.catch (err) -> console.log err
.catch (err) -> console.log err
.catch (err) -> console.log err
| 60677 | dbfs.clear()
.then ->
dbfs.write '/My Documents/<NAME>', '<NAME>'
.then ->
dbfs.write '/My Documents/<NAME>', '<NAME>'
.then ->
dbfs.read '/My Documents/<NAME>'
.then (doc) -> console.log doc
.catch (err) -> console.log err
dbfs.stats '/My Documents/<NAME>'
.then (stats) -> console.log stats
.catch (err) -> console.log err
dbfs.files '/My Documents'
.then (files) -> console.log files
.catch (err) -> console.log err
.catch (err) -> console.log err
.catch (err) -> console.log err
| true | dbfs.clear()
.then ->
dbfs.write '/My Documents/PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'
.then ->
dbfs.write '/My Documents/PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI'
.then ->
dbfs.read '/My Documents/PI:NAME:<NAME>END_PI'
.then (doc) -> console.log doc
.catch (err) -> console.log err
dbfs.stats '/My Documents/PI:NAME:<NAME>END_PI'
.then (stats) -> console.log stats
.catch (err) -> console.log err
dbfs.files '/My Documents'
.then (files) -> console.log files
.catch (err) -> console.log err
.catch (err) -> console.log err
.catch (err) -> console.log err
|
[
{
"context": ", ->\n type = eve.type\n user =\n login: \"test\"\n name: \"Test\"\n email: \"test@mail.com\"\n",
"end": 219,
"score": 0.9585682153701782,
"start": 215,
"tag": "USERNAME",
"value": "test"
},
{
"context": " login: \"test\"\n name: \"Test\"\n ... | test/examples.test.coffee | zzdhidden/EVE | 4 | {assert, ok, fail, equal, notEqual, deepEqual, notDeepEqual, strictEqual, notStrictEqual, eve} = require "./helper"
describe "examples", ->
describe "signup_user", ->
type = eve.type
user =
login: "test"
name: "Test"
email: "test@mail.com"
password: "test"
password_confirmation: "test"
birthday: "1990-1-1"
age: "20"
schema = type.object(
login: type.string().lowercase().trim().notEmpty().len(3, 12).match(/^[a-zA-Z0-9]*$/).validator((val, done) ->
setTimeout (->
done val isnt "admin"
), 100
, "must be unique")
name: type.string().trim().notEmpty()
email: type.string().trim().notEmpty().email()
password: type.string().trim().notEmpty().len(6, 12)
password_confirmation: type.string().trim().notEmpty().len(6, 12).validator((val) ->
val is @password
, "must be equal to password")
birthday: type.date()
age: type.integer()
)
schema.value(user).validate (errors) ->
#ok !errors | 88930 | {assert, ok, fail, equal, notEqual, deepEqual, notDeepEqual, strictEqual, notStrictEqual, eve} = require "./helper"
describe "examples", ->
describe "signup_user", ->
type = eve.type
user =
login: "test"
name: "Test"
email: "<EMAIL>"
password: "<PASSWORD>"
password_confirmation: "<PASSWORD>"
birthday: "1990-1-1"
age: "20"
schema = type.object(
login: type.string().lowercase().trim().notEmpty().len(3, 12).match(/^[a-zA-Z0-9]*$/).validator((val, done) ->
setTimeout (->
done val isnt "admin"
), 100
, "must be unique")
name: type.string().trim().notEmpty()
email: type.string().trim().notEmpty().email()
password: type.string().trim().notEmpty().len(6, 12)
password_confirmation: type.string().trim().notEmpty().len(6, 12).validator((val) ->
val is @password
, "must be equal to password")
birthday: type.date()
age: type.integer()
)
schema.value(user).validate (errors) ->
#ok !errors | true | {assert, ok, fail, equal, notEqual, deepEqual, notDeepEqual, strictEqual, notStrictEqual, eve} = require "./helper"
describe "examples", ->
describe "signup_user", ->
type = eve.type
user =
login: "test"
name: "Test"
email: "PI:EMAIL:<EMAIL>END_PI"
password: "PI:PASSWORD:<PASSWORD>END_PI"
password_confirmation: "PI:PASSWORD:<PASSWORD>END_PI"
birthday: "1990-1-1"
age: "20"
schema = type.object(
login: type.string().lowercase().trim().notEmpty().len(3, 12).match(/^[a-zA-Z0-9]*$/).validator((val, done) ->
setTimeout (->
done val isnt "admin"
), 100
, "must be unique")
name: type.string().trim().notEmpty()
email: type.string().trim().notEmpty().email()
password: type.string().trim().notEmpty().len(6, 12)
password_confirmation: type.string().trim().notEmpty().len(6, 12).validator((val) ->
val is @password
, "must be equal to password")
birthday: type.date()
age: type.integer()
)
schema.value(user).validate (errors) ->
#ok !errors |
[
{
"context": "# Copyright 2012 Jonas Finnemann Jensen <jopsen@gmail.com>\n#\n# Licensed under the Apache ",
"end": 39,
"score": 0.9998480081558228,
"start": 17,
"tag": "NAME",
"value": "Jonas Finnemann Jensen"
},
{
"context": "# Copyright 2012 Jonas Finnemann Jensen <jopsen@gmail.com... | src/BJSON.coffee | jonasfj/BJSON.coffee | 1 | # Copyright 2012 Jonas Finnemann Jensen <jopsen@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@BJSON ?= {}
#### BJSON Parser
# String type parser
decodeString = (size, ctx) ->
#if TextDecoder?
# val = TextDecoder('utf-8').decode(new Uint8Array(ctx.buffer, ctx.offset, size))
# ctx.offset += size
# return val
offset = ctx.offset
end = ctx.offset + size
strs = []
# while there is text to read
while offset < end
buf = []
nextend = Math.min(end, offset + 0x7fff)
# while there's room for two entries in buf
while offset < nextend
b = ctx.bytes[offset++]
if not (b & 0x80)
# Add codepoint b
buf.push b
continue
i = 0
while (b << i) & 0x40
i++
c = b & (0xff >> i + 1)
while i > 0
i -= 1
if offset == end
i = -1
break
b = ctx.bytes[offset++]
if (b & 0xc0) != 0x80
i = -1
offset--
break
c = (c << 6) | (b & 0x3f)
if i < 0
c = 0xfffd # Replacement character
# Add codepoint c
if c <= 0xffff
buf.push c
else
c -= 0x10000
buf.push 0xd800 + ((c >> 10) & 0x3ff)
buf.push 0xdc00 + (c & 0x3ff)
# Decode codepoints and add string to list
strs.push String.fromCharCode(buf...)
ctx.offset += size
# Join and return decoded strings
if strs.length == 1
return strs[0]
return strs.join("")
# Array lookup is faster than Math.pow: http://jsperf.com/math-pow-vs-array-lookup
prim = [null, false, "", true]
# Read BJSON item
read = (ctx) ->
t = ctx.bytes[ctx.offset++]
tt = (t & 0x3c) >> 2 # High bits, indicating type
st = t & 0x3 # Low bits, indicating size of size field
# Types 0 and 3 are special cases, these take different arguments
if tt is 0
return prim[st]
if tt is 3
if st is 0
val = ctx.view.getFloat32(ctx.offset, true)
else
val = ctx.view.getFloat64(ctx.offset, true)
ctx.offset += 4 + 4 * st
return val
# If tt isn't 0 or 3, we must read a size field
if st < 2
if st is 0
size = ctx.bytes[ctx.offset++]
else
size = ctx.view.getUint16(ctx.offset, true)
ctx.offset += 2
else
if st is 2
size = ctx.view.getUint32(ctx.offset, true)
ctx.offset += 4
else
# This code path is untested, and will fail for numbers larger
# than 2^53, but let's hope documents large than 4GiB are unlikely.
# Technically, Javascript supports integers up to 2^53, however,
# bitwise operations on more than 32bit integers is not possible.
# This is why we use addition and multiplication to combine the
# upper and lower parts of the 64 bit integer.
# This transformation could have nasty side effects, who, knows...
# But browsers probably doesn't support ArrayBuffers larger than
# 4GiB anyway.
lower = ctx.view.getUint64(ctx.offset, true)
upper = ctx.view.getUint64(ctx.offset + 4, true)
size = lower + upper * 0x100000000
ctx.offset += 8
if tt < 3
return size * (3 - 2 * tt) # possible values are 1 and 2
else if tt is 4 # String
return decodeString(size, ctx)
else
if tt is 9 # Object
end = ctx.offset + size
obj = {}
while ctx.offset < end
key = read(ctx)
val = read(ctx)
obj[key] = val
return obj
else if tt is 8 # Array
end = ctx.offset + size
val = []
while ctx.offset < end
val.push read(ctx)
return val
else if tt is 5 # ArrayBuffer
val = ctx.buffer.slice(ctx.offset, size)
ctx.offset += size
return val
throw new Error("Type doesn't exists!!!")
# Parse a BJSON document
@BJSON.parse = (buf) ->
# Create a context and read using it
return read
buffer: buf
view: new DataView(buf)
bytes: new Uint8Array(buf)
offset: 0
#### BJSON Serialization
class SerializationContext
constructor: (size = 4096) ->
@buf = new ArrayBuffer(size)
@view = new DataView(@buf)
@bytes = new Uint8Array(@buf)
@offset = 0
resize: (size) ->
if @buf.byteLength - @offset < size
@buf = new ArrayBuffer((@offset + size) * 2)
bytes = new Uint8Array(@buf)
bytes.set(@bytes)
@bytes = bytes
@view = new DataView(@buf)
# Dictionary with serializers for different types, each taking a value and serializing to context
put = {}
# String serialization
# For DOM-string intepretation see: http://www.w3.org/TR/WebIDL/#idl-DOMString
# For UTF-8 encoding see: http://tools.ietf.org/html/rfc3629#section-3
put.string = (val, ctx) ->
if val.length is 0
ctx.resize(1)
ctx.view.setUint8(ctx.offset, 0x2)
ctx.offset += 1
else
bound = val.length * 3
typeoffset = ctx.offset
typesize(0x10, 0, bound, ctx)
contentoffset = ctx.offset
ctx.resize(bound)
offset = ctx.offset
bytes = ctx.bytes
i = 0
n = val.length
while i < n
c = val.charCodeAt(i++)
size = 0
first = 0
if c < 0x80
bytes[offset++] = c
continue
else if c < 0x800
first = 0xc0
size = 2
else if c < 0xd800 or c > 0xdfff
first = 0xe0
size = 3
else if 0xdc00 <= c <= 0xdfff
c = 0xfffd # Replacement character
first = 0xe0
size = 3
else if 0xd800 <= c <= 0xdbff
if i < n
d = val.charCodeAt(i++)
if 0xdc00 <= d <= 0xdfff
a = c & 0x3ff
b = d & 0x3ff
c = 0x10000 + (a << 10) + b
first = 0xf0
size = 4
else
c = 0xfffd # Replacement character
first = 0xe0
size = 3
else
# Specification doesn't derive any character from c
continue
j = offset + size - 1
while j > offset
bytes[j--] = (c & 0x3f) | 0x80
c >>= 6
bytes[offset] = c | first
offset += size
ctx.offset = typeoffset
typesize(0x10, offset - contentoffset, bound, ctx)
ctx.offset = offset
# Boolean serialization
put.boolean = (val, ctx) ->
ctx.resize(1)
ctx.view.setUint8(ctx.offset, 1 + val * 2)
ctx.offset++
# Number serialization
put.number = (val, ctx) ->
# If integer less than 2^32 encode as integer, otherwise we write it as 64 bit float.
# Javascript only support 64 bit floats, so encoding anything bigger than 2^32 as
# integer is pointless.
if val % 1 is 0 and Math.abs(val) <= 0xffffffff
if val > 0
typesize(0x4, val, val, ctx)
else
typesize(0x8, -val, -val, ctx)
else
ctx.resize(9)
ctx.view.setUint8(ctx.offset, 0xd)
ctx.view.setFloat64(ctx.offset + 1, val, true)
ctx.offset += 9
# Object serialization
put.object = (val, ctx) ->
# Handle binary fields
if val instanceof ArrayBuffer
typesize(0x14, val.byteLength, val.byteLength, ctx)
ctx.resize(val.byteLength)
ctx.bytes.set(new Uint8Array(val), ctx.offset)
ctx.offset += val.byteLength
# Serialization of arrays
else if val instanceof Array
typeoffset = ctx.offset
typesize(0x20, 0, 0x10000, ctx)
contentoffset = ctx.offset
for v in val
put[typeof v](v, ctx)
offset = ctx.offset
ctx.offset = typeoffset
typesize(0x20, offset - contentoffset, 0x10000, ctx)
ctx.offset = offset
# Serialize objects that isn't null
else if val isnt null
typeoffset = ctx.offset
typesize(0x24, 0, 0x10000, ctx)
contentoffset = ctx.offset
for k, v of val
put[typeof k](k, ctx)
put[typeof v](v, ctx)
offset = ctx.offset
ctx.offset = typeoffset
typesize(0x24, offset - contentoffset, 0x10000, ctx)
ctx.offset = offset
# Serialization of null
else #if data is null
ctx.resize(1)
ctx.view.setUint8(ctx.offset, 0x0)
ctx.offset++
# Write type and size field with enought bit s.t. size can grow to bound later
typesize = (type, size, bound, ctx) ->
if size > bound
bound = size
if bound < 0x10000
if bound < 0x100
ctx.resize(2)
ctx.view.setUint8(ctx.offset, type)
ctx.view.setUint8(ctx.offset + 1, size)
ctx.offset += 2
else
ctx.resize(3)
ctx.view.setUint8(ctx.offset, type + 1)
ctx.view.setUint16(ctx.offset + 1, size, true)
ctx.offset += 3
else if bound < 0x100000000
ctx.resize(5)
ctx.view.setUint8(ctx.offset, type + 2)
ctx.view.setUint32(ctx.offset + 1, size, true)
ctx.offset += 5
else
# This code path is untested, will fail for numbers larger than
# 2^53 as Javascript numbers are 64bit floats.
# But let's hope documents larger 4GiB are unlikely, and assume
# documents larger than 9 PetaBytes isn't relevant.
#
# BJSON.coffee encodes numbers larger than 2^32 as 64 bit floats,
# however, other BJSON implementations could implement them as
# numbers, in which case this code is best effort to read these
# numbers.
#
# Technically, Javascript supports integers up to 2^53, however,
# bitwise operations on more than 32bit integers is not possible.
# This is why we use subtractions and division to find the upper
# 32 bits of the 64 bit integer. This transformation could have
# nasty side effects, who, knows... But browsers probably doesn't
# support ArrayBuffers larger than 4GiB anyway.
ctx.resize(9)
ctx.view.setUint8(ctx.offset, type + 3)
ctx.view.setUint32(ctx.offset + 1, size & 0xffffffff, true)
ctx.view.setUint32(ctx.offset + 5, (size - (size & 0xffffffff)) / 0x100000000, true)
ctx.offset += 9
# Serialize a JSON document to BJSON
@BJSON.serialize = (val) ->
ctx = new SerializationContext()
put[typeof val](val, ctx)
return ctx.buf.slice(0, ctx.offset)
| 29765 | # Copyright 2012 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@BJSON ?= {}
#### BJSON Parser
# String type parser
decodeString = (size, ctx) ->
#if TextDecoder?
# val = TextDecoder('utf-8').decode(new Uint8Array(ctx.buffer, ctx.offset, size))
# ctx.offset += size
# return val
offset = ctx.offset
end = ctx.offset + size
strs = []
# while there is text to read
while offset < end
buf = []
nextend = Math.min(end, offset + 0x7fff)
# while there's room for two entries in buf
while offset < nextend
b = ctx.bytes[offset++]
if not (b & 0x80)
# Add codepoint b
buf.push b
continue
i = 0
while (b << i) & 0x40
i++
c = b & (0xff >> i + 1)
while i > 0
i -= 1
if offset == end
i = -1
break
b = ctx.bytes[offset++]
if (b & 0xc0) != 0x80
i = -1
offset--
break
c = (c << 6) | (b & 0x3f)
if i < 0
c = 0xfffd # Replacement character
# Add codepoint c
if c <= 0xffff
buf.push c
else
c -= 0x10000
buf.push 0xd800 + ((c >> 10) & 0x3ff)
buf.push 0xdc00 + (c & 0x3ff)
# Decode codepoints and add string to list
strs.push String.fromCharCode(buf...)
ctx.offset += size
# Join and return decoded strings
if strs.length == 1
return strs[0]
return strs.join("")
# Array lookup is faster than Math.pow: http://jsperf.com/math-pow-vs-array-lookup
prim = [null, false, "", true]
# Read BJSON item
read = (ctx) ->
t = ctx.bytes[ctx.offset++]
tt = (t & 0x3c) >> 2 # High bits, indicating type
st = t & 0x3 # Low bits, indicating size of size field
# Types 0 and 3 are special cases, these take different arguments
if tt is 0
return prim[st]
if tt is 3
if st is 0
val = ctx.view.getFloat32(ctx.offset, true)
else
val = ctx.view.getFloat64(ctx.offset, true)
ctx.offset += 4 + 4 * st
return val
# If tt isn't 0 or 3, we must read a size field
if st < 2
if st is 0
size = ctx.bytes[ctx.offset++]
else
size = ctx.view.getUint16(ctx.offset, true)
ctx.offset += 2
else
if st is 2
size = ctx.view.getUint32(ctx.offset, true)
ctx.offset += 4
else
# This code path is untested, and will fail for numbers larger
# than 2^53, but let's hope documents large than 4GiB are unlikely.
# Technically, Javascript supports integers up to 2^53, however,
# bitwise operations on more than 32bit integers is not possible.
# This is why we use addition and multiplication to combine the
# upper and lower parts of the 64 bit integer.
# This transformation could have nasty side effects, who, knows...
# But browsers probably doesn't support ArrayBuffers larger than
# 4GiB anyway.
lower = ctx.view.getUint64(ctx.offset, true)
upper = ctx.view.getUint64(ctx.offset + 4, true)
size = lower + upper * 0x100000000
ctx.offset += 8
if tt < 3
return size * (3 - 2 * tt) # possible values are 1 and 2
else if tt is 4 # String
return decodeString(size, ctx)
else
if tt is 9 # Object
end = ctx.offset + size
obj = {}
while ctx.offset < end
key = read(ctx)
val = read(ctx)
obj[key] = val
return obj
else if tt is 8 # Array
end = ctx.offset + size
val = []
while ctx.offset < end
val.push read(ctx)
return val
else if tt is 5 # ArrayBuffer
val = ctx.buffer.slice(ctx.offset, size)
ctx.offset += size
return val
throw new Error("Type doesn't exists!!!")
# Parse a BJSON document
@BJSON.parse = (buf) ->
# Create a context and read using it
return read
buffer: buf
view: new DataView(buf)
bytes: new Uint8Array(buf)
offset: 0
#### BJSON Serialization
class SerializationContext
constructor: (size = 4096) ->
@buf = new ArrayBuffer(size)
@view = new DataView(@buf)
@bytes = new Uint8Array(@buf)
@offset = 0
resize: (size) ->
if @buf.byteLength - @offset < size
@buf = new ArrayBuffer((@offset + size) * 2)
bytes = new Uint8Array(@buf)
bytes.set(@bytes)
@bytes = bytes
@view = new DataView(@buf)
# Dictionary with serializers for different types, each taking a value and serializing to context
put = {}
# String serialization
# For DOM-string intepretation see: http://www.w3.org/TR/WebIDL/#idl-DOMString
# For UTF-8 encoding see: http://tools.ietf.org/html/rfc3629#section-3
put.string = (val, ctx) ->
if val.length is 0
ctx.resize(1)
ctx.view.setUint8(ctx.offset, 0x2)
ctx.offset += 1
else
bound = val.length * 3
typeoffset = ctx.offset
typesize(0x10, 0, bound, ctx)
contentoffset = ctx.offset
ctx.resize(bound)
offset = ctx.offset
bytes = ctx.bytes
i = 0
n = val.length
while i < n
c = val.charCodeAt(i++)
size = 0
first = 0
if c < 0x80
bytes[offset++] = c
continue
else if c < 0x800
first = 0xc0
size = 2
else if c < 0xd800 or c > 0xdfff
first = 0xe0
size = 3
else if 0xdc00 <= c <= 0xdfff
c = 0xfffd # Replacement character
first = 0xe0
size = 3
else if 0xd800 <= c <= 0xdbff
if i < n
d = val.charCodeAt(i++)
if 0xdc00 <= d <= 0xdfff
a = c & 0x3ff
b = d & 0x3ff
c = 0x10000 + (a << 10) + b
first = 0xf0
size = 4
else
c = 0xfffd # Replacement character
first = 0xe0
size = 3
else
# Specification doesn't derive any character from c
continue
j = offset + size - 1
while j > offset
bytes[j--] = (c & 0x3f) | 0x80
c >>= 6
bytes[offset] = c | first
offset += size
ctx.offset = typeoffset
typesize(0x10, offset - contentoffset, bound, ctx)
ctx.offset = offset
# Boolean serialization
put.boolean = (val, ctx) ->
ctx.resize(1)
ctx.view.setUint8(ctx.offset, 1 + val * 2)
ctx.offset++
# Number serialization
put.number = (val, ctx) ->
# If integer less than 2^32 encode as integer, otherwise we write it as 64 bit float.
# Javascript only support 64 bit floats, so encoding anything bigger than 2^32 as
# integer is pointless.
if val % 1 is 0 and Math.abs(val) <= 0xffffffff
if val > 0
typesize(0x4, val, val, ctx)
else
typesize(0x8, -val, -val, ctx)
else
ctx.resize(9)
ctx.view.setUint8(ctx.offset, 0xd)
ctx.view.setFloat64(ctx.offset + 1, val, true)
ctx.offset += 9
# Object serialization
put.object = (val, ctx) ->
# Handle binary fields
if val instanceof ArrayBuffer
typesize(0x14, val.byteLength, val.byteLength, ctx)
ctx.resize(val.byteLength)
ctx.bytes.set(new Uint8Array(val), ctx.offset)
ctx.offset += val.byteLength
# Serialization of arrays
else if val instanceof Array
typeoffset = ctx.offset
typesize(0x20, 0, 0x10000, ctx)
contentoffset = ctx.offset
for v in val
put[typeof v](v, ctx)
offset = ctx.offset
ctx.offset = typeoffset
typesize(0x20, offset - contentoffset, 0x10000, ctx)
ctx.offset = offset
# Serialize objects that isn't null
else if val isnt null
typeoffset = ctx.offset
typesize(0x24, 0, 0x10000, ctx)
contentoffset = ctx.offset
for k, v of val
put[typeof k](k, ctx)
put[typeof v](v, ctx)
offset = ctx.offset
ctx.offset = typeoffset
typesize(0x24, offset - contentoffset, 0x10000, ctx)
ctx.offset = offset
# Serialization of null
else #if data is null
ctx.resize(1)
ctx.view.setUint8(ctx.offset, 0x0)
ctx.offset++
# Write type and size field with enought bit s.t. size can grow to bound later
typesize = (type, size, bound, ctx) ->
if size > bound
bound = size
if bound < 0x10000
if bound < 0x100
ctx.resize(2)
ctx.view.setUint8(ctx.offset, type)
ctx.view.setUint8(ctx.offset + 1, size)
ctx.offset += 2
else
ctx.resize(3)
ctx.view.setUint8(ctx.offset, type + 1)
ctx.view.setUint16(ctx.offset + 1, size, true)
ctx.offset += 3
else if bound < 0x100000000
ctx.resize(5)
ctx.view.setUint8(ctx.offset, type + 2)
ctx.view.setUint32(ctx.offset + 1, size, true)
ctx.offset += 5
else
# This code path is untested, will fail for numbers larger than
# 2^53 as Javascript numbers are 64bit floats.
# But let's hope documents larger 4GiB are unlikely, and assume
# documents larger than 9 PetaBytes isn't relevant.
#
# BJSON.coffee encodes numbers larger than 2^32 as 64 bit floats,
# however, other BJSON implementations could implement them as
# numbers, in which case this code is best effort to read these
# numbers.
#
# Technically, Javascript supports integers up to 2^53, however,
# bitwise operations on more than 32bit integers is not possible.
# This is why we use subtractions and division to find the upper
# 32 bits of the 64 bit integer. This transformation could have
# nasty side effects, who, knows... But browsers probably doesn't
# support ArrayBuffers larger than 4GiB anyway.
ctx.resize(9)
ctx.view.setUint8(ctx.offset, type + 3)
ctx.view.setUint32(ctx.offset + 1, size & 0xffffffff, true)
ctx.view.setUint32(ctx.offset + 5, (size - (size & 0xffffffff)) / 0x100000000, true)
ctx.offset += 9
# Serialize a JSON document to BJSON
@BJSON.serialize = (val) ->
ctx = new SerializationContext()
put[typeof val](val, ctx)
return ctx.buf.slice(0, ctx.offset)
| true | # Copyright 2012 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@BJSON ?= {}
#### BJSON Parser
# String type parser
decodeString = (size, ctx) ->
#if TextDecoder?
# val = TextDecoder('utf-8').decode(new Uint8Array(ctx.buffer, ctx.offset, size))
# ctx.offset += size
# return val
offset = ctx.offset
end = ctx.offset + size
strs = []
# while there is text to read
while offset < end
buf = []
nextend = Math.min(end, offset + 0x7fff)
# while there's room for two entries in buf
while offset < nextend
b = ctx.bytes[offset++]
if not (b & 0x80)
# Add codepoint b
buf.push b
continue
i = 0
while (b << i) & 0x40
i++
c = b & (0xff >> i + 1)
while i > 0
i -= 1
if offset == end
i = -1
break
b = ctx.bytes[offset++]
if (b & 0xc0) != 0x80
i = -1
offset--
break
c = (c << 6) | (b & 0x3f)
if i < 0
c = 0xfffd # Replacement character
# Add codepoint c
if c <= 0xffff
buf.push c
else
c -= 0x10000
buf.push 0xd800 + ((c >> 10) & 0x3ff)
buf.push 0xdc00 + (c & 0x3ff)
# Decode codepoints and add string to list
strs.push String.fromCharCode(buf...)
ctx.offset += size
# Join and return decoded strings
if strs.length == 1
return strs[0]
return strs.join("")
# Array lookup is faster than Math.pow: http://jsperf.com/math-pow-vs-array-lookup
prim = [null, false, "", true]
# Read BJSON item
read = (ctx) ->
t = ctx.bytes[ctx.offset++]
tt = (t & 0x3c) >> 2 # High bits, indicating type
st = t & 0x3 # Low bits, indicating size of size field
# Types 0 and 3 are special cases, these take different arguments
if tt is 0
return prim[st]
if tt is 3
if st is 0
val = ctx.view.getFloat32(ctx.offset, true)
else
val = ctx.view.getFloat64(ctx.offset, true)
ctx.offset += 4 + 4 * st
return val
# If tt isn't 0 or 3, we must read a size field
if st < 2
if st is 0
size = ctx.bytes[ctx.offset++]
else
size = ctx.view.getUint16(ctx.offset, true)
ctx.offset += 2
else
if st is 2
size = ctx.view.getUint32(ctx.offset, true)
ctx.offset += 4
else
# This code path is untested, and will fail for numbers larger
# than 2^53, but let's hope documents large than 4GiB are unlikely.
# Technically, Javascript supports integers up to 2^53, however,
# bitwise operations on more than 32bit integers is not possible.
# This is why we use addition and multiplication to combine the
# upper and lower parts of the 64 bit integer.
# This transformation could have nasty side effects, who, knows...
# But browsers probably doesn't support ArrayBuffers larger than
# 4GiB anyway.
lower = ctx.view.getUint64(ctx.offset, true)
upper = ctx.view.getUint64(ctx.offset + 4, true)
size = lower + upper * 0x100000000
ctx.offset += 8
if tt < 3
return size * (3 - 2 * tt) # possible values are 1 and 2
else if tt is 4 # String
return decodeString(size, ctx)
else
if tt is 9 # Object
end = ctx.offset + size
obj = {}
while ctx.offset < end
key = read(ctx)
val = read(ctx)
obj[key] = val
return obj
else if tt is 8 # Array
end = ctx.offset + size
val = []
while ctx.offset < end
val.push read(ctx)
return val
else if tt is 5 # ArrayBuffer
val = ctx.buffer.slice(ctx.offset, size)
ctx.offset += size
return val
throw new Error("Type doesn't exists!!!")
# Parse a BJSON document
@BJSON.parse = (buf) ->
# Create a context and read using it
return read
buffer: buf
view: new DataView(buf)
bytes: new Uint8Array(buf)
offset: 0
#### BJSON Serialization
class SerializationContext
constructor: (size = 4096) ->
@buf = new ArrayBuffer(size)
@view = new DataView(@buf)
@bytes = new Uint8Array(@buf)
@offset = 0
resize: (size) ->
if @buf.byteLength - @offset < size
@buf = new ArrayBuffer((@offset + size) * 2)
bytes = new Uint8Array(@buf)
bytes.set(@bytes)
@bytes = bytes
@view = new DataView(@buf)
# Dictionary with serializers for different types, each taking a value and serializing to context
put = {}
# String serialization
# For DOM-string intepretation see: http://www.w3.org/TR/WebIDL/#idl-DOMString
# For UTF-8 encoding see: http://tools.ietf.org/html/rfc3629#section-3
put.string = (val, ctx) ->
if val.length is 0
ctx.resize(1)
ctx.view.setUint8(ctx.offset, 0x2)
ctx.offset += 1
else
bound = val.length * 3
typeoffset = ctx.offset
typesize(0x10, 0, bound, ctx)
contentoffset = ctx.offset
ctx.resize(bound)
offset = ctx.offset
bytes = ctx.bytes
i = 0
n = val.length
while i < n
c = val.charCodeAt(i++)
size = 0
first = 0
if c < 0x80
bytes[offset++] = c
continue
else if c < 0x800
first = 0xc0
size = 2
else if c < 0xd800 or c > 0xdfff
first = 0xe0
size = 3
else if 0xdc00 <= c <= 0xdfff
c = 0xfffd # Replacement character
first = 0xe0
size = 3
else if 0xd800 <= c <= 0xdbff
if i < n
d = val.charCodeAt(i++)
if 0xdc00 <= d <= 0xdfff
a = c & 0x3ff
b = d & 0x3ff
c = 0x10000 + (a << 10) + b
first = 0xf0
size = 4
else
c = 0xfffd # Replacement character
first = 0xe0
size = 3
else
# Specification doesn't derive any character from c
continue
j = offset + size - 1
while j > offset
bytes[j--] = (c & 0x3f) | 0x80
c >>= 6
bytes[offset] = c | first
offset += size
ctx.offset = typeoffset
typesize(0x10, offset - contentoffset, bound, ctx)
ctx.offset = offset
# Boolean serialization
put.boolean = (val, ctx) ->
ctx.resize(1)
ctx.view.setUint8(ctx.offset, 1 + val * 2)
ctx.offset++
# Number serialization
put.number = (val, ctx) ->
# If integer less than 2^32 encode as integer, otherwise we write it as 64 bit float.
# Javascript only support 64 bit floats, so encoding anything bigger than 2^32 as
# integer is pointless.
if val % 1 is 0 and Math.abs(val) <= 0xffffffff
if val > 0
typesize(0x4, val, val, ctx)
else
typesize(0x8, -val, -val, ctx)
else
ctx.resize(9)
ctx.view.setUint8(ctx.offset, 0xd)
ctx.view.setFloat64(ctx.offset + 1, val, true)
ctx.offset += 9
# Object serialization
put.object = (val, ctx) ->
# Handle binary fields
if val instanceof ArrayBuffer
typesize(0x14, val.byteLength, val.byteLength, ctx)
ctx.resize(val.byteLength)
ctx.bytes.set(new Uint8Array(val), ctx.offset)
ctx.offset += val.byteLength
# Serialization of arrays
else if val instanceof Array
typeoffset = ctx.offset
typesize(0x20, 0, 0x10000, ctx)
contentoffset = ctx.offset
for v in val
put[typeof v](v, ctx)
offset = ctx.offset
ctx.offset = typeoffset
typesize(0x20, offset - contentoffset, 0x10000, ctx)
ctx.offset = offset
# Serialize objects that isn't null
else if val isnt null
typeoffset = ctx.offset
typesize(0x24, 0, 0x10000, ctx)
contentoffset = ctx.offset
for k, v of val
put[typeof k](k, ctx)
put[typeof v](v, ctx)
offset = ctx.offset
ctx.offset = typeoffset
typesize(0x24, offset - contentoffset, 0x10000, ctx)
ctx.offset = offset
# Serialization of null
else #if data is null
ctx.resize(1)
ctx.view.setUint8(ctx.offset, 0x0)
ctx.offset++
# Write type and size field with enought bit s.t. size can grow to bound later
typesize = (type, size, bound, ctx) ->
if size > bound
bound = size
if bound < 0x10000
if bound < 0x100
ctx.resize(2)
ctx.view.setUint8(ctx.offset, type)
ctx.view.setUint8(ctx.offset + 1, size)
ctx.offset += 2
else
ctx.resize(3)
ctx.view.setUint8(ctx.offset, type + 1)
ctx.view.setUint16(ctx.offset + 1, size, true)
ctx.offset += 3
else if bound < 0x100000000
ctx.resize(5)
ctx.view.setUint8(ctx.offset, type + 2)
ctx.view.setUint32(ctx.offset + 1, size, true)
ctx.offset += 5
else
# This code path is untested, will fail for numbers larger than
# 2^53 as Javascript numbers are 64bit floats.
# But let's hope documents larger 4GiB are unlikely, and assume
# documents larger than 9 PetaBytes isn't relevant.
#
# BJSON.coffee encodes numbers larger than 2^32 as 64 bit floats,
# however, other BJSON implementations could implement them as
# numbers, in which case this code is best effort to read these
# numbers.
#
# Technically, Javascript supports integers up to 2^53, however,
# bitwise operations on more than 32bit integers is not possible.
# This is why we use subtractions and division to find the upper
# 32 bits of the 64 bit integer. This transformation could have
# nasty side effects, who, knows... But browsers probably doesn't
# support ArrayBuffers larger than 4GiB anyway.
ctx.resize(9)
ctx.view.setUint8(ctx.offset, type + 3)
ctx.view.setUint32(ctx.offset + 1, size & 0xffffffff, true)
ctx.view.setUint32(ctx.offset + 5, (size - (size & 0xffffffff)) / 0x100000000, true)
ctx.offset += 9
# Serialize a JSON document to BJSON
@BJSON.serialize = (val) ->
ctx = new SerializationContext()
put[typeof val](val, ctx)
return ctx.buf.slice(0, ctx.offset)
|
[
{
"context": "ser = require \"./twitterUser\"\n\ntwitter =\n id : 136861797\n accessToken : \"136861797-3GmHLyD80c6SsoY6CNz0",
"end": 60,
"score": 0.9455709457397461,
"start": 51,
"tag": "USERNAME",
"value": "136861797"
},
{
"context": "\n\ntwitter =\n id : 136861797\n ac... | tests/support/twitter.coffee | sharismlab/social-brain-framework | 2 | user = require "./twitterUser"
twitter =
id : 136861797
accessToken : "136861797-3GmHLyD80c6SsoY6CNz04lWEgUe4fkSQWO9YwLwi"
accessTokenSecret : "FJUTmsmlRCPONjNHd53MVaglGmRtIKt4TyDdWyMuPE"
user : user | 83470 | user = require "./twitterUser"
twitter =
id : 136861797
accessToken : "<KEY>"
accessTokenSecret : "<KEY>"
user : user | true | user = require "./twitterUser"
twitter =
id : 136861797
accessToken : "PI:KEY:<KEY>END_PI"
accessTokenSecret : "PI:KEY:<KEY>END_PI"
user : user |
[
{
"context": "module.exports =\n name: 'Shoutt'\n domain: 'localhost:3000'",
"end": 32,
"score": 0.979110062122345,
"start": 26,
"tag": "NAME",
"value": "Shoutt"
}
] | Client/src/js/config.coffee | Huntrr/Shout | 0 | module.exports =
name: 'Shoutt'
domain: 'localhost:3000' | 2210 | module.exports =
name: '<NAME>'
domain: 'localhost:3000' | true | module.exports =
name: 'PI:NAME:<NAME>END_PI'
domain: 'localhost:3000' |
[
{
"context": "calhost'\n username: process.env.USERNAME || 'admin'\n password: process.env.PASSWORD || '9999'\n ",
"end": 345,
"score": 0.99436354637146,
"start": 340,
"tag": "USERNAME",
"value": "admin"
},
{
"context": "e: process.env.USERNAME || 'admin'\n password: p... | test/device.coffee | vvvait/onvif | 1 | synthTest = not process.env.HOSTNAME
assert = require 'assert'
onvif = require('../lib/onvif')
serverMockup = require('./serverMockup') if synthTest
util = require('util')
describe 'Device', () ->
cam = null
before (done) ->
options = {
hostname: process.env.HOSTNAME || 'localhost'
username: process.env.USERNAME || 'admin'
password: process.env.PASSWORD || '9999'
port: if process.env.PORT then parseInt(process.env.PORT) else 10101
}
cam = new onvif.Cam options, done
describe 'getNTP', () ->
it 'should return NTP settings', (done) ->
cam.getNTP (err, data) ->
assert.equal err, null
done()
describe 'setNTP', () ->
if synthTest
it 'should set NTP with ipv4', (done) ->
cam.setNTP {
fromDHCP: false
type: 'IPv4'
ipv4Address: 'localhost'
}, (err) ->
assert.equal err, null
done()
it 'should set NTP with ipv6', (done) ->
cam.setNTP {
fromDHCP: false
type: 'IPv6'
ipv6Address: '::1/128'
dnsName: '8.8.8.8'
}, (err) ->
assert.equal err, null
done()
it 'should set NTP from DHCP', (done) ->
cam.setNTP {
fromDHCP: true
}, (err) ->
assert.equal err, null
done()
it 'should set multiple NTPs', (done) ->
cam.setNTP {
fromDHCP: false,
NTPManual: [
{
type: 'IPv4'
ipv4Address: 'localhost'
},
{
type: 'IPv6'
ipv6Address: '::1/128'
dnsName: '8.8.8.8'
},
]
}, (err) ->
assert.equal err, null
done()
describe 'getNetworkInterfaces', () ->
it 'should return a NetworkInterface', (done) ->
cam.getNetworkInterfaces (err, networkInterfaces) ->
assert.equal err, null
assert.equal networkInterfaces[0].$.token, 'eth0' # Defined in serverMockup/device.GetNetworkInterfaces.xml
done()
describe 'setNetworkInterfaces', () ->
it 'should set manual IPv4, update the Cam object with the new IP and return RebootNeeded', (done) ->
currentIP = cam.hostname
cam.setNetworkInterfaces {
interfaceToken: 'interfaceToken',
networkInterface: {
enabled: true,
IPv4: {
enabled: true,
DHCP: false,
manual: {
address: '127.0.0.1',
prefixLength: 24
}
}
}
}, (err, data) ->
newIP = cam.hostname # Save the new IP
cam.hostname = currentIP # Then set the original one for other tests
assert.equal newIP, '127.0.0.1'
assert.equal err, null
assert.equal data.rebootNeeded, false # Defined in serverMockup/device.SetNetworkInterfaces.xml
done()
it 'should set manual IPv6, update the Cam object with the new IP and return RebootNeeded', (done) ->
currentIP = cam.hostname
cam.setNetworkInterfaces {
interfaceToken: 'interfaceToken',
networkInterface: {
enabled: true,
IPv6: {
enabled: true,
DHCP: false,
manual: {
address: '::1',
prefixLength: 24
}
}
}
}, (err, data) ->
newIP = cam.hostname # Save the new IP
cam.hostname = currentIP # Then set the original one for other tests
assert.equal newIP, '::1'
assert.equal err, null
assert.equal data.rebootNeeded, false # Defined in serverMockup/device.SetNetworkInterfaces.xml
done()
describe 'getNetworkDefaultGateway', () ->
it 'should return a NetworkGateway', (done) ->
cam.getNetworkDefaultGateway (err, data) ->
assert.equal err, null
assert.equal data.IPv4Address, '192.168.0.1'
assert.equal data.IPv6Address, ''
done()
describe 'setNetworkDefaultGateway', () ->
it 'should set IPv4 address and return a NetworkGateway', (done) ->
cam.setNetworkDefaultGateway {
IPv4Address: '192.168.0.2'
}, (err, data) ->
assert.equal err, null
assert.equal typeof data.IPv4Address, 'string' # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
assert.equal typeof data.IPv6Address, 'string'
done()
it 'should set IPv6 address and return a NetworkGateway', (done) ->
cam.setNetworkDefaultGateway {
IPv6Address: '::2'
}, (err, data) ->
assert.equal err, null
assert.equal typeof data.IPv4Address, 'string' # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
assert.equal typeof data.IPv6Address, 'string'
done()
describe 'getDNS', () ->
it 'should return a DNSInformation', (done) ->
cam.getDNS (err, data) ->
assert.equal err, null
assert.equal data.fromDHCP, false # Values defined in serverMockup/device.GetDNS.xml
assert.ok Array.isArray(data.DNSManual)
assert.equal data.DNSManual[0].type, 'IPv4'
assert.equal data.DNSManual[0].IPv4Address, '4.4.4.4'
assert.equal data.DNSManual[1].type, 'IPv4'
assert.equal data.DNSManual[1].IPv4Address, '8.8.8.8'
done()
describe 'setDNS', () ->
it 'should set IPv4 address and return a DNSInformation', (done) ->
cam.setDNS {
fromDHCP: false,
DNSManual: [
{
type: 'IPv4',
IPv4Address: '5.5.5.5'
},
{
type: 'IPv4',
IPv4Address: '9.9.9.9'
}
]
}, (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data.DNSManual) # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
done()
it 'should set IPv6 address and return a DNSInformation', (done) ->
cam.setDNS {
fromDHCP: false,
DNSManual: [
{
type: 'IPv6',
IPv6Address: '2001:4860:4860::8888'
},
{
type: 'IPv6',
IPv6Address: '2001:4860:4860::8844'
}
]
}, (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data.DNSManual) # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
done()
describe 'setSystemFactoryDefault', () ->
it 'should request a soft factory default', (done) ->
cam.setSystemFactoryDefault (err) ->
assert.equal err, null
done()
it 'should request a hard factory default', (done) ->
cam.setSystemFactoryDefault true, (err) ->
assert.equal err, null
done()
describe 'getUsers', () ->
it 'should return a list of user', (done) ->
cam.getUsers (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
assert.equal data[0].username, 'admin'
assert.equal data[0].password, 'admin'
assert.equal data[0].userLevel, 'Administrator'
done()
describe 'createUsers', () ->
it 'should create users', (done) ->
cam.createUsers [
{
username: 'username1',
password: 'password1',
userLevel: 'User'
},
{
username: 'username2',
password: 'password2',
userLevel: 'User'
},
], (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
done()
describe 'setUsers', () ->
it 'should set users', (done) ->
cam.setUsers [
{
username: 'username1',
password: 'password1',
userLevel: 'User'
},
{
username: 'username2',
password: 'password2',
userLevel: 'User'
},
], (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
done()
describe 'deleteUsers', () ->
it 'should delete users', (done) ->
cam.deleteUsers [
{
username: 'username1',
password: 'password1',
userLevel: 'User'
},
'username2',
], (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
done()
| 128885 | synthTest = not process.env.HOSTNAME
assert = require 'assert'
onvif = require('../lib/onvif')
serverMockup = require('./serverMockup') if synthTest
util = require('util')
describe 'Device', () ->
cam = null
before (done) ->
options = {
hostname: process.env.HOSTNAME || 'localhost'
username: process.env.USERNAME || 'admin'
password: <PASSWORD> || '<PASSWORD>'
port: if process.env.PORT then parseInt(process.env.PORT) else 10101
}
cam = new onvif.Cam options, done
describe 'getNTP', () ->
it 'should return NTP settings', (done) ->
cam.getNTP (err, data) ->
assert.equal err, null
done()
describe 'setNTP', () ->
if synthTest
it 'should set NTP with ipv4', (done) ->
cam.setNTP {
fromDHCP: false
type: 'IPv4'
ipv4Address: 'localhost'
}, (err) ->
assert.equal err, null
done()
it 'should set NTP with ipv6', (done) ->
cam.setNTP {
fromDHCP: false
type: 'IPv6'
ipv6Address: '::1/128'
dnsName: '8.8.8.8'
}, (err) ->
assert.equal err, null
done()
it 'should set NTP from DHCP', (done) ->
cam.setNTP {
fromDHCP: true
}, (err) ->
assert.equal err, null
done()
it 'should set multiple NTPs', (done) ->
cam.setNTP {
fromDHCP: false,
NTPManual: [
{
type: 'IPv4'
ipv4Address: 'localhost'
},
{
type: 'IPv6'
ipv6Address: '::1/128'
dnsName: '8.8.8.8'
},
]
}, (err) ->
assert.equal err, null
done()
describe 'getNetworkInterfaces', () ->
it 'should return a NetworkInterface', (done) ->
cam.getNetworkInterfaces (err, networkInterfaces) ->
assert.equal err, null
assert.equal networkInterfaces[0].$.token, 'eth0' # Defined in serverMockup/device.GetNetworkInterfaces.xml
done()
describe 'setNetworkInterfaces', () ->
it 'should set manual IPv4, update the Cam object with the new IP and return RebootNeeded', (done) ->
currentIP = cam.hostname
cam.setNetworkInterfaces {
interfaceToken: 'interfaceToken',
networkInterface: {
enabled: true,
IPv4: {
enabled: true,
DHCP: false,
manual: {
address: '127.0.0.1',
prefixLength: 24
}
}
}
}, (err, data) ->
newIP = cam.hostname # Save the new IP
cam.hostname = currentIP # Then set the original one for other tests
assert.equal newIP, '127.0.0.1'
assert.equal err, null
assert.equal data.rebootNeeded, false # Defined in serverMockup/device.SetNetworkInterfaces.xml
done()
it 'should set manual IPv6, update the Cam object with the new IP and return RebootNeeded', (done) ->
currentIP = cam.hostname
cam.setNetworkInterfaces {
interfaceToken: 'interfaceToken',
networkInterface: {
enabled: true,
IPv6: {
enabled: true,
DHCP: false,
manual: {
address: '::1',
prefixLength: 24
}
}
}
}, (err, data) ->
newIP = cam.hostname # Save the new IP
cam.hostname = currentIP # Then set the original one for other tests
assert.equal newIP, '::1'
assert.equal err, null
assert.equal data.rebootNeeded, false # Defined in serverMockup/device.SetNetworkInterfaces.xml
done()
describe 'getNetworkDefaultGateway', () ->
it 'should return a NetworkGateway', (done) ->
cam.getNetworkDefaultGateway (err, data) ->
assert.equal err, null
assert.equal data.IPv4Address, '192.168.0.1'
assert.equal data.IPv6Address, ''
done()
describe 'setNetworkDefaultGateway', () ->
it 'should set IPv4 address and return a NetworkGateway', (done) ->
cam.setNetworkDefaultGateway {
IPv4Address: '192.168.0.2'
}, (err, data) ->
assert.equal err, null
assert.equal typeof data.IPv4Address, 'string' # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
assert.equal typeof data.IPv6Address, 'string'
done()
it 'should set IPv6 address and return a NetworkGateway', (done) ->
cam.setNetworkDefaultGateway {
IPv6Address: '::2'
}, (err, data) ->
assert.equal err, null
assert.equal typeof data.IPv4Address, 'string' # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
assert.equal typeof data.IPv6Address, 'string'
done()
describe 'getDNS', () ->
it 'should return a DNSInformation', (done) ->
cam.getDNS (err, data) ->
assert.equal err, null
assert.equal data.fromDHCP, false # Values defined in serverMockup/device.GetDNS.xml
assert.ok Array.isArray(data.DNSManual)
assert.equal data.DNSManual[0].type, 'IPv4'
assert.equal data.DNSManual[0].IPv4Address, '172.16.17.32'
assert.equal data.DNSManual[1].type, 'IPv4'
assert.equal data.DNSManual[1].IPv4Address, '8.8.8.8'
done()
describe 'setDNS', () ->
it 'should set IPv4 address and return a DNSInformation', (done) ->
cam.setDNS {
fromDHCP: false,
DNSManual: [
{
type: 'IPv4',
IPv4Address: '172.16.17.32'
},
{
type: 'IPv4',
IPv4Address: '9.9.9.9'
}
]
}, (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data.DNSManual) # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
done()
it 'should set IPv6 address and return a DNSInformation', (done) ->
cam.setDNS {
fromDHCP: false,
DNSManual: [
{
type: 'IPv6',
IPv6Address: 'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b'
},
{
type: 'IPv6',
IPv6Address: 'fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b'
}
]
}, (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data.DNSManual) # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
done()
describe 'setSystemFactoryDefault', () ->
it 'should request a soft factory default', (done) ->
cam.setSystemFactoryDefault (err) ->
assert.equal err, null
done()
it 'should request a hard factory default', (done) ->
cam.setSystemFactoryDefault true, (err) ->
assert.equal err, null
done()
describe 'getUsers', () ->
it 'should return a list of user', (done) ->
cam.getUsers (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
assert.equal data[0].username, 'admin'
assert.equal data[0].password, '<PASSWORD>'
assert.equal data[0].userLevel, 'Administrator'
done()
describe 'createUsers', () ->
it 'should create users', (done) ->
cam.createUsers [
{
username: 'username1',
password: '<PASSWORD>',
userLevel: 'User'
},
{
username: 'username2',
password: '<PASSWORD>',
userLevel: 'User'
},
], (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
done()
describe 'setUsers', () ->
it 'should set users', (done) ->
cam.setUsers [
{
username: 'username1',
password: '<PASSWORD>',
userLevel: 'User'
},
{
username: 'username2',
password: '<PASSWORD>',
userLevel: 'User'
},
], (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
done()
describe 'deleteUsers', () ->
it 'should delete users', (done) ->
cam.deleteUsers [
{
username: 'username1',
password: '<PASSWORD>',
userLevel: 'User'
},
'username2',
], (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
done()
| true | synthTest = not process.env.HOSTNAME
assert = require 'assert'
onvif = require('../lib/onvif')
serverMockup = require('./serverMockup') if synthTest
util = require('util')
describe 'Device', () ->
cam = null
before (done) ->
options = {
hostname: process.env.HOSTNAME || 'localhost'
username: process.env.USERNAME || 'admin'
password: PI:PASSWORD:<PASSWORD>END_PI || 'PI:PASSWORD:<PASSWORD>END_PI'
port: if process.env.PORT then parseInt(process.env.PORT) else 10101
}
cam = new onvif.Cam options, done
describe 'getNTP', () ->
it 'should return NTP settings', (done) ->
cam.getNTP (err, data) ->
assert.equal err, null
done()
describe 'setNTP', () ->
if synthTest
it 'should set NTP with ipv4', (done) ->
cam.setNTP {
fromDHCP: false
type: 'IPv4'
ipv4Address: 'localhost'
}, (err) ->
assert.equal err, null
done()
it 'should set NTP with ipv6', (done) ->
cam.setNTP {
fromDHCP: false
type: 'IPv6'
ipv6Address: '::1/128'
dnsName: '8.8.8.8'
}, (err) ->
assert.equal err, null
done()
it 'should set NTP from DHCP', (done) ->
cam.setNTP {
fromDHCP: true
}, (err) ->
assert.equal err, null
done()
it 'should set multiple NTPs', (done) ->
cam.setNTP {
fromDHCP: false,
NTPManual: [
{
type: 'IPv4'
ipv4Address: 'localhost'
},
{
type: 'IPv6'
ipv6Address: '::1/128'
dnsName: '8.8.8.8'
},
]
}, (err) ->
assert.equal err, null
done()
describe 'getNetworkInterfaces', () ->
it 'should return a NetworkInterface', (done) ->
cam.getNetworkInterfaces (err, networkInterfaces) ->
assert.equal err, null
assert.equal networkInterfaces[0].$.token, 'eth0' # Defined in serverMockup/device.GetNetworkInterfaces.xml
done()
describe 'setNetworkInterfaces', () ->
it 'should set manual IPv4, update the Cam object with the new IP and return RebootNeeded', (done) ->
currentIP = cam.hostname
cam.setNetworkInterfaces {
interfaceToken: 'interfaceToken',
networkInterface: {
enabled: true,
IPv4: {
enabled: true,
DHCP: false,
manual: {
address: '127.0.0.1',
prefixLength: 24
}
}
}
}, (err, data) ->
newIP = cam.hostname # Save the new IP
cam.hostname = currentIP # Then set the original one for other tests
assert.equal newIP, '127.0.0.1'
assert.equal err, null
assert.equal data.rebootNeeded, false # Defined in serverMockup/device.SetNetworkInterfaces.xml
done()
it 'should set manual IPv6, update the Cam object with the new IP and return RebootNeeded', (done) ->
currentIP = cam.hostname
cam.setNetworkInterfaces {
interfaceToken: 'interfaceToken',
networkInterface: {
enabled: true,
IPv6: {
enabled: true,
DHCP: false,
manual: {
address: '::1',
prefixLength: 24
}
}
}
}, (err, data) ->
newIP = cam.hostname # Save the new IP
cam.hostname = currentIP # Then set the original one for other tests
assert.equal newIP, '::1'
assert.equal err, null
assert.equal data.rebootNeeded, false # Defined in serverMockup/device.SetNetworkInterfaces.xml
done()
describe 'getNetworkDefaultGateway', () ->
it 'should return a NetworkGateway', (done) ->
cam.getNetworkDefaultGateway (err, data) ->
assert.equal err, null
assert.equal data.IPv4Address, '192.168.0.1'
assert.equal data.IPv6Address, ''
done()
describe 'setNetworkDefaultGateway', () ->
it 'should set IPv4 address and return a NetworkGateway', (done) ->
cam.setNetworkDefaultGateway {
IPv4Address: '192.168.0.2'
}, (err, data) ->
assert.equal err, null
assert.equal typeof data.IPv4Address, 'string' # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
assert.equal typeof data.IPv6Address, 'string'
done()
it 'should set IPv6 address and return a NetworkGateway', (done) ->
cam.setNetworkDefaultGateway {
IPv6Address: '::2'
}, (err, data) ->
assert.equal err, null
assert.equal typeof data.IPv4Address, 'string' # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
assert.equal typeof data.IPv6Address, 'string'
done()
describe 'getDNS', () ->
it 'should return a DNSInformation', (done) ->
cam.getDNS (err, data) ->
assert.equal err, null
assert.equal data.fromDHCP, false # Values defined in serverMockup/device.GetDNS.xml
assert.ok Array.isArray(data.DNSManual)
assert.equal data.DNSManual[0].type, 'IPv4'
assert.equal data.DNSManual[0].IPv4Address, 'PI:IP_ADDRESS:172.16.17.32END_PI'
assert.equal data.DNSManual[1].type, 'IPv4'
assert.equal data.DNSManual[1].IPv4Address, '8.8.8.8'
done()
describe 'setDNS', () ->
it 'should set IPv4 address and return a DNSInformation', (done) ->
cam.setDNS {
fromDHCP: false,
DNSManual: [
{
type: 'IPv4',
IPv4Address: 'PI:IP_ADDRESS:172.16.17.32END_PI'
},
{
type: 'IPv4',
IPv4Address: '9.9.9.9'
}
]
}, (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data.DNSManual) # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
done()
it 'should set IPv6 address and return a DNSInformation', (done) ->
cam.setDNS {
fromDHCP: false,
DNSManual: [
{
type: 'IPv6',
IPv6Address: 'PI:IP_ADDRESS:fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5bEND_PI'
},
{
type: 'IPv6',
IPv6Address: 'PI:IP_ADDRESS:fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3bEND_PI'
}
]
}, (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data.DNSManual) # Impossible to test the set values as the response is hard written in serverMockup/device.GetNetworkDefaultGateway.xml
done()
describe 'setSystemFactoryDefault', () ->
it 'should request a soft factory default', (done) ->
cam.setSystemFactoryDefault (err) ->
assert.equal err, null
done()
it 'should request a hard factory default', (done) ->
cam.setSystemFactoryDefault true, (err) ->
assert.equal err, null
done()
describe 'getUsers', () ->
it 'should return a list of user', (done) ->
cam.getUsers (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
assert.equal data[0].username, 'admin'
assert.equal data[0].password, 'PI:PASSWORD:<PASSWORD>END_PI'
assert.equal data[0].userLevel, 'Administrator'
done()
describe 'createUsers', () ->
it 'should create users', (done) ->
cam.createUsers [
{
username: 'username1',
password: 'PI:PASSWORD:<PASSWORD>END_PI',
userLevel: 'User'
},
{
username: 'username2',
password: 'PI:PASSWORD:<PASSWORD>END_PI',
userLevel: 'User'
},
], (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
done()
describe 'setUsers', () ->
it 'should set users', (done) ->
cam.setUsers [
{
username: 'username1',
password: 'PI:PASSWORD:<PASSWORD>END_PI',
userLevel: 'User'
},
{
username: 'username2',
password: 'PI:PASSWORD:<PASSWORD>END_PI',
userLevel: 'User'
},
], (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
done()
describe 'deleteUsers', () ->
it 'should delete users', (done) ->
cam.deleteUsers [
{
username: 'username1',
password: 'PI:PASSWORD:<PASSWORD>END_PI',
userLevel: 'User'
},
'username2',
], (err, data) ->
assert.equal err, null
assert.ok Array.isArray(data)
done()
|
[
{
"context": "equest.setRequestHeader('X-Auth-Token', Accounts._storedLoginToken())\n\n\t\t\tsuccess: (data) ->\n\t\t\t\t$('#afModal').modal",
"end": 5320,
"score": 0.5971968770027161,
"start": 5304,
"tag": "KEY",
"value": "storedLoginToken"
}
] | packages/steedos-creator-autoform-modals/lib/client/modals.coffee | zonglu233/fuel-car | 0 | registeredAutoFormHooks = ['cmForm']
defaultFormId = 'cmForm'
cmOnSuccessCallback = null
AutoForm.addHooks 'cmForm',
onSuccess: ->
$('#afModal').modal('hide')
onError: (operation,error) ->
console.error error
if error.reason
toastr?.error?(TAPi18n.__(error.reason))
else if error.message
toastr?.error?(TAPi18n.__(error.message))
else
toastr?.error?(error)
collectionObj = (name) ->
name.split('.').reduce (o, x) ->
o[x]
, window
oDataOperation = (type, url, data, object_name)->
self = this
$.ajax
type: type
url: url
data: JSON.stringify(data)
dataType: 'json'
contentType: "application/json"
processData: false
beforeSend: (request) ->
request.setRequestHeader 'X-User-Id', Meteor.userId()
request.setRequestHeader 'X-Auth-Token', Accounts._storedLoginToken()
success: (data) ->
if Session.get("cmOperation") == "insert"
_id = data.value[0]._id
else if Session.get("cmOperation") == "update"
_id = data._id
# console.log _id
data = {_id: _id}
data.type = type
data.object_name = object_name
self.done(null, data)
error: (jqXHR, textStatus, errorThrown) ->
# console.log(errorThrown);
self.done(jqXHR.responseJSON.error.message)
getObjectName = (collectionName)->
return collectionName.replace(/Creator.Collections./, "")
getSimpleSchema = (collectionName)->
if collectionName
object_name = getObjectName collectionName
object_fields = Creator.getObject(object_name).fields
_fields = Creator.getFields(object_name)
schema = collectionObj(collectionName).simpleSchema()._schema
fields = Session.get("cmFields")
final_schema = {}
if fields
fields = fields.replace(/\ /g, "").split(",")
_.each fields, (field)->
if object_fields[field]?.type == "grid"
table_fields = _.filter _fields, (f)->
reg = new RegExp("^(" + field + ")(\\.\\$\\.){1}\\w+")
return reg.test(f)
_.each table_fields, (f)->
_.extend(final_schema, _.pick(schema, f))
obj = _.pick(schema, field, field + ".$")
_.extend(final_schema, obj)
else
final_schema = schema
if Session.get 'cmMeteorMethod'
#新增_ids虚拟字段,以实现条记录同时更新
final_schema._ids =
type: String
optional: true
autoform:
type: "hidden"
#新增_object_name虚拟字段,以让后台method知道更新哪个表
final_schema._object_name =
type: String
optional: true
autoform:
type: "hidden"
defaultValue: ->
return getObjectName collectionName
return new SimpleSchema(final_schema)
Template.CreatorAutoformModals.rendered = ->
self = this;
$('#afModal').modal(show: false)
onEscKey = (e) ->
if e.keyCode == 27
$('#afModal').modal 'hide'
$('#afModal').on 'show.bs.modal', ->
self.shouldUpdateQuickForm.set(true)
operation = Session.get 'cmOperation'
if operation == 'update'
AutoForm.resetForm(Session.get('cmFormId') or defaultFormId)
$('#afModal').on 'shown.bs.modal', ->
if Steedos?.setModalMaxHeight
Steedos.setModalMaxHeight()
$(window).bind 'keyup', onEscKey
setTimeout ->
$("#afModal .form-control:first").focus()
, 100
$('#afModal').on 'hidden.bs.modal', ->
$(window).unbind 'keyup', onEscKey
doc = Session.get 'cmDoc'
sessionKeys = [
'cmCollection',
'cmOperation',
'cmDoc',
'cmButtonHtml',
'cmFields',
'cmOmitFields',
'cmButtonContent',
'cmTitle',
'cmButtonClasses',
'cmPrompt',
'cmTemplate',
'cmLabelClass',
'cmInputColClass',
'cmPlaceholder',
'cmFormId',
'cmAutoformType',
'cmMeteorMethod',
'cmCloseButtonContent',
'cmCloseButtonClasses',
'cmShowRemoveButton',
'cmIsMultipleUpdate',
'cmTargetIds',
'cmEditSingleField',
'cmFullScreen'
]
delete Session.keys[key] for key in sessionKeys
Session.set("cmIsMultipleUpdate", false)
self.shouldUpdateQuickForm.set(false)
AutoForm.resetForm(Session.get('cmFormId') or defaultFormId)
# 如果用户操作为保存并新建 再次触发一次点击事件
if Session.get 'cmShowAgain'
keyPress = Session.get 'cmPressKey'
keyPress = '.' + keyPress.replace(/\s+/ig, '.')
Meteor.defer ()->
Session.set 'cmDoc', doc
$(keyPress).click()
Template.CreatorAutoformModals.events
'click button.btn-insert': (event,template) ->
formId = Session.get('cmFormId') or defaultFormId
$("#"+formId, "#afModal").submit()
'click button.btn-update': (event,template)->
isMultipleUpdate = Session.get('cmIsMultipleUpdate')
targetIds = Session.get('cmTargetIds')
isMultipleChecked = template.$(".ckb-multiple-update").is(":checked")
formId = Session.get('cmFormId') or defaultFormId
if isMultipleUpdate and isMultipleChecked and targetIds?.length > 1
template.$("[name=_ids]").val(targetIds.join(","))
else
template.$("[name=_ids]").val(Session.get("cmDoc")._id)
template.$("##{formId}").submit()
'click button.btn-remove': (event,template)->
collection = Session.get 'cmCollection'
object_name = getObjectName(collection)
url = Meteor.absoluteUrl()
_id = Session.get('cmDoc')._id
url = Steedos.absoluteUrl "/api/odata/v4/#{Steedos.spaceId()}/#{object_name}/#{_id}"
$.ajax
type: "delete"
url: url
dataType: "json"
contentType: "application/json"
beforeSend: (request) ->
request.setRequestHeader('X-User-Id', Meteor.userId())
request.setRequestHeader('X-Auth-Token', Accounts._storedLoginToken())
success: (data) ->
$('#afModal').modal 'hide'
cmOnSuccessCallback?()
toastr?.success?(t("afModal_remove_suc"))
error: (jqXHR, textStatus, errorThrown) ->
console.log(errorThrown)
'click button.btn-update-and-create': (event,template)->
formId = Session.get('cmFormId') or defaultFormId
$("#"+formId, "#afModal").submit()
Session.set 'cmShowAgain', true
'click button.btn-insert-and-create': (event,template)->
formId = Session.get('cmFormId') or defaultFormId
$("#"+formId, "#afModal").submit()
Session.set 'cmShowAgain', true
'click .group-section-control': (event, template) ->
event.preventDefault()
event.stopPropagation()
$(event.currentTarget).closest('.group-section').toggleClass('slds-is-open')
helpers =
cmCollection: () ->
Session.get 'cmCollection'
cmOperation: () ->
Session.get 'cmOperation'
cmDoc: () ->
Session.get 'cmDoc'
cmButtonHtml: () ->
Session.get 'cmButtonHtml'
cmFields: () ->
Session.get 'cmFields'
cmOmitFields: () ->
Session.get 'cmOmitFields'
cmButtonContent: () ->
Session.get 'cmButtonContent'
cmCloseButtonContent: () ->
Session.get 'cmCloseButtonContent'
cmTitle: () ->
Session.get 'cmTitle'
cmButtonClasses: () ->
Session.get 'cmButtonClasses'
cmCloseButtonClasses: () ->
Session.get 'cmCloseButtonClasses'
cmPrompt: () ->
Session.get 'cmPrompt'
cmTemplate: () ->
Session.get('cmTemplate') || "bootstrap3-horizontal"
cmLabelClass: () ->
Session.get('cmLabelClass') || "col-sm-2"
cmInputColClass: () ->
Session.get('cmInputColClass') || "col-sm-10"
cmPlaceholder: () ->
Session.get 'cmPlaceholder'
cmFormId: () ->
Session.get('cmFormId') or defaultFormId
cmAutoformType: () ->
# cmAutoformType会影响传递给method的参数
if Session.get 'cmUseOdataApi'
return undefined
if Session.get 'cmMeteorMethod'
if Session.get("cmOperation") == "insert"
return 'method'
if Session.get('cmOperation') == "update"
return 'method-update'
else
Session.get 'cmOperation'
cmModalDialogClass: () ->
Session.get 'cmModalDialogClass'
cmModalContentClass: () ->
Session.get 'cmModalContentClass'
cmMeteorMethod: () ->
Session.get 'cmMeteorMethod'
title: () ->
StringTemplate.compile '{{{cmTitle}}}', helpers
prompt: () ->
StringTemplate.compile '{{{cmPrompt}}}', helpers
buttonContent: () ->
StringTemplate.compile '{{{cmButtonContent}}}', helpers
closeButtonContent: () ->
StringTemplate.compile '{{{cmCloseButtonContent}}}', helpers
cmShowRemoveButton: () ->
Session.get 'cmShowRemoveButton'
shouldUpdateQuickForm: () ->
return Template.instance()?.shouldUpdateQuickForm.get()
cmSaveAndInsert: ()->
Session.get 'cmSaveAndInsert'
cmIsMultipleUpdate: ()->
isMultiple = Session.get('cmIsMultipleUpdate') and Session.get('cmTargetIds')?.length > 1
return isMultiple
isUseMethod: ()->
if Session.get 'cmMeteorMethod'
return true
else
return false
cmTargetIds: ()->
Session.get('cmTargetIds')
schema: ()->
cmCollection = Session.get 'cmCollection'
return getSimpleSchema(cmCollection)
schemaFields: ()->
cmCollection = Session.get 'cmCollection'
keys = []
if cmCollection
schemaInstance = getSimpleSchema(cmCollection)
schema = schemaInstance._schema
firstLevelKeys = schemaInstance._firstLevelSchemaKeys
object_name = getObjectName cmCollection
permission_fields = _.clone(Creator.getFields(object_name))
unless permission_fields
permission_fields = []
if Session.get 'cmMeteorMethod'
permission_fields.push "_ids"
permission_fields.push "_object_name"
if Session.get 'cmFields'
cmFields = Session.get('cmFields').replace(/\ /g, "")
cmFields = cmFields.split(",")
firstLevelKeys = _.intersection(firstLevelKeys, cmFields)
if Session.get 'cmOmitFields'
firstLevelKeys = _.difference firstLevelKeys, [Session.get('cmOmitFields')]
_.each schema, (value, key) ->
if (_.indexOf firstLevelKeys, key) > -1
if !value.autoform?.omit
keys.push key
if keys.length == 1
finalFields =
grouplessFields: [keys]
return finalFields
hiddenFields = Creator.getHiddenFields(schema)
disabledFields = Creator.getDisabledFields(schema)
fieldGroups = []
fieldsForGroup = []
isSingle = Session.get "cmEditSingleField"
grouplessFields = []
grouplessFields = Creator.getFieldsWithNoGroup(schema)
grouplessFields = Creator.getFieldsInFirstLevel(firstLevelKeys, grouplessFields)
if permission_fields
grouplessFields = _.intersection(permission_fields, grouplessFields)
grouplessFields = Creator.getFieldsWithoutOmit(schema, grouplessFields)
grouplessFields = Creator.getFieldsForReorder(schema, grouplessFields, isSingle)
fieldGroupNames = Creator.getSortedFieldGroupNames(schema)
_.each fieldGroupNames, (fieldGroupName) ->
fieldsForGroup = Creator.getFieldsForGroup(schema, fieldGroupName)
fieldsForGroup = Creator.getFieldsInFirstLevel(firstLevelKeys, fieldsForGroup)
if permission_fields
fieldsForGroup = _.intersection(permission_fields, fieldsForGroup)
fieldsForGroup = Creator.getFieldsWithoutOmit(schema, fieldsForGroup)
fieldsForGroup = Creator.getFieldsForReorder(schema, fieldsForGroup, isSingle)
fieldGroups.push
name: fieldGroupName
fields: fieldsForGroup
finalFields =
grouplessFields: grouplessFields
groupFields: fieldGroups
hiddenFields: hiddenFields
disabledFields: disabledFields
console.log finalFields
return finalFields
isMobile: ()->
if $(window).width() < 767
return true
else
return false
isDisabled: (key)->
cmCollection = Session.get 'cmCollection'
if cmCollection
object_name = getObjectName(cmCollection)
fields = Creator.getObject(object_name).fields
return fields[key].disabled
disabledFieldsValue: (key)->
cmCollection = Session.get 'cmCollection'
if cmCollection
object_name = getObjectName(cmCollection)
fields = Creator.getObject(object_name).fields
defaultValue = fields[key].defaultValue
if _.isFunction(defaultValue)
defaultValue = defaultValue()
return defaultValue
getLabel: (key)->
return AutoForm.getLabelForField(key)
isSingle: ()->
return Session.get("cmEditSingleField")
isFullScreen: ()->
return Session.get("cmFullScreen")
hasInlineHelpText: (key)->
cmCollection = Session.get 'cmCollection'
if cmCollection
object_name = getObjectName(cmCollection)
fields = Creator.getObject(object_name).fields
return fields[key]?.inlineHelpText
Template.CreatorAutoformModals.helpers helpers
Template.CreatorFormField.helpers helpers
Template.CreatorFormField.onRendered ->
self = this
self.$(".has-inline-text").each ->
id = "info_" + $(".control-label", $(this)).attr("for")
html = """
<span class="help-info" id="#{id}">
<i class="ion ion-information-circled"></i>
</span>
"""
$(".control-label", $(this)).after(html)
self.$(".info-popover").each ->
_id = $("~ .form-group .help-info", $(this)).attr("id");
$(this).dxPopover
target: "#" + _id,
showEvent: "mouseenter",
hideEvent: "mouseleave",
position: "top",
width: 300,
animation: {
show: {
type: "pop",
from: {
scale: 0
},
to: {
scale: 1
}
},
hide: {
type: "fade",
from: 1,
to: 0
}
}
# Delegated click handler on the CreatorAfModal trigger: copies the trigger's
# data attributes into the cm* Session keys, registers the per-formId AutoForm
# hooks exactly once, then opens the shared #afModal dialog.
Template.CreatorAfModal.events
	'click *': (e, t) ->
		e.preventDefault()
		html = t.$('*').html()
		if t.data.collectionName
			if t.data.operation == "update"
				title = "编辑#{t.data.collectionName}"
			else if t.data.operation == "insert"
				title = "新建#{t.data.collectionName}"
			else if t.data.operation == "remove"
				title = "删除#{t.data.collectionName}"
		else
			title = html
		# Add the _ids/_object_name virtual fields so several records can be updated in one submit
		fields = t.data.fields
		if fields and fields.length
			if fields.split(",").length == 1
				Session.set "cmEditSingleField", true
			fields = _.union(fields.split(","),"_ids","_object_name").join(",")
		else
			Session.set "cmEditSingleField", false
		Session.set 'cmCollection', t.data.collection
		Session.set 'cmOperation', t.data.operation
		Session.set 'cmFields', fields
		Session.set 'cmOmitFields', t.data.omitFields
		Session.set 'cmButtonHtml', html
		Session.set 'cmTitle', t.data.title or title
		Session.set 'cmTemplate', t.data.template
		Session.set 'cmLabelClass', t.data.labelClass or t.data['label-class']
		Session.set 'cmInputColClass', t.data.inputColClass or t.data['input-col-class']
		Session.set 'cmPlaceholder', if t.data.placeholder is true then 'schemaLabel' else ''
		Session.set 'cmFormId', t.data.formId
		Session.set 'cmMeteorMethod', t.data.meteormethod
		Session.set 'cmModalDialogClass', t.data.dialogClass
		Session.set 'cmModalContentClass', t.data.contentClass
		Session.set 'cmShowRemoveButton', t.data.showRemoveButton or false
		Session.set 'cmSaveAndInsert', t.data.saveAndInsert
		Session.set 'cmUseOdataApi', t.data.useOdataApi
		cmOnSuccessCallback = t.data.onSuccess
		# Register AutoForm hooks once per formId for the page's lifetime
		if not _.contains registeredAutoFormHooks, t.data.formId
			# userId = Meteor.userId()
			# cmCollection = Session.get 'cmCollection'
			# object_name = getObjectName(cmCollection)
			# console.log "afModal-object_name", object_name
			# triggers = Creator.getObject(object_name).triggers
			AutoForm.addHooks t.data.formId,
				before:
					# Run client-side before.insert/before.update triggers ahead of the meteor method
					method: (doc)->
						userId = Meteor.userId()
						cmCollection = Session.get 'cmCollection'
						object_name = getObjectName(cmCollection)
						triggers = Creator.getObject(object_name).triggers
						if triggers
							if Session.get("cmOperation") == "insert"
								_.each triggers, (trigger, key)->
									if trigger.on == "client" and trigger.when == "before.insert"
										trigger.todo.apply({object_name: object_name},[userId, doc])
							else if Session.get("cmOperation") == "update"
								_.each triggers, (trigger, key)->
									if trigger.on == "client" and trigger.when == "before.update"
										trigger.todo.apply({object_name: object_name},[userId, doc])
						return doc
				after:
					# Run client-side after.insert/after.update triggers once the method returns
					method: (error, result)->
						userId = Meteor.userId()
						cmCollection = Session.get 'cmCollection'
						object_name = getObjectName(cmCollection)
						triggers = Creator.getObject(object_name).triggers
						if triggers
							if Session.get("cmOperation") == "insert"
								_.each triggers, (trigger, key)->
									if trigger.on == "client" and trigger.when == "after.insert"
										trigger.todo.apply({object_name: object_name},[userId, result])
							else if Session.get("cmOperation") == "update"
								_.each triggers, (trigger, key)->
									if trigger.on == "client" and trigger.when == "after.update"
										trigger.todo.apply({object_name: object_name},[userId, result])
						return result
				# OData path: build one URL per target id, fire the requests ourselves,
				# then cancel AutoForm's own submission by returning false.
				onSubmit: (insertDoc, updateDoc, currentDoc)->
					console.log insertDoc
					userId = Meteor.userId()
					cmCollection = Session.get 'cmCollection'
					object_name = getObjectName(cmCollection)
					triggers = Creator.getObject(object_name).triggers
					self = this
					urls = []
					if Session.get("cmOperation") == "insert"
						data = insertDoc
						type = "post"
						urls.push Steedos.absoluteUrl("/api/odata/v4/#{Steedos.spaceId()}/#{object_name}")
						delete data._object_name
					if Session.get("cmOperation") == "update"
						if Session.get("cmMeteorMethod")
							if updateDoc["$set"]
								_id = updateDoc["$set"]._ids || Session.get("cmDoc")._id
							else
								_id = Session.get("cmDoc")._id
						else
							_id = Session.get("cmDoc")._id
						if updateDoc["$set"]
							delete updateDoc["$set"]._ids
							delete updateDoc["$set"]._object_name
						if updateDoc["$unset"]
							delete updateDoc["$unset"]._ids
							delete updateDoc["$unset"]._object_name
						# insertDoc holds the most complete and accurate values
						updateDoc["$set"] = insertDoc
						_ids = _id.split(",")
						_.each _ids, (id)->
							urls.push Steedos.absoluteUrl("/api/odata/v4/#{Steedos.spaceId()}/#{object_name}/#{id}")
						data = updateDoc
						type = "put"
					console.log "begin......", data
					if triggers
						if Session.get("cmOperation") == "insert"
							_.each triggers, (trigger, key)->
								if trigger.on == "client" and (trigger.when == "before.insert" or trigger.when == "after.insert")
									trigger.todo.apply({object_name: object_name},[userId, data])
						if Session.get("cmOperation") == "update"
							_.each triggers, (trigger, key)->
								if trigger.on == "client" and (trigger.when == "before.update" or trigger.when == "after.update")
									trigger.todo.apply({object_name: object_name},[userId, data])
					_.each urls, (url)->
						oDataOperation.call(self, type, url, data, object_name)
					return false
				onSuccess: (operation,result)->
					$('#afModal').modal 'hide'
					# if result.type == "post"
					# app_id = Session.get("app_id")
					# object_name = result.object_name
					# record_id = result._id
					# url = "/app/#{app_id}/#{object_name}/view/#{record_id}"
					# FlowRouter.go url
				onError: (operation,error) ->
					console.error error
					if error.reason
						toastr?.error?(TAPi18n.__(error.reason))
					else if error.message
						toastr?.error?(TAPi18n.__(error.message))
					else
						toastr?.error?(error)
			registeredAutoFormHooks.push t.data.formId
		if t.data.doc
			Session.set 'cmDoc', collectionObj(t.data.collection).findOne _id: t.data.doc
		if t.data.showRemoveButton
			t.data.buttonContent = false
		if t.data.buttonContent or t.data.buttonContent is false
			Session.set 'cmButtonContent', t.data.buttonContent
		else if t.data.operation == 'insert'
			Session.set 'cmButtonContent', 'Create'
		else if t.data.operation == 'update'
			Session.set 'cmButtonContent', 'Update'
		else if t.data.operation == 'remove'
			Session.set 'cmButtonContent', 'Delete'
		if t.data.buttonClasses
			Session.set 'cmButtonClasses', t.data.buttonClasses
		else if t.data.operation == 'remove'
			Session.set 'cmButtonClasses', 'btn btn-danger'
		else
			Session.set 'cmButtonClasses', 'btn btn-primary'
		Session.set 'cmCloseButtonContent', t.data.closeButtonContent or ''
		Session.set 'cmCloseButtonClasses', t.data.closeButtonClasses or 'btn btn-danger'
		if t.data.prompt
			Session.set 'cmPrompt', t.data.prompt
		else if t.data.operation == 'remove'
			Session.set 'cmPrompt', 'Are you sure?'
		else
			Session.set 'cmPrompt', ''
		# Remember the className of the clicked element so save-and-new can re-click it
		keyClassName = e.currentTarget.className
		Session.set 'cmPressKey', keyClassName
		# Previous action was save-and-new: clear cmDoc and switch cmOperation to insert
		if Session.get 'cmShowAgain'
			console.log "t.data.operation", t.data.operation
			if t.data.operation == 'update'
				Session.set 'cmDoc', undefined
				Session.set 'cmOperation', 'insert'
			# Reset cmShowAgain
			Session.set 'cmShowAgain', false
		$('#afModal').data('bs.modal').options.backdrop = t.data.backdrop or true
		$('#afModal').modal 'show'
# Create the reactive flag that gates (re)rendering of the modal's quickForm.
Template.CreatorAutoformModals.onCreated ->
	@shouldUpdateQuickForm = new ReactiveVar(true)
# Clear multiple-update state when the modal template is torn down.
Template.CreatorAutoformModals.onDestroyed ->
	Session.set "cmTargetIds", null
	Session.set "cmIsMultipleUpdate", false
# FIX: removed the "| 43261 | " merge-artifact prefix that corrupted this line.
# Form ids whose AutoForm hooks are already registered; 'cmForm' gets its
# default hooks right below, so it starts in the list.
registeredAutoFormHooks = ['cmForm']
defaultFormId = 'cmForm'
# Optional success callback handed over via the trigger's data attributes.
cmOnSuccessCallback = null
# Default hooks for the shared 'cmForm': close the modal on success, toast a
# translated error message otherwise.
AutoForm.addHooks 'cmForm',
	onSuccess: ->
		$('#afModal').modal('hide')
	onError: (operation,error) ->
		console.error error
		if error.reason
			toastr?.error?(TAPi18n.__(error.reason))
		else if error.message
			toastr?.error?(TAPi18n.__(error.message))
		else
			toastr?.error?(error)
# Resolve a dotted global name (e.g. "Creator.Collections.foo") to the object
# it denotes, walking down from window one segment at a time.
collectionObj = (name) ->
	target = window
	for segment in name.split('.')
		target = target[segment]
	target
# Fire one Steedos OData request (POST for insert, PUT for update) and feed the
# outcome back to AutoForm via this.done(); must be invoked as
# oDataOperation.call(self, ...) where self is the AutoForm onSubmit context.
oDataOperation = (type, url, data, object_name)->
	self = this
	$.ajax
		type: type
		url: url
		data: JSON.stringify(data)
		dataType: 'json'
		contentType: "application/json"
		processData: false
		beforeSend: (request) ->
			# Authenticate against the OData API with the current Meteor login token
			request.setRequestHeader 'X-User-Id', Meteor.userId()
			request.setRequestHeader 'X-Auth-Token', Accounts._storedLoginToken()
		success: (data) ->
			# Insert responses wrap the new record in `value`; updates return it flat
			if Session.get("cmOperation") == "insert"
				_id = data.value[0]._id
			else if Session.get("cmOperation") == "update"
				_id = data._id
			# console.log _id
			data = {_id: _id}
			data.type = type
			data.object_name = object_name
			self.done(null, data)
		error: (jqXHR, textStatus, errorThrown) ->
			# console.log(errorThrown);
			self.done(jqXHR.responseJSON.error.message)
# Strip the literal "Creator.Collections." prefix to obtain the bare object
# name, e.g. "Creator.Collections.contracts" -> "contracts".
# FIX: the original pattern /Creator.Collections./ left the dots unescaped and
# was unanchored, so it removed ANY matching 19-char run anywhere in the
# string; anchor it and escape the dots to match only the exact prefix.
getObjectName = (collectionName)->
	return collectionName.replace(/^Creator\.Collections\./, "")
# Build a SimpleSchema restricted to the fields named in the cmFields Session
# key; "grid" fields also pull in their "<field>.$.<sub>" child keys. Falls
# back to the collection's full schema when cmFields is unset.
getSimpleSchema = (collectionName)->
	if collectionName
		object_name = getObjectName collectionName
		object_fields = Creator.getObject(object_name).fields
		_fields = Creator.getFields(object_name)
		schema = collectionObj(collectionName).simpleSchema()._schema
		fields = Session.get("cmFields")
		final_schema = {}
		if fields
			fields = fields.replace(/\ /g, "").split(",")
			_.each fields, (field)->
				if object_fields[field]?.type == "grid"
					# Collect the grid's row sub-fields ("field.$.sub") into the schema
					table_fields = _.filter _fields, (f)->
						reg = new RegExp("^(" + field + ")(\\.\\$\\.){1}\\w+")
						return reg.test(f)
					_.each table_fields, (f)->
						_.extend(final_schema, _.pick(schema, f))
				obj = _.pick(schema, field, field + ".$")
				_.extend(final_schema, obj)
		else
			final_schema = schema
		if Session.get 'cmMeteorMethod'
			# Add the _ids virtual field so several records can be updated in one submit
			final_schema._ids =
				type: String
				optional: true
				autoform:
					type: "hidden"
			# Add the _object_name virtual field so the server-side method knows which table to update
			final_schema._object_name =
				type: String
				optional: true
				autoform:
					type: "hidden"
					defaultValue: ->
						return getObjectName collectionName
		return new SimpleSchema(final_schema)
# Wire the #afModal Bootstrap lifecycle: reset the form on show, bind Esc-to-close
# while shown, and on hide clear all cm* Session state (optionally re-opening the
# modal for the "save and new" flow).
Template.CreatorAutoformModals.rendered = ->
	self = this;
	$('#afModal').modal(show: false)
	onEscKey = (e) ->
		if e.keyCode == 27
			$('#afModal').modal 'hide'
	$('#afModal').on 'show.bs.modal', ->
		self.shouldUpdateQuickForm.set(true)
		operation = Session.get 'cmOperation'
		if operation == 'update'
			AutoForm.resetForm(Session.get('cmFormId') or defaultFormId)
	$('#afModal').on 'shown.bs.modal', ->
		if Steedos?.setModalMaxHeight
			Steedos.setModalMaxHeight()
		$(window).bind 'keyup', onEscKey
		# Focus the first input once the modal is fully visible
		setTimeout ->
			$("#afModal .form-control:first").focus()
		, 100
	$('#afModal').on 'hidden.bs.modal', ->
		$(window).unbind 'keyup', onEscKey
		# Keep the doc around: the save-and-new path below restores it after reset
		doc = Session.get 'cmDoc'
		sessionKeys = [
			'cmCollection',
			'cmOperation',
			'cmDoc',
			'cmButtonHtml',
			'cmFields',
			'cmOmitFields',
			'cmButtonContent',
			'cmTitle',
			'cmButtonClasses',
			'cmPrompt',
			'cmTemplate',
			'cmLabelClass',
			'cmInputColClass',
			'cmPlaceholder',
			'cmFormId',
			'cmAutoformType',
			'cmMeteorMethod',
			'cmCloseButtonContent',
			'cmCloseButtonClasses',
			'cmShowRemoveButton',
			'cmIsMultipleUpdate',
			'cmTargetIds',
			'cmEditSingleField',
			'cmFullScreen'
		]
		delete Session.keys[key] for key in sessionKeys
		Session.set("cmIsMultipleUpdate", false)
		self.shouldUpdateQuickForm.set(false)
		AutoForm.resetForm(Session.get('cmFormId') or defaultFormId)
		# If the user chose "save and new", re-trigger the original click
		if Session.get 'cmShowAgain'
			keyPress = Session.get 'cmPressKey'
			keyPress = '.' + keyPress.replace(/\s+/ig, '.')
			Meteor.defer ()->
				Session.set 'cmDoc', doc
				$(keyPress).click()
# Footer-button handlers for the shared #afModal form.
Template.CreatorAutoformModals.events
	'click button.btn-insert': (event,template) ->
		formId = Session.get('cmFormId') or defaultFormId
		$("#"+formId, "#afModal").submit()
	'click button.btn-update': (event,template)->
		isMultipleUpdate = Session.get('cmIsMultipleUpdate')
		targetIds = Session.get('cmTargetIds')
		isMultipleChecked = template.$(".ckb-multiple-update").is(":checked")
		formId = Session.get('cmFormId') or defaultFormId
		# Batch path: submit every selected id; otherwise only the edited doc
		if isMultipleUpdate and isMultipleChecked and targetIds?.length > 1
			template.$("[name=_ids]").val(targetIds.join(","))
		else
			template.$("[name=_ids]").val(Session.get("cmDoc")._id)
		template.$("##{formId}").submit()
	'click button.btn-remove': (event,template)->
		# Delete the current document through the Steedos OData API
		collection = Session.get 'cmCollection'
		object_name = getObjectName(collection)
		_id = Session.get('cmDoc')._id
		url = Steedos.absoluteUrl "/api/odata/v4/#{Steedos.spaceId()}/#{object_name}/#{_id}"
		$.ajax
			type: "delete"
			url: url
			dataType: "json"
			contentType: "application/json"
			beforeSend: (request) ->
				request.setRequestHeader('X-User-Id', Meteor.userId())
				# FIX: restore the login-token call that had been mangled to "Accounts._<KEY>()"
				request.setRequestHeader('X-Auth-Token', Accounts._storedLoginToken())
			success: (data) ->
				$('#afModal').modal 'hide'
				cmOnSuccessCallback?()
				toastr?.success?(t("afModal_remove_suc"))
			error: (jqXHR, textStatus, errorThrown) ->
				console.log(errorThrown)
	'click button.btn-update-and-create': (event,template)->
		formId = Session.get('cmFormId') or defaultFormId
		$("#"+formId, "#afModal").submit()
		Session.set 'cmShowAgain', true
	'click button.btn-insert-and-create': (event,template)->
		formId = Session.get('cmFormId') or defaultFormId
		$("#"+formId, "#afModal").submit()
		Session.set 'cmShowAgain', true
	'click .group-section-control': (event, template) ->
		event.preventDefault()
		event.stopPropagation()
		$(event.currentTarget).closest('.group-section').toggleClass('slds-is-open')
# Blaze helpers shared by CreatorAutoformModals and CreatorFormField; most are
# thin reactive reads of the cm* Session keys set by the CreatorAfModal click
# handler.
helpers =
	cmCollection: () ->
		Session.get 'cmCollection'
	cmOperation: () ->
		Session.get 'cmOperation'
	cmDoc: () ->
		Session.get 'cmDoc'
	cmButtonHtml: () ->
		Session.get 'cmButtonHtml'
	cmFields: () ->
		Session.get 'cmFields'
	cmOmitFields: () ->
		Session.get 'cmOmitFields'
	cmButtonContent: () ->
		Session.get 'cmButtonContent'
	cmCloseButtonContent: () ->
		Session.get 'cmCloseButtonContent'
	cmTitle: () ->
		Session.get 'cmTitle'
	cmButtonClasses: () ->
		Session.get 'cmButtonClasses'
	cmCloseButtonClasses: () ->
		Session.get 'cmCloseButtonClasses'
	cmPrompt: () ->
		Session.get 'cmPrompt'
	cmTemplate: () ->
		Session.get('cmTemplate') || "bootstrap3-horizontal"
	cmLabelClass: () ->
		Session.get('cmLabelClass') || "col-sm-2"
	cmInputColClass: () ->
		Session.get('cmInputColClass') || "col-sm-10"
	cmPlaceholder: () ->
		Session.get 'cmPlaceholder'
	cmFormId: () ->
		Session.get('cmFormId') or defaultFormId
	cmAutoformType: () ->
		# cmAutoformType affects the arguments AutoForm passes to the meteor method
		if Session.get 'cmUseOdataApi'
			return undefined
		if Session.get 'cmMeteorMethod'
			if Session.get("cmOperation") == "insert"
				return 'method'
			if Session.get('cmOperation') == "update"
				return 'method-update'
		else
			Session.get 'cmOperation'
	cmModalDialogClass: () ->
		Session.get 'cmModalDialogClass'
	cmModalContentClass: () ->
		Session.get 'cmModalContentClass'
	cmMeteorMethod: () ->
		Session.get 'cmMeteorMethod'
	title: () ->
		StringTemplate.compile '{{{cmTitle}}}', helpers
	prompt: () ->
		StringTemplate.compile '{{{cmPrompt}}}', helpers
	buttonContent: () ->
		StringTemplate.compile '{{{cmButtonContent}}}', helpers
	closeButtonContent: () ->
		StringTemplate.compile '{{{cmCloseButtonContent}}}', helpers
	cmShowRemoveButton: () ->
		Session.get 'cmShowRemoveButton'
	shouldUpdateQuickForm: () ->
		return Template.instance()?.shouldUpdateQuickForm.get()
	cmSaveAndInsert: ()->
		Session.get 'cmSaveAndInsert'
	cmIsMultipleUpdate: ()->
		isMultiple = Session.get('cmIsMultipleUpdate') and Session.get('cmTargetIds')?.length > 1
		return isMultiple
	isUseMethod: ()->
		if Session.get 'cmMeteorMethod'
			return true
		else
			return false
	cmTargetIds: ()->
		Session.get('cmTargetIds')
	schema: ()->
		cmCollection = Session.get 'cmCollection'
		return getSimpleSchema(cmCollection)
	# Compute the grouped/groupless field layout for the quickForm, honoring
	# cmFields, cmOmitFields and the user's field-level permissions.
	schemaFields: ()->
		cmCollection = Session.get 'cmCollection'
		keys = []
		if cmCollection
			schemaInstance = getSimpleSchema(cmCollection)
			schema = schemaInstance._schema
			firstLevelKeys = schemaInstance._firstLevelSchemaKeys
			object_name = getObjectName cmCollection
			permission_fields = _.clone(Creator.getFields(object_name))
			unless permission_fields
				permission_fields = []
			if Session.get 'cmMeteorMethod'
				permission_fields.push "_ids"
				permission_fields.push "_object_name"
			if Session.get 'cmFields'
				cmFields = Session.get('cmFields').replace(/\ /g, "")
				cmFields = cmFields.split(",")
				firstLevelKeys = _.intersection(firstLevelKeys, cmFields)
			if Session.get 'cmOmitFields'
				firstLevelKeys = _.difference firstLevelKeys, [Session.get('cmOmitFields')]
			_.each schema, (value, key) ->
				if (_.indexOf firstLevelKeys, key) > -1
					if !value.autoform?.omit
						keys.push key
			# Single visible field: short-circuit to a minimal layout
			# NOTE(review): [keys] nests the one-key array in another array — looks
			# intentional for the single-field edit path; confirm downstream usage.
			if keys.length == 1
				finalFields =
					grouplessFields: [keys]
				return finalFields
			hiddenFields = Creator.getHiddenFields(schema)
			disabledFields = Creator.getDisabledFields(schema)
			fieldGroups = []
			fieldsForGroup = []
			isSingle = Session.get "cmEditSingleField"
			grouplessFields = []
			grouplessFields = Creator.getFieldsWithNoGroup(schema)
			grouplessFields = Creator.getFieldsInFirstLevel(firstLevelKeys, grouplessFields)
			if permission_fields
				grouplessFields = _.intersection(permission_fields, grouplessFields)
			grouplessFields = Creator.getFieldsWithoutOmit(schema, grouplessFields)
			grouplessFields = Creator.getFieldsForReorder(schema, grouplessFields, isSingle)
			fieldGroupNames = Creator.getSortedFieldGroupNames(schema)
			_.each fieldGroupNames, (fieldGroupName) ->
				fieldsForGroup = Creator.getFieldsForGroup(schema, fieldGroupName)
				fieldsForGroup = Creator.getFieldsInFirstLevel(firstLevelKeys, fieldsForGroup)
				if permission_fields
					fieldsForGroup = _.intersection(permission_fields, fieldsForGroup)
				fieldsForGroup = Creator.getFieldsWithoutOmit(schema, fieldsForGroup)
				fieldsForGroup = Creator.getFieldsForReorder(schema, fieldsForGroup, isSingle)
				fieldGroups.push
					name: fieldGroupName
					fields: fieldsForGroup
			finalFields =
				grouplessFields: grouplessFields
				groupFields: fieldGroups
				hiddenFields: hiddenFields
				disabledFields: disabledFields
			# FIX: removed leftover debug `console.log finalFields`
			return finalFields
	isMobile: ()->
		if $(window).width() < 767
			return true
		else
			return false
	isDisabled: (key)->
		cmCollection = Session.get 'cmCollection'
		if cmCollection
			object_name = getObjectName(cmCollection)
			fields = Creator.getObject(object_name).fields
			# FIX: guard with ?. — virtual keys (e.g. _ids) have no field definition,
			# matching the existing null-safety in hasInlineHelpText below
			return fields[key]?.disabled
	disabledFieldsValue: (key)->
		cmCollection = Session.get 'cmCollection'
		if cmCollection
			object_name = getObjectName(cmCollection)
			fields = Creator.getObject(object_name).fields
			# FIX: same ?. guard as isDisabled for keys missing from the object definition
			defaultValue = fields[key]?.defaultValue
			if _.isFunction(defaultValue)
				defaultValue = defaultValue()
			return defaultValue
	getLabel: (key)->
		return AutoForm.getLabelForField(key)
	isSingle: ()->
		return Session.get("cmEditSingleField")
	isFullScreen: ()->
		return Session.get("cmFullScreen")
	hasInlineHelpText: (key)->
		cmCollection = Session.get 'cmCollection'
		if cmCollection
			object_name = getObjectName(cmCollection)
			fields = Creator.getObject(object_name).fields
			return fields[key]?.inlineHelpText
# Expose the shared cm* helpers to both the modal template and the per-field template.
Template.CreatorAutoformModals.helpers helpers
Template.CreatorFormField.helpers helpers
# After each CreatorFormField render: inject an (i) help icon after the label of
# every field flagged .has-inline-text, then attach a DevExtreme popover
# (.info-popover) that shows the inline help on hover.
Template.CreatorFormField.onRendered ->
	self = this
	self.$(".has-inline-text").each ->
		# Derive a unique icon id from the label's `for` attribute
		id = "info_" + $(".control-label", $(this)).attr("for")
		html = """
			<span class="help-info" id="#{id}">
				<i class="ion ion-information-circled"></i>
			</span>
		"""
		$(".control-label", $(this)).after(html)
	self.$(".info-popover").each ->
		# Anchor the popover to the help icon injected in the sibling form-group
		_id = $("~ .form-group .help-info", $(this)).attr("id");
		$(this).dxPopover
			target: "#" + _id,
			showEvent: "mouseenter",
			hideEvent: "mouseleave",
			position: "top",
			width: 300,
			animation: {
				show: {
					type: "pop",
					from: {
						scale: 0
					},
					to: {
						scale: 1
					}
				},
				hide: {
					type: "fade",
					from: 1,
					to: 0
				}
			}
# Delegated click handler on the CreatorAfModal trigger: copies the trigger's
# data attributes into the cm* Session keys, registers the per-formId AutoForm
# hooks exactly once, then opens the shared #afModal dialog.
Template.CreatorAfModal.events
	'click *': (e, t) ->
		e.preventDefault()
		html = t.$('*').html()
		if t.data.collectionName
			if t.data.operation == "update"
				title = "编辑#{t.data.collectionName}"
			else if t.data.operation == "insert"
				title = "新建#{t.data.collectionName}"
			else if t.data.operation == "remove"
				title = "删除#{t.data.collectionName}"
		else
			title = html
		# Add the _ids/_object_name virtual fields so several records can be updated in one submit
		fields = t.data.fields
		if fields and fields.length
			if fields.split(",").length == 1
				Session.set "cmEditSingleField", true
			fields = _.union(fields.split(","),"_ids","_object_name").join(",")
		else
			Session.set "cmEditSingleField", false
		Session.set 'cmCollection', t.data.collection
		Session.set 'cmOperation', t.data.operation
		Session.set 'cmFields', fields
		Session.set 'cmOmitFields', t.data.omitFields
		Session.set 'cmButtonHtml', html
		Session.set 'cmTitle', t.data.title or title
		Session.set 'cmTemplate', t.data.template
		Session.set 'cmLabelClass', t.data.labelClass or t.data['label-class']
		Session.set 'cmInputColClass', t.data.inputColClass or t.data['input-col-class']
		Session.set 'cmPlaceholder', if t.data.placeholder is true then 'schemaLabel' else ''
		Session.set 'cmFormId', t.data.formId
		Session.set 'cmMeteorMethod', t.data.meteormethod
		Session.set 'cmModalDialogClass', t.data.dialogClass
		Session.set 'cmModalContentClass', t.data.contentClass
		Session.set 'cmShowRemoveButton', t.data.showRemoveButton or false
		Session.set 'cmSaveAndInsert', t.data.saveAndInsert
		Session.set 'cmUseOdataApi', t.data.useOdataApi
		cmOnSuccessCallback = t.data.onSuccess
		# Register AutoForm hooks once per formId for the page's lifetime
		if not _.contains registeredAutoFormHooks, t.data.formId
			# userId = Meteor.userId()
			# cmCollection = Session.get 'cmCollection'
			# object_name = getObjectName(cmCollection)
			# console.log "afModal-object_name", object_name
			# triggers = Creator.getObject(object_name).triggers
			AutoForm.addHooks t.data.formId,
				before:
					# Run client-side before.insert/before.update triggers ahead of the meteor method
					method: (doc)->
						userId = Meteor.userId()
						cmCollection = Session.get 'cmCollection'
						object_name = getObjectName(cmCollection)
						triggers = Creator.getObject(object_name).triggers
						if triggers
							if Session.get("cmOperation") == "insert"
								_.each triggers, (trigger, key)->
									if trigger.on == "client" and trigger.when == "before.insert"
										trigger.todo.apply({object_name: object_name},[userId, doc])
							else if Session.get("cmOperation") == "update"
								_.each triggers, (trigger, key)->
									if trigger.on == "client" and trigger.when == "before.update"
										trigger.todo.apply({object_name: object_name},[userId, doc])
						return doc
				after:
					# Run client-side after.insert/after.update triggers once the method returns
					method: (error, result)->
						userId = Meteor.userId()
						cmCollection = Session.get 'cmCollection'
						object_name = getObjectName(cmCollection)
						triggers = Creator.getObject(object_name).triggers
						if triggers
							if Session.get("cmOperation") == "insert"
								_.each triggers, (trigger, key)->
									if trigger.on == "client" and trigger.when == "after.insert"
										trigger.todo.apply({object_name: object_name},[userId, result])
							else if Session.get("cmOperation") == "update"
								_.each triggers, (trigger, key)->
									if trigger.on == "client" and trigger.when == "after.update"
										trigger.todo.apply({object_name: object_name},[userId, result])
						return result
				# OData path: build one URL per target id, fire the requests ourselves,
				# then cancel AutoForm's own submission by returning false.
				onSubmit: (insertDoc, updateDoc, currentDoc)->
					console.log insertDoc
					userId = Meteor.userId()
					cmCollection = Session.get 'cmCollection'
					object_name = getObjectName(cmCollection)
					triggers = Creator.getObject(object_name).triggers
					self = this
					urls = []
					if Session.get("cmOperation") == "insert"
						data = insertDoc
						type = "post"
						urls.push Steedos.absoluteUrl("/api/odata/v4/#{Steedos.spaceId()}/#{object_name}")
						delete data._object_name
					if Session.get("cmOperation") == "update"
						if Session.get("cmMeteorMethod")
							if updateDoc["$set"]
								_id = updateDoc["$set"]._ids || Session.get("cmDoc")._id
							else
								_id = Session.get("cmDoc")._id
						else
							_id = Session.get("cmDoc")._id
						if updateDoc["$set"]
							delete updateDoc["$set"]._ids
							delete updateDoc["$set"]._object_name
						if updateDoc["$unset"]
							delete updateDoc["$unset"]._ids
							delete updateDoc["$unset"]._object_name
						# insertDoc holds the most complete and accurate values
						updateDoc["$set"] = insertDoc
						_ids = _id.split(",")
						_.each _ids, (id)->
							urls.push Steedos.absoluteUrl("/api/odata/v4/#{Steedos.spaceId()}/#{object_name}/#{id}")
						data = updateDoc
						type = "put"
					console.log "begin......", data
					if triggers
						if Session.get("cmOperation") == "insert"
							_.each triggers, (trigger, key)->
								if trigger.on == "client" and (trigger.when == "before.insert" or trigger.when == "after.insert")
									trigger.todo.apply({object_name: object_name},[userId, data])
						if Session.get("cmOperation") == "update"
							_.each triggers, (trigger, key)->
								if trigger.on == "client" and (trigger.when == "before.update" or trigger.when == "after.update")
									trigger.todo.apply({object_name: object_name},[userId, data])
					_.each urls, (url)->
						oDataOperation.call(self, type, url, data, object_name)
					return false
				onSuccess: (operation,result)->
					$('#afModal').modal 'hide'
					# if result.type == "post"
					# app_id = Session.get("app_id")
					# object_name = result.object_name
					# record_id = result._id
					# url = "/app/#{app_id}/#{object_name}/view/#{record_id}"
					# FlowRouter.go url
				onError: (operation,error) ->
					console.error error
					if error.reason
						toastr?.error?(TAPi18n.__(error.reason))
					else if error.message
						toastr?.error?(TAPi18n.__(error.message))
					else
						toastr?.error?(error)
			registeredAutoFormHooks.push t.data.formId
		if t.data.doc
			Session.set 'cmDoc', collectionObj(t.data.collection).findOne _id: t.data.doc
		if t.data.showRemoveButton
			t.data.buttonContent = false
		if t.data.buttonContent or t.data.buttonContent is false
			Session.set 'cmButtonContent', t.data.buttonContent
		else if t.data.operation == 'insert'
			Session.set 'cmButtonContent', 'Create'
		else if t.data.operation == 'update'
			Session.set 'cmButtonContent', 'Update'
		else if t.data.operation == 'remove'
			Session.set 'cmButtonContent', 'Delete'
		if t.data.buttonClasses
			Session.set 'cmButtonClasses', t.data.buttonClasses
		else if t.data.operation == 'remove'
			Session.set 'cmButtonClasses', 'btn btn-danger'
		else
			Session.set 'cmButtonClasses', 'btn btn-primary'
		Session.set 'cmCloseButtonContent', t.data.closeButtonContent or ''
		Session.set 'cmCloseButtonClasses', t.data.closeButtonClasses or 'btn btn-danger'
		if t.data.prompt
			Session.set 'cmPrompt', t.data.prompt
		else if t.data.operation == 'remove'
			Session.set 'cmPrompt', 'Are you sure?'
		else
			Session.set 'cmPrompt', ''
		# Remember the className of the clicked element so save-and-new can re-click it
		keyClassName = e.currentTarget.className
		Session.set 'cmPressKey', keyClassName
		# Previous action was save-and-new: clear cmDoc and switch cmOperation to insert
		if Session.get 'cmShowAgain'
			console.log "t.data.operation", t.data.operation
			if t.data.operation == 'update'
				Session.set 'cmDoc', undefined
				Session.set 'cmOperation', 'insert'
			# Reset cmShowAgain
			Session.set 'cmShowAgain', false
		$('#afModal').data('bs.modal').options.backdrop = t.data.backdrop or true
		$('#afModal').modal 'show'
# Create the reactive flag that gates (re)rendering of the modal's quickForm.
Template.CreatorAutoformModals.onCreated ->
	@shouldUpdateQuickForm = new ReactiveVar(true)
# Clear multiple-update state when the modal template is torn down.
Template.CreatorAutoformModals.onDestroyed ->
	Session.set "cmTargetIds", null
	Session.set "cmIsMultipleUpdate", false
# FIX: removed the "| true | " merge-artifact prefix that corrupted this line.
# Form ids whose AutoForm hooks are already registered; 'cmForm' gets its
# default hooks right below, so it starts in the list.
registeredAutoFormHooks = ['cmForm']
defaultFormId = 'cmForm'
# Optional success callback handed over via the trigger's data attributes.
cmOnSuccessCallback = null
# Default hooks for the shared 'cmForm': close the modal on success, toast a
# translated error message otherwise.
AutoForm.addHooks 'cmForm',
	onSuccess: ->
		$('#afModal').modal('hide')
	onError: (operation,error) ->
		console.error error
		if error.reason
			toastr?.error?(TAPi18n.__(error.reason))
		else if error.message
			toastr?.error?(TAPi18n.__(error.message))
		else
			toastr?.error?(error)
# Resolve a dotted global name (e.g. "Creator.Collections.foo") to the object
# it denotes, walking down from window one segment at a time.
collectionObj = (name) ->
	target = window
	for segment in name.split('.')
		target = target[segment]
	target
# Fire one Steedos OData request (POST for insert, PUT for update) and feed the
# outcome back to AutoForm via this.done(); must be invoked as
# oDataOperation.call(self, ...) where self is the AutoForm onSubmit context.
oDataOperation = (type, url, data, object_name)->
	self = this
	$.ajax
		type: type
		url: url
		data: JSON.stringify(data)
		dataType: 'json'
		contentType: "application/json"
		processData: false
		beforeSend: (request) ->
			# Authenticate against the OData API with the current Meteor login token
			request.setRequestHeader 'X-User-Id', Meteor.userId()
			request.setRequestHeader 'X-Auth-Token', Accounts._storedLoginToken()
		success: (data) ->
			# Insert responses wrap the new record in `value`; updates return it flat
			if Session.get("cmOperation") == "insert"
				_id = data.value[0]._id
			else if Session.get("cmOperation") == "update"
				_id = data._id
			# console.log _id
			data = {_id: _id}
			data.type = type
			data.object_name = object_name
			self.done(null, data)
		error: (jqXHR, textStatus, errorThrown) ->
			# console.log(errorThrown);
			self.done(jqXHR.responseJSON.error.message)
# Strip the literal "Creator.Collections." prefix to obtain the bare object
# name, e.g. "Creator.Collections.contracts" -> "contracts".
# FIX: the original pattern /Creator.Collections./ left the dots unescaped and
# was unanchored, so it removed ANY matching 19-char run anywhere in the
# string; anchor it and escape the dots to match only the exact prefix.
getObjectName = (collectionName)->
	return collectionName.replace(/^Creator\.Collections\./, "")
# Build a SimpleSchema restricted to the fields named in the cmFields Session
# key; "grid" fields also pull in their "<field>.$.<sub>" child keys. Falls
# back to the collection's full schema when cmFields is unset.
getSimpleSchema = (collectionName)->
	if collectionName
		object_name = getObjectName collectionName
		object_fields = Creator.getObject(object_name).fields
		_fields = Creator.getFields(object_name)
		schema = collectionObj(collectionName).simpleSchema()._schema
		fields = Session.get("cmFields")
		final_schema = {}
		if fields
			fields = fields.replace(/\ /g, "").split(",")
			_.each fields, (field)->
				if object_fields[field]?.type == "grid"
					# Collect the grid's row sub-fields ("field.$.sub") into the schema
					table_fields = _.filter _fields, (f)->
						reg = new RegExp("^(" + field + ")(\\.\\$\\.){1}\\w+")
						return reg.test(f)
					_.each table_fields, (f)->
						_.extend(final_schema, _.pick(schema, f))
				obj = _.pick(schema, field, field + ".$")
				_.extend(final_schema, obj)
		else
			final_schema = schema
		if Session.get 'cmMeteorMethod'
			# Add the _ids virtual field so several records can be updated in one submit
			final_schema._ids =
				type: String
				optional: true
				autoform:
					type: "hidden"
			# Add the _object_name virtual field so the server-side method knows which table to update
			final_schema._object_name =
				type: String
				optional: true
				autoform:
					type: "hidden"
					defaultValue: ->
						return getObjectName collectionName
		return new SimpleSchema(final_schema)
# Wire the #afModal Bootstrap lifecycle: reset the form on show, bind Esc-to-close
# while shown, and on hide clear all cm* Session state (optionally re-opening the
# modal for the "save and new" flow).
Template.CreatorAutoformModals.rendered = ->
	self = this;
	$('#afModal').modal(show: false)
	onEscKey = (e) ->
		if e.keyCode == 27
			$('#afModal').modal 'hide'
	$('#afModal').on 'show.bs.modal', ->
		self.shouldUpdateQuickForm.set(true)
		operation = Session.get 'cmOperation'
		if operation == 'update'
			AutoForm.resetForm(Session.get('cmFormId') or defaultFormId)
	$('#afModal').on 'shown.bs.modal', ->
		if Steedos?.setModalMaxHeight
			Steedos.setModalMaxHeight()
		$(window).bind 'keyup', onEscKey
		# Focus the first input once the modal is fully visible
		setTimeout ->
			$("#afModal .form-control:first").focus()
		, 100
	$('#afModal').on 'hidden.bs.modal', ->
		$(window).unbind 'keyup', onEscKey
		# Keep the doc around: the save-and-new path below restores it after reset
		doc = Session.get 'cmDoc'
		sessionKeys = [
			'cmCollection',
			'cmOperation',
			'cmDoc',
			'cmButtonHtml',
			'cmFields',
			'cmOmitFields',
			'cmButtonContent',
			'cmTitle',
			'cmButtonClasses',
			'cmPrompt',
			'cmTemplate',
			'cmLabelClass',
			'cmInputColClass',
			'cmPlaceholder',
			'cmFormId',
			'cmAutoformType',
			'cmMeteorMethod',
			'cmCloseButtonContent',
			'cmCloseButtonClasses',
			'cmShowRemoveButton',
			'cmIsMultipleUpdate',
			'cmTargetIds',
			'cmEditSingleField',
			'cmFullScreen'
		]
		delete Session.keys[key] for key in sessionKeys
		Session.set("cmIsMultipleUpdate", false)
		self.shouldUpdateQuickForm.set(false)
		AutoForm.resetForm(Session.get('cmFormId') or defaultFormId)
		# If the user chose "save and new", re-trigger the original click
		if Session.get 'cmShowAgain'
			keyPress = Session.get 'cmPressKey'
			keyPress = '.' + keyPress.replace(/\s+/ig, '.')
			Meteor.defer ()->
				Session.set 'cmDoc', doc
				$(keyPress).click()
# Footer-button handlers for the shared #afModal form.
Template.CreatorAutoformModals.events
	'click button.btn-insert': (event,template) ->
		formId = Session.get('cmFormId') or defaultFormId
		$("#"+formId, "#afModal").submit()
	'click button.btn-update': (event,template)->
		isMultipleUpdate = Session.get('cmIsMultipleUpdate')
		targetIds = Session.get('cmTargetIds')
		isMultipleChecked = template.$(".ckb-multiple-update").is(":checked")
		formId = Session.get('cmFormId') or defaultFormId
		# Batch path: submit every selected id; otherwise only the edited doc
		if isMultipleUpdate and isMultipleChecked and targetIds?.length > 1
			template.$("[name=_ids]").val(targetIds.join(","))
		else
			template.$("[name=_ids]").val(Session.get("cmDoc")._id)
		template.$("##{formId}").submit()
	'click button.btn-remove': (event,template)->
		# Delete the current document through the Steedos OData API
		collection = Session.get 'cmCollection'
		object_name = getObjectName(collection)
		_id = Session.get('cmDoc')._id
		url = Steedos.absoluteUrl "/api/odata/v4/#{Steedos.spaceId()}/#{object_name}/#{_id}"
		$.ajax
			type: "delete"
			url: url
			dataType: "json"
			contentType: "application/json"
			beforeSend: (request) ->
				request.setRequestHeader('X-User-Id', Meteor.userId())
				# FIX: restore the login-token call that had been mangled by a
				# redaction artifact ("Accounts._PI:KEY:<KEY>END_PI()")
				request.setRequestHeader('X-Auth-Token', Accounts._storedLoginToken())
			success: (data) ->
				$('#afModal').modal 'hide'
				cmOnSuccessCallback?()
				toastr?.success?(t("afModal_remove_suc"))
			error: (jqXHR, textStatus, errorThrown) ->
				console.log(errorThrown)
	'click button.btn-update-and-create': (event,template)->
		formId = Session.get('cmFormId') or defaultFormId
		$("#"+formId, "#afModal").submit()
		Session.set 'cmShowAgain', true
	'click button.btn-insert-and-create': (event,template)->
		formId = Session.get('cmFormId') or defaultFormId
		$("#"+formId, "#afModal").submit()
		Session.set 'cmShowAgain', true
	'click .group-section-control': (event, template) ->
		event.preventDefault()
		event.stopPropagation()
		$(event.currentTarget).closest('.group-section').toggleClass('slds-is-open')
helpers =
cmCollection: () ->
Session.get 'cmCollection'
cmOperation: () ->
Session.get 'cmOperation'
cmDoc: () ->
Session.get 'cmDoc'
cmButtonHtml: () ->
Session.get 'cmButtonHtml'
cmFields: () ->
Session.get 'cmFields'
cmOmitFields: () ->
Session.get 'cmOmitFields'
cmButtonContent: () ->
Session.get 'cmButtonContent'
cmCloseButtonContent: () ->
Session.get 'cmCloseButtonContent'
cmTitle: () ->
Session.get 'cmTitle'
cmButtonClasses: () ->
Session.get 'cmButtonClasses'
cmCloseButtonClasses: () ->
Session.get 'cmCloseButtonClasses'
cmPrompt: () ->
Session.get 'cmPrompt'
cmTemplate: () ->
Session.get('cmTemplate') || "bootstrap3-horizontal"
cmLabelClass: () ->
Session.get('cmLabelClass') || "col-sm-2"
cmInputColClass: () ->
Session.get('cmInputColClass') || "col-sm-10"
cmPlaceholder: () ->
Session.get 'cmPlaceholder'
cmFormId: () ->
Session.get('cmFormId') or defaultFormId
cmAutoformType: () ->
# cmAutoformType会影响传递给method的参数
if Session.get 'cmUseOdataApi'
return undefined
if Session.get 'cmMeteorMethod'
if Session.get("cmOperation") == "insert"
return 'method'
if Session.get('cmOperation') == "update"
return 'method-update'
else
Session.get 'cmOperation'
cmModalDialogClass: () ->
Session.get 'cmModalDialogClass'
cmModalContentClass: () ->
Session.get 'cmModalContentClass'
cmMeteorMethod: () ->
Session.get 'cmMeteorMethod'
title: () ->
StringTemplate.compile '{{{cmTitle}}}', helpers
prompt: () ->
StringTemplate.compile '{{{cmPrompt}}}', helpers
buttonContent: () ->
StringTemplate.compile '{{{cmButtonContent}}}', helpers
closeButtonContent: () ->
StringTemplate.compile '{{{cmCloseButtonContent}}}', helpers
cmShowRemoveButton: () ->
Session.get 'cmShowRemoveButton'
shouldUpdateQuickForm: () ->
return Template.instance()?.shouldUpdateQuickForm.get()
cmSaveAndInsert: ()->
Session.get 'cmSaveAndInsert'
cmIsMultipleUpdate: ()->
isMultiple = Session.get('cmIsMultipleUpdate') and Session.get('cmTargetIds')?.length > 1
return isMultiple
isUseMethod: ()->
if Session.get 'cmMeteorMethod'
return true
else
return false
cmTargetIds: ()->
Session.get('cmTargetIds')
schema: ()->
cmCollection = Session.get 'cmCollection'
return getSimpleSchema(cmCollection)
schemaFields: ()->
cmCollection = Session.get 'cmCollection'
keys = []
if cmCollection
schemaInstance = getSimpleSchema(cmCollection)
schema = schemaInstance._schema
firstLevelKeys = schemaInstance._firstLevelSchemaKeys
object_name = getObjectName cmCollection
permission_fields = _.clone(Creator.getFields(object_name))
unless permission_fields
permission_fields = []
if Session.get 'cmMeteorMethod'
permission_fields.push "_ids"
permission_fields.push "_object_name"
if Session.get 'cmFields'
cmFields = Session.get('cmFields').replace(/\ /g, "")
cmFields = cmFields.split(",")
firstLevelKeys = _.intersection(firstLevelKeys, cmFields)
if Session.get 'cmOmitFields'
firstLevelKeys = _.difference firstLevelKeys, [Session.get('cmOmitFields')]
_.each schema, (value, key) ->
if (_.indexOf firstLevelKeys, key) > -1
if !value.autoform?.omit
keys.push key
if keys.length == 1
finalFields =
grouplessFields: [keys]
return finalFields
hiddenFields = Creator.getHiddenFields(schema)
disabledFields = Creator.getDisabledFields(schema)
fieldGroups = []
fieldsForGroup = []
isSingle = Session.get "cmEditSingleField"
grouplessFields = []
grouplessFields = Creator.getFieldsWithNoGroup(schema)
grouplessFields = Creator.getFieldsInFirstLevel(firstLevelKeys, grouplessFields)
if permission_fields
grouplessFields = _.intersection(permission_fields, grouplessFields)
grouplessFields = Creator.getFieldsWithoutOmit(schema, grouplessFields)
grouplessFields = Creator.getFieldsForReorder(schema, grouplessFields, isSingle)
fieldGroupNames = Creator.getSortedFieldGroupNames(schema)
_.each fieldGroupNames, (fieldGroupName) ->
fieldsForGroup = Creator.getFieldsForGroup(schema, fieldGroupName)
fieldsForGroup = Creator.getFieldsInFirstLevel(firstLevelKeys, fieldsForGroup)
if permission_fields
fieldsForGroup = _.intersection(permission_fields, fieldsForGroup)
fieldsForGroup = Creator.getFieldsWithoutOmit(schema, fieldsForGroup)
fieldsForGroup = Creator.getFieldsForReorder(schema, fieldsForGroup, isSingle)
fieldGroups.push
name: fieldGroupName
fields: fieldsForGroup
finalFields =
grouplessFields: grouplessFields
groupFields: fieldGroups
hiddenFields: hiddenFields
disabledFields: disabledFields
console.log finalFields
return finalFields
isMobile: ()->
if $(window).width() < 767
return true
else
return false
isDisabled: (key)->
cmCollection = Session.get 'cmCollection'
if cmCollection
object_name = getObjectName(cmCollection)
fields = Creator.getObject(object_name).fields
return fields[key].disabled
disabledFieldsValue: (key)->
cmCollection = Session.get 'cmCollection'
if cmCollection
object_name = getObjectName(cmCollection)
fields = Creator.getObject(object_name).fields
defaultValue = fields[key].defaultValue
if _.isFunction(defaultValue)
defaultValue = defaultValue()
return defaultValue
getLabel: (key)->
return AutoForm.getLabelForField(key)
isSingle: ()->
return Session.get("cmEditSingleField")
isFullScreen: ()->
return Session.get("cmFullScreen")
hasInlineHelpText: (key)->
cmCollection = Session.get 'cmCollection'
if cmCollection
object_name = getObjectName(cmCollection)
fields = Creator.getObject(object_name).fields
return fields[key]?.inlineHelpText
Template.CreatorAutoformModals.helpers helpers
Template.CreatorFormField.helpers helpers
Template.CreatorFormField.onRendered ->
self = this
self.$(".has-inline-text").each ->
id = "info_" + $(".control-label", $(this)).attr("for")
html = """
<span class="help-info" id="#{id}">
<i class="ion ion-information-circled"></i>
</span>
"""
$(".control-label", $(this)).after(html)
self.$(".info-popover").each ->
_id = $("~ .form-group .help-info", $(this)).attr("id");
$(this).dxPopover
target: "#" + _id,
showEvent: "mouseenter",
hideEvent: "mouseleave",
position: "top",
width: 300,
animation: {
show: {
type: "pop",
from: {
scale: 0
},
to: {
scale: 1
}
},
hide: {
type: "fade",
from: 1,
to: 0
}
}
Template.CreatorAfModal.events
'click *': (e, t) ->
e.preventDefault()
html = t.$('*').html()
if t.data.collectionName
if t.data.operation == "update"
title = "编辑#{t.data.collectionName}"
else if t.data.operation == "insert"
title = "新建#{t.data.collectionName}"
else if t.data.operation == "remove"
title = "删除#{t.data.collectionName}"
else
title = html
#新增_ids虚拟字段,以实现条记录同时更新
fields = t.data.fields
if fields and fields.length
if fields.split(",").length == 1
Session.set "cmEditSingleField", true
fields = _.union(fields.split(","),"_ids","_object_name").join(",")
else
Session.set "cmEditSingleField", false
Session.set 'cmCollection', t.data.collection
Session.set 'cmOperation', t.data.operation
Session.set 'cmFields', fields
Session.set 'cmOmitFields', t.data.omitFields
Session.set 'cmButtonHtml', html
Session.set 'cmTitle', t.data.title or title
Session.set 'cmTemplate', t.data.template
Session.set 'cmLabelClass', t.data.labelClass or t.data['label-class']
Session.set 'cmInputColClass', t.data.inputColClass or t.data['input-col-class']
Session.set 'cmPlaceholder', if t.data.placeholder is true then 'schemaLabel' else ''
Session.set 'cmFormId', t.data.formId
Session.set 'cmMeteorMethod', t.data.meteormethod
Session.set 'cmModalDialogClass', t.data.dialogClass
Session.set 'cmModalContentClass', t.data.contentClass
Session.set 'cmShowRemoveButton', t.data.showRemoveButton or false
Session.set 'cmSaveAndInsert', t.data.saveAndInsert
Session.set 'cmUseOdataApi', t.data.useOdataApi
cmOnSuccessCallback = t.data.onSuccess
if not _.contains registeredAutoFormHooks, t.data.formId
# userId = Meteor.userId()
# cmCollection = Session.get 'cmCollection'
# object_name = getObjectName(cmCollection)
# console.log "afModal-object_name", object_name
# triggers = Creator.getObject(object_name).triggers
AutoForm.addHooks t.data.formId,
before:
method: (doc)->
userId = Meteor.userId()
cmCollection = Session.get 'cmCollection'
object_name = getObjectName(cmCollection)
triggers = Creator.getObject(object_name).triggers
if triggers
if Session.get("cmOperation") == "insert"
_.each triggers, (trigger, key)->
if trigger.on == "client" and trigger.when == "before.insert"
trigger.todo.apply({object_name: object_name},[userId, doc])
else if Session.get("cmOperation") == "update"
_.each triggers, (trigger, key)->
if trigger.on == "client" and trigger.when == "before.update"
trigger.todo.apply({object_name: object_name},[userId, doc])
return doc
after:
method: (error, result)->
userId = Meteor.userId()
cmCollection = Session.get 'cmCollection'
object_name = getObjectName(cmCollection)
triggers = Creator.getObject(object_name).triggers
if triggers
if Session.get("cmOperation") == "insert"
_.each triggers, (trigger, key)->
if trigger.on == "client" and trigger.when == "after.insert"
trigger.todo.apply({object_name: object_name},[userId, result])
else if Session.get("cmOperation") == "update"
_.each triggers, (trigger, key)->
if trigger.on == "client" and trigger.when == "after.update"
trigger.todo.apply({object_name: object_name},[userId, result])
return result
onSubmit: (insertDoc, updateDoc, currentDoc)->
console.log insertDoc
userId = Meteor.userId()
cmCollection = Session.get 'cmCollection'
object_name = getObjectName(cmCollection)
triggers = Creator.getObject(object_name).triggers
self = this
urls = []
if Session.get("cmOperation") == "insert"
data = insertDoc
type = "post"
urls.push Steedos.absoluteUrl("/api/odata/v4/#{Steedos.spaceId()}/#{object_name}")
delete data._object_name
if Session.get("cmOperation") == "update"
if Session.get("cmMeteorMethod")
if updateDoc["$set"]
_id = updateDoc["$set"]._ids || Session.get("cmDoc")._id
else
_id = Session.get("cmDoc")._id
else
_id = Session.get("cmDoc")._id
if updateDoc["$set"]
delete updateDoc["$set"]._ids
delete updateDoc["$set"]._object_name
if updateDoc["$unset"]
delete updateDoc["$unset"]._ids
delete updateDoc["$unset"]._object_name
# insertDoc里面的值是最全最精确的
updateDoc["$set"] = insertDoc
_ids = _id.split(",")
_.each _ids, (id)->
urls.push Steedos.absoluteUrl("/api/odata/v4/#{Steedos.spaceId()}/#{object_name}/#{id}")
data = updateDoc
type = "put"
console.log "begin......", data
if triggers
if Session.get("cmOperation") == "insert"
_.each triggers, (trigger, key)->
if trigger.on == "client" and (trigger.when == "before.insert" or trigger.when == "after.insert")
trigger.todo.apply({object_name: object_name},[userId, data])
if Session.get("cmOperation") == "update"
_.each triggers, (trigger, key)->
if trigger.on == "client" and (trigger.when == "before.update" or trigger.when == "after.update")
trigger.todo.apply({object_name: object_name},[userId, data])
_.each urls, (url)->
oDataOperation.call(self, type, url, data, object_name)
return false
onSuccess: (operation,result)->
$('#afModal').modal 'hide'
# if result.type == "post"
# app_id = Session.get("app_id")
# object_name = result.object_name
# record_id = result._id
# url = "/app/#{app_id}/#{object_name}/view/#{record_id}"
# FlowRouter.go url
onError: (operation,error) ->
console.error error
if error.reason
toastr?.error?(TAPi18n.__(error.reason))
else if error.message
toastr?.error?(TAPi18n.__(error.message))
else
toastr?.error?(error)
registeredAutoFormHooks.push t.data.formId
if t.data.doc
Session.set 'cmDoc', collectionObj(t.data.collection).findOne _id: t.data.doc
if t.data.showRemoveButton
t.data.buttonContent = false
if t.data.buttonContent or t.data.buttonContent is false
Session.set 'cmButtonContent', t.data.buttonContent
else if t.data.operation == 'insert'
Session.set 'cmButtonContent', 'Create'
else if t.data.operation == 'update'
Session.set 'cmButtonContent', 'Update'
else if t.data.operation == 'remove'
Session.set 'cmButtonContent', 'Delete'
if t.data.buttonClasses
Session.set 'cmButtonClasses', t.data.buttonClasses
else if t.data.operation == 'remove'
Session.set 'cmButtonClasses', 'btn btn-danger'
else
Session.set 'cmButtonClasses', 'btn btn-primary'
Session.set 'cmCloseButtonContent', t.data.closeButtonContent or ''
Session.set 'cmCloseButtonClasses', t.data.closeButtonClasses or 'btn btn-danger'
if t.data.prompt
Session.set 'cmPrompt', t.data.prompt
else if t.data.operation == 'remove'
Session.set 'cmPrompt', 'Are you sure?'
else
Session.set 'cmPrompt', ''
# 记录本次点击事件的className
keyClassName = e.currentTarget.className
Session.set 'cmPressKey', keyClassName
# 上次的操作是保存并新建,清空 cmDoc,并设置 cmOperation为 insert
if Session.get 'cmShowAgain'
console.log "t.data.operation", t.data.operation
if t.data.operation == 'update'
Session.set 'cmDoc', undefined
Session.set 'cmOperation', 'insert'
# 重置 cmShowAgain
Session.set 'cmShowAgain', false
$('#afModal').data('bs.modal').options.backdrop = t.data.backdrop or true
$('#afModal').modal 'show'
Template.CreatorAutoformModals.onCreated ->
self = this;
self.shouldUpdateQuickForm = new ReactiveVar(true);
Template.CreatorAutoformModals.onDestroyed ->
Session.set 'cmIsMultipleUpdate', false
Session.set 'cmTargetIds', null
|
[
{
"context": " user.set\n email : 'user@example.com'\n password : '$ecre8'\n ",
"end": 793,
"score": 0.999916672706604,
"start": 777,
"tag": "EMAIL",
"value": "user@example.com"
},
{
"context": "'user@example.com'\n p... | source/TextUml/Scripts/specs/application/models/user.coffee | marufsiddiqui/textuml-dotnet | 1 | define (require) ->
_ = require 'underscore'
User = require '../../../application/models/user'
repeatString = require('../../helpers').repeatString
describe 'models/user', ->
user = null
beforeEach -> user = new User
describe '#defaults', ->
it 'has #email', ->
expect(user.defaults()).to.have.property 'email'
it 'has #password', ->
expect(user.defaults()).to.have.property 'password'
it 'has #confirmPassword', ->
expect(user.defaults()).to.have.property 'confirmPassword'
describe '#url', ->
it 'is set', -> expect(user.url).to.exist
describe 'validation', ->
describe 'valid', ->
beforeEach ->
user.set
email : 'user@example.com'
password : '$ecre8'
confirmPassword : '$ecre8'
it 'is valid', -> expect(user.isValid()).to.be.ok
describe 'invalid', ->
describe '#email', ->
describe 'missing', ->
beforeEach ->
user.set
password : '$secret'
confirmPassword : '$ecre8'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'email'
describe 'blank', ->
beforeEach ->
user.set
email : ''
password : '$secret'
confirmPassword : '$ecre8'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'email'
describe 'incorrect format', ->
beforeEach ->
user.set
email : 'foo bar'
password : '$secret'
confirmPassword : '$ecre8'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'email'
describe '#password', ->
describe 'missing', ->
beforeEach ->
user.set
email : 'user@example.com'
confirmPassword : '$ecre8'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe 'blank', ->
beforeEach ->
user.set
email : 'user@example.com'
password : ''
confirmPassword : '$ecre8'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe 'less than minimum length', ->
beforeEach ->
user.set
email : 'user@example.com'
password : repeatString 5
confirmPassword : '$ecre8'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe 'more than maximum length', ->
beforeEach ->
user.set
email : 'user@example.com'
password : repeatString 65
confirmPassword : '$ecre8'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe '#confirmPassword', ->
describe 'missing', ->
beforeEach ->
user.set
email : 'user@example.com'
password : '$ecre8'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'confirmPassword'
describe 'blank', ->
beforeEach ->
user.set
email : 'user@example.com'
password : '$ecre8'
confirmPassword : ''
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'confirmPassword'
describe 'do not match', ->
beforeEach ->
user.set
email : 'user@example.com'
password : '$ecre8'
confirmPassword : 'foo bar'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'confirmPassword' | 200547 | define (require) ->
_ = require 'underscore'
User = require '../../../application/models/user'
repeatString = require('../../helpers').repeatString
describe 'models/user', ->
user = null
beforeEach -> user = new User
describe '#defaults', ->
it 'has #email', ->
expect(user.defaults()).to.have.property 'email'
it 'has #password', ->
expect(user.defaults()).to.have.property 'password'
it 'has #confirmPassword', ->
expect(user.defaults()).to.have.property 'confirmPassword'
describe '#url', ->
it 'is set', -> expect(user.url).to.exist
describe 'validation', ->
describe 'valid', ->
beforeEach ->
user.set
email : '<EMAIL>'
password : <PASSWORD>'
confirmPassword : <PASSWORD>'
it 'is valid', -> expect(user.isValid()).to.be.ok
describe 'invalid', ->
describe '#email', ->
describe 'missing', ->
beforeEach ->
user.set
password : <PASSWORD>'
confirmPassword : <PASSWORD>'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'email'
describe 'blank', ->
beforeEach ->
user.set
email : ''
password : <PASSWORD>'
confirmPassword : <PASSWORD>'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'email'
describe 'incorrect format', ->
beforeEach ->
user.set
email : 'foo bar'
password : <PASSWORD>'
confirmPassword : <PASSWORD>'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'email'
describe '#password', ->
describe 'missing', ->
beforeEach ->
user.set
email : '<EMAIL>'
confirmPassword : <PASSWORD>'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe 'blank', ->
beforeEach ->
user.set
email : '<EMAIL>'
password : ''
confirmPassword : <PASSWORD>'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe 'less than minimum length', ->
beforeEach ->
user.set
email : '<EMAIL>'
password : repeatString 5
confirmPassword : <PASSWORD>'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe 'more than maximum length', ->
beforeEach ->
user.set
email : '<EMAIL>'
password : <PASSWORD>String <PASSWORD>
confirmPassword : <PASSWORD>'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe '#confirmPassword', ->
describe 'missing', ->
beforeEach ->
user.set
email : '<EMAIL>'
password : <PASSWORD>'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'confirmPassword'
describe 'blank', ->
beforeEach ->
user.set
email : '<EMAIL>'
password : <PASSWORD>'
confirmPassword : ''
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'confirmPassword'
describe 'do not match', ->
beforeEach ->
user.set
email : '<EMAIL>'
password : <PASSWORD>'
confirmPassword : '<PASSWORD>'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'confirmPassword' | true | define (require) ->
_ = require 'underscore'
User = require '../../../application/models/user'
repeatString = require('../../helpers').repeatString
describe 'models/user', ->
user = null
beforeEach -> user = new User
describe '#defaults', ->
it 'has #email', ->
expect(user.defaults()).to.have.property 'email'
it 'has #password', ->
expect(user.defaults()).to.have.property 'password'
it 'has #confirmPassword', ->
expect(user.defaults()).to.have.property 'confirmPassword'
describe '#url', ->
it 'is set', -> expect(user.url).to.exist
describe 'validation', ->
describe 'valid', ->
beforeEach ->
user.set
email : 'PI:EMAIL:<EMAIL>END_PI'
password : PI:PASSWORD:<PASSWORD>END_PI'
confirmPassword : PI:PASSWORD:<PASSWORD>END_PI'
it 'is valid', -> expect(user.isValid()).to.be.ok
describe 'invalid', ->
describe '#email', ->
describe 'missing', ->
beforeEach ->
user.set
password : PI:PASSWORD:<PASSWORD>END_PI'
confirmPassword : PI:PASSWORD:<PASSWORD>END_PI'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'email'
describe 'blank', ->
beforeEach ->
user.set
email : ''
password : PI:PASSWORD:<PASSWORD>END_PI'
confirmPassword : PI:PASSWORD:<PASSWORD>END_PI'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'email'
describe 'incorrect format', ->
beforeEach ->
user.set
email : 'foo bar'
password : PI:PASSWORD:<PASSWORD>END_PI'
confirmPassword : PI:PASSWORD:<PASSWORD>END_PI'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'email'
describe '#password', ->
describe 'missing', ->
beforeEach ->
user.set
email : 'PI:EMAIL:<EMAIL>END_PI'
confirmPassword : PI:PASSWORD:<PASSWORD>END_PI'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe 'blank', ->
beforeEach ->
user.set
email : 'PI:EMAIL:<EMAIL>END_PI'
password : ''
confirmPassword : PI:PASSWORD:<PASSWORD>END_PI'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe 'less than minimum length', ->
beforeEach ->
user.set
email : 'PI:EMAIL:<EMAIL>END_PI'
password : repeatString 5
confirmPassword : PI:PASSWORD:<PASSWORD>END_PI'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe 'more than maximum length', ->
beforeEach ->
user.set
email : 'PI:EMAIL:<EMAIL>END_PI'
password : PI:PASSWORD:<PASSWORD>END_PIString PI:PASSWORD:<PASSWORD>END_PI
confirmPassword : PI:PASSWORD:<PASSWORD>END_PI'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'password'
describe '#confirmPassword', ->
describe 'missing', ->
beforeEach ->
user.set
email : 'PI:EMAIL:<EMAIL>END_PI'
password : PI:PASSWORD:<PASSWORD>END_PI'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'confirmPassword'
describe 'blank', ->
beforeEach ->
user.set
email : 'PI:EMAIL:<EMAIL>END_PI'
password : PI:PASSWORD:<PASSWORD>END_PI'
confirmPassword : ''
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'confirmPassword'
describe 'do not match', ->
beforeEach ->
user.set
email : 'PI:EMAIL:<EMAIL>END_PI'
password : PI:PASSWORD:<PASSWORD>END_PI'
confirmPassword : 'PI:PASSWORD:<PASSWORD>END_PI'
it 'is invalid', ->
expect(user.isValid()).to.not.be.ok
expect(user.validationError).to.have.property 'confirmPassword' |
[
{
"context": "(callback) ->\n Users.create [\n username: 'my_username_1',\n email: 'my_1@email.com',\n password: ",
"end": 607,
"score": 0.9996296167373657,
"start": 594,
"tag": "USERNAME",
"value": "my_username_1"
},
{
"context": " [\n username: 'my_username_1... | test/exists.coffee | wdavidw/node-ron | 14 |
should = require 'should'
try config = require '../conf/test' catch e
ron = require '../lib'
client = Users = null
before (next) ->
client = ron config
Users = client.get
name: 'users'
properties:
user_id: identifier: true
username: unique: true
email: index: true
next()
beforeEach (next) ->
Users.clear next
afterEach (next) ->
client.redis.keys '*', (err, keys) ->
should.not.exists err
keys.should.eql []
next()
after (next) ->
client.quit next
describe 'exists', ->
create = (callback) ->
Users.create [
username: 'my_username_1',
email: 'my_1@email.com',
password: 'my_password'
,
username: 'my_username_2',
email: 'my_2@email.com',
password: 'my_password'
], (err, users) ->
should.ifError err
callback null, users
it 'Test exists # true # identifier', (next) ->
create (err, users) ->
user = users[1]
Users.exists user.user_id, (err, userId) ->
should.not.exist err
userId.should.eql user.user_id
Users.clear next
it 'Test exists # true # record with identifier', (next) ->
create (err, users) ->
user = users[1]
Users.exists {user_id: user.user_id}, (err, userId) ->
should.not.exist err
userId.should.eql user.user_id
Users.clear next
it 'Test exists # true # record with unique property stored in hash', (next) ->
create (err, users) ->
user = users[1]
Users.exists {username: user.username}, (err, userId) ->
should.not.exist err
userId.should.eql user.user_id
Users.clear next
it 'Test exists # false # indentifier', (next) ->
Users.exists 'missing', (err, exists) ->
should.not.exist err
should.not.exist exists
Users.clear next
it 'Test exists # false # record with identifier', (next) ->
Users.exists {user_id: 'missing'}, (err, exists) ->
should.not.exist err
should.not.exist exists
Users.clear next
it 'Test exists # false # record with unique property stored in hash', (next) ->
Users.exists {username: 'missing'}, (err, exists) ->
should.not.exist err
should.not.exist exists
Users.clear next
| 75566 |
should = require 'should'
try config = require '../conf/test' catch e
ron = require '../lib'
client = Users = null
before (next) ->
client = ron config
Users = client.get
name: 'users'
properties:
user_id: identifier: true
username: unique: true
email: index: true
next()
beforeEach (next) ->
Users.clear next
afterEach (next) ->
client.redis.keys '*', (err, keys) ->
should.not.exists err
keys.should.eql []
next()
after (next) ->
client.quit next
describe 'exists', ->
create = (callback) ->
Users.create [
username: 'my_username_1',
email: '<EMAIL>',
password: '<PASSWORD>'
,
username: 'my_username_2',
email: '<EMAIL>',
password: '<PASSWORD>'
], (err, users) ->
should.ifError err
callback null, users
it 'Test exists # true # identifier', (next) ->
create (err, users) ->
user = users[1]
Users.exists user.user_id, (err, userId) ->
should.not.exist err
userId.should.eql user.user_id
Users.clear next
it 'Test exists # true # record with identifier', (next) ->
create (err, users) ->
user = users[1]
Users.exists {user_id: user.user_id}, (err, userId) ->
should.not.exist err
userId.should.eql user.user_id
Users.clear next
it 'Test exists # true # record with unique property stored in hash', (next) ->
create (err, users) ->
user = users[1]
Users.exists {username: user.username}, (err, userId) ->
should.not.exist err
userId.should.eql user.user_id
Users.clear next
it 'Test exists # false # indentifier', (next) ->
Users.exists 'missing', (err, exists) ->
should.not.exist err
should.not.exist exists
Users.clear next
it 'Test exists # false # record with identifier', (next) ->
Users.exists {user_id: 'missing'}, (err, exists) ->
should.not.exist err
should.not.exist exists
Users.clear next
it 'Test exists # false # record with unique property stored in hash', (next) ->
Users.exists {username: 'missing'}, (err, exists) ->
should.not.exist err
should.not.exist exists
Users.clear next
| true |
should = require 'should'
try config = require '../conf/test' catch e
ron = require '../lib'
client = Users = null
before (next) ->
client = ron config
Users = client.get
name: 'users'
properties:
user_id: identifier: true
username: unique: true
email: index: true
next()
beforeEach (next) ->
Users.clear next
afterEach (next) ->
client.redis.keys '*', (err, keys) ->
should.not.exists err
keys.should.eql []
next()
after (next) ->
client.quit next
describe 'exists', ->
create = (callback) ->
Users.create [
username: 'my_username_1',
email: 'PI:EMAIL:<EMAIL>END_PI',
password: 'PI:PASSWORD:<PASSWORD>END_PI'
,
username: 'my_username_2',
email: 'PI:EMAIL:<EMAIL>END_PI',
password: 'PI:PASSWORD:<PASSWORD>END_PI'
], (err, users) ->
should.ifError err
callback null, users
it 'Test exists # true # identifier', (next) ->
create (err, users) ->
user = users[1]
Users.exists user.user_id, (err, userId) ->
should.not.exist err
userId.should.eql user.user_id
Users.clear next
it 'Test exists # true # record with identifier', (next) ->
create (err, users) ->
user = users[1]
Users.exists {user_id: user.user_id}, (err, userId) ->
should.not.exist err
userId.should.eql user.user_id
Users.clear next
it 'Test exists # true # record with unique property stored in hash', (next) ->
create (err, users) ->
user = users[1]
Users.exists {username: user.username}, (err, userId) ->
should.not.exist err
userId.should.eql user.user_id
Users.clear next
it 'Test exists # false # indentifier', (next) ->
Users.exists 'missing', (err, exists) ->
should.not.exist err
should.not.exist exists
Users.clear next
it 'Test exists # false # record with identifier', (next) ->
Users.exists {user_id: 'missing'}, (err, exists) ->
should.not.exist err
should.not.exist exists
Users.clear next
it 'Test exists # false # record with unique property stored in hash', (next) ->
Users.exists {username: 'missing'}, (err, exists) ->
should.not.exist err
should.not.exist exists
Users.clear next
|
[
{
"context": "'\n\ntest \"capitalized\", ->\n emblem =\n \"\"\"\n %Alex alex\n %Alex\n %Woot\n \"\"\"\n shouldCompileToStrin",
"end": 32537,
"score": 0.8708382248878479,
"start": 32536,
"tag": "NAME",
"value": "a"
},
{
"context": "\ntest \"capitalized\", ->\n emblem =\n \"... | bower_components/emblem/test/qunit_spec.coffee | Naviam/cloudcron | 0 |
# Test Setup: Set up an environment that'll work for both Node and Qunit tests.
Ember = window?.Emblem || @Emblem || {}
# These are needed for the full version ember to load properly
LoadedEmber = LoadedEmber || {}
Ember.Handlebars = LoadedEmber.Handlebars
Ember.warn = LoadedEmber.warn
if Emblem?
# Qunit testing
_equal = equal
equals = equal = (a, b, msg) ->
# Allow exec with missing message params
_equal(a, b, msg || '')
# In QUnit, we use module() instead of nonexistent suite()
window.suite = module
else
# Setup for Node package testing
Handlebars = require('handlebars')
EmberHandlebars = require('./resources/ember-template-compiler.js').EmberHandlebars
Emblem = require('../lib/emblem')
# TODO: replace with real expect()
`expect = function() {};`
{equal, equals, ok, throws} = require("assert")
unless CompilerContext?
# Note that this doesn't have the same context separation as the rspec test.
# Both should be run for full acceptance of the two libary modes.
CompilerContext =
compile: (template, options) ->
Emblem.compile(Handlebars, template, options)
supportsEachHelperDataKeywords = Handlebars.VERSION.slice(0, 3) >= 1.2
supportsSubexpressions = Handlebars.VERSION.slice(0, 3) >= 1.3
precompileEmber = (emblem) ->
Emblem.precompile(EmberHandlebars, emblem).toString()
shouldEmberPrecompileToHelper = (emblem, helper = 'bind-attr') ->
result = precompileEmber emblem
ok (result.match "helpers.#{helper}") or (result.match "helpers\\['#{helper}'\\]")
result
shouldCompileToString = (string, hashOrArray, expected) ->
if hashOrArray.constructor == String
shouldCompileToWithPartials(string, {}, false, hashOrArray, null, true)
else
shouldCompileToWithPartials(string, hashOrArray, false, expected, null, true)
shouldCompileTo = (string, hashOrArray, expected, message) ->
if hashOrArray.constructor == String
shouldCompileToWithPartials(string, {}, false, hashOrArray, message)
else
shouldCompileToWithPartials(string, hashOrArray, false, expected, message)
shouldCompileToWithPartials = (string, hashOrArray, partials, expected, message, strings) ->
options = null
if strings
options = {}
options.stringParams = true
result = compileWithPartials(string, hashOrArray, partials, options)
equal(result, expected, "'" + result + "' should === '" + expected + "': " + message)
# Compile `string` and render it. `hashOrArray` is either a plain context
# object, or an array of [context, helpers, partials]. When a helpers hash
# is supplied, the globally registered Handlebars helpers are merged into
# it in place as fallbacks (caller-supplied helpers win).
compileWithPartials = (string, hashOrArray, partials, options = {}) ->
  template = CompilerContext.compile(string, options)
  if Object::toString.call(hashOrArray) == "[object Array]"
    if helpers = hashOrArray[1]
      for prop of Handlebars.helpers
        helpers[prop] = helpers[prop] || Handlebars.helpers[prop]
    ary = []
    ary.push(hashOrArray[0])
    # Implicit object literal: pushes { helpers, partials } as the
    # render-time options argument.
    ary.push
      helpers: hashOrArray[1]
      partials: hashOrArray[2]
  else
    ary = [hashOrArray]
  template.apply(this, ary)
# Assert that invoking `fn` throws. When `exMessage` is given, also assert
# that the thrown error's message matches it.
shouldThrow = (fn, exMessage) ->
  caught = false
  try
    fn()
  catch e
    caught = true
    ok e.message.match(exMessage), "exception message matched" if exMessage
  ok caught, "an exception was thrown"
# Trivial test helper: prefixes its argument with "ECHO ".
Handlebars.registerHelper 'echo', (param) ->
  "ECHO " + param
suite "html one-liners"
test "element only", ->
shouldCompileTo "p", "<p></p>"
test "with text", ->
shouldCompileTo "p Hello", "<p>Hello</p>"
test "with more complex text", ->
shouldCompileTo "p Hello, how's it going with you today?", "<p>Hello, how's it going with you today?</p>"
test "with trailing space", ->
shouldCompileTo "p Hello ", "<p>Hello </p>"
suite "html multi-lines"
test "two lines", ->
emblem =
"""
p This is
pretty cool.
"""
shouldCompileTo emblem, "<p>This is pretty cool.</p>"
test "three lines", ->
emblem =
"""
p This is
pretty damn
cool.
"""
shouldCompileTo emblem, "<p>This is pretty damn cool.</p>"
test "three lines w/ embedded html", ->
emblem =
"""
p This is
pretty <span>damn</span>
cool.
"""
shouldCompileTo emblem, "<p>This is pretty <span>damn</span> cool.</p>"
test "indentation doesn't need to match starting inline content's", ->
emblem =
"""
span Hello,
How are you?
"""
shouldCompileTo emblem, "<span>Hello, How are you?</span>"
test "indentation may vary between parent/child, must be consistent within inline-block", ->
emblem =
"""
div
span Hello,
How are you?
Excellent.
p asd
"""
shouldCompileTo emblem, "<div><span>Hello, How are you? Excellent.</span><p>asd</p></div>"
emblem =
"""
div
span Hello,
How are you?
Excellent.
"""
shouldThrow -> CompilerContext.compile emblem
test "indentation may vary between parent/child, must be consistent within inline-block pt 2", ->
emblem =
"""
div
span Hello,
How are you?
Excellent.
"""
shouldCompileTo emblem, "<div><span>Hello, How are you? Excellent.</span></div>"
test "w/ mustaches", ->
emblem =
"""
div
span Hello,
{{foo}} are you?
Excellent.
"""
shouldCompileTo emblem, { foo: "YEAH" }, "<div><span>Hello, YEAH are you? Excellent.</span></div>"
test "w/ block mustaches", ->
emblem =
'''
p Hello, #{ sally | Hello},
and {{sally: span Hello}}!
'''
shouldCompileTo emblem,
'<p>Hello, <sally class="none">Hello</sally>, and <sally class="none"><span>Hello</span></sally>!</p>'
emblem =
'''
p Hello, #{ sally: span: a Hello}!
'''
shouldCompileTo emblem,
'<p>Hello, <sally class="none"><span><a>Hello</a></span></sally>!</p>'
test "with followup", ->
emblem =
"""
p This is
pretty cool.
p Hello.
"""
shouldCompileTo emblem, "<p>This is pretty cool.</p><p>Hello.</p>"
suite '#{} syntax'
test 'acts like {{}}', ->
emblem =
'''
span Yo #{foo}, I herd.
'''
shouldCompileTo emblem,
{ foo: '<span>123</span>' },
"<span>Yo <span>123</span>, I herd.</span>"
test 'can start inline content', ->
emblem =
'''
span #{foo}, I herd.
'''
shouldCompileTo emblem, { foo: "dawg" }, "<span>dawg, I herd.</span>"
test 'can end inline content', ->
emblem =
'''
span I herd #{foo}
'''
shouldCompileTo emblem, { foo: "dawg" }, "<span>I herd dawg</span>"
test "doesn't screw up parsing when # used in text nodes", ->
emblem =
'''
span OMG #YOLO
'''
shouldCompileTo emblem, "<span>OMG #YOLO</span>"
test "# can be only thing on line", ->
emblem =
'''
span #
'''
shouldCompileTo emblem, "<span>#</span>"
### TODO: this
test "can be escaped", ->
emblem =
'''
span #\\{yes}
'''
shouldCompileTo emblem, '<span>#{yes}</span>'
###
runTextLineSuite = (ch) ->
sct = (emblem, obj, expected) ->
unless expected?
expected = obj
obj = {}
unless ch == '`'
expected = expected.replace /\n/g, ""
# Replace tabs with optional trailing whitespace.
if ch == "'"
expected = expected.replace /\t/g, " "
else
expected = expected.replace /\t/g, ""
emblem = emblem.replace /_/g, ch
shouldCompileTo emblem, obj, expected
suite "text lines starting with '#{ch}'"
test "basic", -> sct "_ What what", "What what\n\t"
test "with html", ->
sct '_ What <span id="woot" data-t="oof" class="f">what</span>!',
'What <span id="woot" data-t="oof" class="f">what</span>!\n\t'
test "multiline", ->
emblem =
"""
_ Blork
Snork
"""
sct emblem, "Blork\nSnork\n\t"
test "triple multiline", ->
emblem =
"""
_ Blork
Snork
Bork
"""
sct emblem, "Blork\nSnork\nBork\n\t"
test "quadruple multiline", ->
emblem =
"""
_ Blork
Snork
Bork
Fork
"""
sct emblem, "Blork\nSnork\nBork\nFork\n\t"
test "multiline w/ trailing whitespace", ->
emblem =
"""
_ Blork
Snork
"""
sct emblem, "Blork \nSnork\n\t"
test "secondline", ->
emblem =
"""
_
Good
"""
sct emblem, "Good\n\t"
test "secondline multiline", ->
emblem =
"""
_
Good
Bork
"""
sct emblem, "Good\nBork\n\t"
test "with a mustache", ->
emblem =
"""
_ Bork {{foo}}!
"""
sct emblem,
{ foo: "YEAH" },
'Bork YEAH!\n\t'
test "with mustaches", ->
emblem =
"""
_ Bork {{foo}} {{{bar}}}!
"""
sct emblem,
{ foo: "YEAH", bar: "<span>NO</span>"},
'Bork YEAH <span>NO</span>!\n\t'
test "indented, then in a row", ->
expect(0)
return "PENDING"
emblem =
"""
_
Good
riddance2
dude
gnar
foo
"""
sct emblem, "Good\n riddance2\n dude\n gnar\n foo\n\t"
test "indented, then in a row, then indented", ->
expect(0)
return "PENDING"
emblem =
"""
_
Good
riddance2
dude
gnar
foo
far
faz
"""
sct emblem, "Good \n riddance2 \n dude \n gnar \n foo \n far \n faz \n\t"
test "uneven indentation megatest", ->
expect(0)
return "PENDING"
emblem =
"""
_
Good
riddance
dude
"""
sct emblem, "Good\n riddance\ndude\n\t"
emblem =
"""
_
Good
riddance3
dude
"""
sct emblem, "Good\n riddance3\n dude\n\t"
emblem =
"""
_ Good
riddance
dude
"""
sct emblem, "Good\nriddance\n dude\n\t"
test "on each line", ->
emblem =
"""
pre
_ This
_ should
_ hopefully
_ work, and work well.
"""
sct emblem, '<pre>This\n\t should\n\t hopefully\n\t work, and work well.\n\t</pre>'
test "with blank", ->
emblem =
"""
pre
_ This
_ should
_
_ hopefully
_ work, and work well.
"""
sct emblem, '<pre>This\n\t should\n\t\n\t hopefully\n\t work, and work well.\n\t</pre>'
runTextLineSuite '|'
runTextLineSuite '`'
runTextLineSuite "'"
suite "text line starting with angle bracket"
test "can start with angle bracket html", ->
emblem =
"""
<span>Hello</span>
"""
shouldCompileTo emblem, "<span>Hello</span>"
test "can start with angle bracket html and go to multiple lines", ->
emblem =
"""
<span>Hello dude,
what's up?</span>
"""
shouldCompileTo emblem, "<span>Hello dude, what's up?</span>"
suite "preprocessor"
test "it strips out preceding whitespace", ->
emblem =
"""
p Hello
"""
shouldCompileTo emblem, "<p>Hello</p>"
test "it handles preceding indentation", ->
emblem = " p Woot\n p Ha"
shouldCompileTo emblem, "<p>Woot</p><p>Ha</p>"
test "it handles preceding indentation and newlines", ->
emblem = "\n p Woot\n p Ha"
shouldCompileTo emblem, "<p>Woot</p><p>Ha</p>"
test "it handles preceding indentation and newlines pt 2", ->
emblem = " \n p Woot\n p Ha"
shouldCompileTo emblem, "<p>Woot</p><p>Ha</p>"
suite "comments"
test "it strips out single line '/' comments", ->
emblem =
"""
p Hello
/ A comment
h1 How are you?
"""
shouldCompileTo emblem, "<p>Hello</p><h1>How are you?</h1>"
test "it strips out multi-line '/' comments", ->
emblem =
"""
p Hello
/ A comment
that goes on to two lines
even three!
h1 How are you?
"""
shouldCompileTo emblem, "<p>Hello</p><h1>How are you?</h1>"
test "it strips out multi-line '/' comments without text on the first line", ->
emblem =
"""
p Hello
/
A comment
that goes on to two lines
even three!
h1 How are you?
"""
shouldCompileTo emblem, "<p>Hello</p><h1>How are you?</h1>"
test "mix and match with various indentation", ->
emblem =
"""
/ A test
p Hello
span
/ This is gnarly
p Yessir nope.
/ Nothin but comments
so many comments.
/
p Should not show up
"""
shouldCompileTo emblem, "<p>Hello</p><span><p>Yessir nope.</p></span>"
test "uneven indentation", ->
emblem =
"""
/ nop
nope
nope
"""
shouldCompileTo emblem, ""
test "uneven indentation 2", ->
emblem =
"""
/ n
no
nop
nope
"""
shouldCompileTo emblem, ""
test "uneven indentation 3", ->
emblem =
"""
/ n
no
nop
nope
"""
shouldCompileTo emblem, ""
test "empty first line", ->
emblem =
"""
/
nop
nope
nope
no
"""
shouldCompileTo emblem, ""
test "on same line as html content", ->
emblem =
"""
.container / This comment doesn't show up
.row / Nor does this
p Hello
"""
shouldCompileTo emblem, '<div class="container"><div class="row"><p>Hello</p></div></div>'
test "on same line as mustache content", ->
shouldCompileTo 'frank text="YES" text2="NO" / omg', 'WOO: YES NO'
test "on same line as colon syntax", ->
emblem =
"""
ul: li: span / omg
| Hello
"""
shouldCompileTo emblem, '<ul><li><span>Hello</span></li></ul>'
suite "indentation"
# This test used to make sure the emblem code threw, but now we
# support multi-line syntax.
test "it doesn't throw when indenting after a line with inline content", ->
emblem =
"""
p Hello
p invalid
"""
shouldCompileTo emblem, "<p>Hello p invalid</p>"
test "it throws on half dedent", ->
emblem =
"""
p
span This is ok
span This aint
"""
shouldThrow -> CompilerContext.compile emblem
test "new indentation levels don't have to match parents'", ->
emblem =
"""
p
span
div
span yes
"""
shouldCompileTo emblem, "<p><span><div><span>yes</span></div></span></p>"
suite "whitespace fussiness"
test "spaces after html elements", ->
shouldCompileTo "p \n span asd", "<p><span>asd</span></p>"
shouldCompileTo "p \nspan \n\ndiv\nspan", "<p></p><span></span><div></div><span></span>"
test "spaces after mustaches", ->
shouldCompileTo "each foo \n p \n span", { foo: [1,2] }, "<p></p><span></span><p></p><span></span>"
suite "attribute shorthand"
test "id shorthand", ->
shouldCompileTo "#woot", '<div id="woot"></div>'
shouldCompileTo "span#woot", '<span id="woot"></span>'
test "class shorthand", ->
shouldCompileTo ".woot", '<div class="woot"></div>'
shouldCompileTo "span.woot", '<span class="woot"></span>'
shouldCompileTo "span.woot.loot", '<span class="woot loot"></span>'
test "class can come first", ->
shouldCompileTo ".woot#hello", '<div id="hello" class="woot"></div>'
shouldCompileTo "span.woot#hello", '<span id="hello" class="woot"></span>'
shouldCompileTo "span.woot.loot#hello", '<span id="hello" class="woot loot"></span>'
shouldCompileTo "span.woot.loot#hello.boot", '<span id="hello" class="woot loot boot"></span>'
suite "full attributes - tags with content"
test "class only", ->
shouldCompileTo 'p class="yes" Blork', '<p class="yes">Blork</p>'
test "id only", ->
shouldCompileTo 'p id="yes" Hyeah', '<p id="yes">Hyeah</p>'
test "class and id", ->
shouldCompileTo 'p id="yes" class="no" Blork', '<p id="yes" class="no">Blork</p>'
test "class and id and embedded html one-liner", ->
shouldCompileTo 'p id="yes" class="no" One <b>asd</b>!', '<p id="yes" class="no">One <b>asd</b>!</p>'
test "nesting", ->
emblem =
"""
p class="hello" data-foo="gnarly"
span Yes
"""
shouldCompileTo emblem, '<p class="hello" data-foo="gnarly"><span>Yes</span></p>'
suite "full attributes - mixed quotes"
test "single empty", ->
shouldCompileTo "p class=''", '<p class=""></p>'
test "single full", ->
shouldCompileTo "p class='woot yeah'", '<p class="woot yeah"></p>'
test "mixed", ->
shouldCompileTo "p class='woot \"oof\" yeah'", '<p class="woot "oof" yeah"></p>'
suite "full attributes - tags without content"
test "empty", ->
shouldCompileTo 'p class=""', '<p class=""></p>'
test "class only", ->
shouldCompileTo 'p class="yes"', '<p class="yes"></p>'
test "id only", ->
shouldCompileTo 'p id="yes"', '<p id="yes"></p>'
test "class and id", ->
shouldCompileTo 'p id="yes" class="no"', '<p id="yes" class="no"></p>'
suite "full attributes w/ mustaches"
test "with mustache", ->
shouldCompileTo 'p class="foo {{yes}}"', {yes: "ALEX"}, '<p class="foo ALEX"></p>'
shouldCompileTo 'p class="foo {{yes}}" Hello', {yes: "ALEX"}, '<p class="foo ALEX">Hello</p>'
emblem =
"""
p class="foo {{yes}}"
| Hello
"""
shouldCompileTo emblem, {yes: "ALEX"}, '<p class="foo ALEX">Hello</p>'
test "with mustache calling helper", ->
shouldCompileTo 'p class="foo {{{echo "YES"}}}"', '<p class="foo ECHO YES"></p>'
shouldCompileTo 'p class="foo #{echo "NO"} and {{{echo "YES"}}}" Hello', '<p class="foo ECHO NO and ECHO YES">Hello</p>'
emblem =
"""
p class="foo {{echo "BORF"}}"
| Hello
"""
shouldCompileTo emblem, '<p class="foo ECHO BORF">Hello</p>'
suite "boolean attributes"
test "static", ->
shouldCompileTo 'p borf=true', '<p borf></p>'
shouldCompileTo 'p borf=true Woot', '<p borf>Woot</p>'
shouldCompileTo 'p borf=false', '<p></p>'
shouldCompileTo 'p borf=false Nork', '<p>Nork</p>'
shouldCompileTo 'option selected=true Thingeroo', '<option selected>Thingeroo</option>'
#test "dynamic", ->
## TODO
#shouldCompileTo 'p borf=foo', { foo: true }, '<p borf></p>'
#shouldCompileTo 'p borf=foo', { foo: false }, '<p></p>'
#shouldCompileTo 'p borf=foo Yeah', { foo: true }, '<p borf>Yeah</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: false }, '<p>Naww</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: null }, '<p>Naww</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: undefined }, '<p>Naww</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: 0 }, '<p borf="0">Naww</p>'
suite "html nested"
test "basic", ->
emblem =
"""
p
span Hello
strong Hi
div
p Hooray
"""
shouldCompileTo emblem, '<p><span>Hello</span><strong>Hi</strong></p><div><p>Hooray</p></div>'
test "empty nest", ->
emblem =
"""
p
span
strong
i
"""
shouldCompileTo emblem, '<p><span><strong><i></i></strong></span></p>'
test "empty nest w/ attribute shorthand", ->
emblem =
"""
p.woo
span#yes
strong.no.yes
i
"""
shouldCompileTo emblem, '<p class="woo"><span id="yes"><strong class="no yes"><i></i></strong></span></p>'
suite "simple mustache"
test "various one-liners", ->
emblem =
"""
= foo
arf
p = foo
span.foo
p data-foo="yes" = goo
"""
shouldCompileTo emblem,
{ foo: "ASD", arf: "QWE", goo: "WER" },
'ASDQWE<p>ASD</p><span class="foo"></span><p data-foo="yes">WER</p>'
test "double =='s un-escape", ->
emblem =
"""
== foo
foo
p == foo
"""
shouldCompileTo emblem,
{ foo: '<span>123</span>' },
'<span>123</span><span>123</span><p><span>123</span></p>'
test "nested combo syntax", ->
emblem =
"""
ul = each items
li = foo
"""
shouldCompileTo emblem,
{ items: [ { foo: "YEAH"}, { foo: "BOI" } ] },
'<ul><li>YEAH</li><li>BOI</li></ul>'
suite "mustache helpers"
# Reports whether the `what` hash argument is literally true, literally
# false, or anything else ("neither") — used to test boolean attr parsing.
Handlebars.registerHelper 'booltest', (options) ->
  switch options.hash.what
    when true then "true"
    when false then "false"
    else "neither"
# Reports the JS typeof of the `what` hash argument.
Handlebars.registerHelper 'hashtypetest', (options) ->
  typeof options.hash.what
# Reports the JS typeof of the first positional argument.
Handlebars.registerHelper 'typetest', (value, options) ->
  typeof value
# Echoes the `text` and `text2` hash options so tests can assert on
# hashed-parameter parsing.
Handlebars.registerHelper 'frank', ->
  {hash} = arguments[arguments.length - 1]
  "WOO: #{hash.text} #{hash.text2}"
# Renders a <sally> element. The first positional param becomes the class
# (defaulting to "none"); block content, when present, becomes the body,
# otherwise the param itself is echoed. Both inline and block forms
# produce the same markup shape.
Handlebars.registerHelper 'sally', ->
  options = arguments[arguments.length - 1]
  params = Array::slice.call(arguments, 0, -1)
  param = params[0] or 'none'
  content = if options.fn then options.fn(this) else param
  new Handlebars.SafeString "<sally class=\"#{param}\">#{content}</sally>"
test "basic", -> shouldCompileTo 'echo foo', {foo: "YES"}, 'ECHO YES'
test "hashed parameters should work", ->
shouldCompileTo 'frank text="YES" text2="NO"', 'WOO: YES NO'
# Renders every hash pair as 'key'='value', sorted and space-separated,
# so tests can assert on hash contents without relying on key order.
Handlebars.registerHelper 'concatenator', ->
  options = arguments[arguments.length - 1]
  pairs = ("'#{key}'='#{value}'" for key, value of options.hash)
  new Handlebars.SafeString pairs.sort().join(" ")
test "negative integers should work", ->
shouldCompileTo 'concatenator positive=100 negative=-100', "'negative'='-100' 'positive'='100'"
test "booleans", ->
shouldCompileToString 'typetest true', 'boolean'
shouldCompileToString 'typetest false', 'boolean'
shouldCompileTo 'booltest what=false', 'false'
shouldCompileTo 'booltest what=true', 'true'
shouldCompileTo 'booltest what="false"', 'neither'
shouldCompileTo 'booltest what="true"', 'neither'
test "integers", ->
shouldCompileToString 'typetest 200', 'number'
shouldCompileTo 'hashtypetest what=1', 'number'
shouldCompileTo 'hashtypetest what=200', 'number'
test "nesting", ->
emblem =
"""
sally
p Hello
"""
shouldCompileTo emblem, '<sally class="none"><p>Hello</p></sally>'
test "recursive nesting", ->
emblem =
"""
sally
sally
p Hello
"""
shouldCompileTo emblem, '<sally class="none"><sally class="none"><p>Hello</p></sally></sally>'
test "recursive nesting pt 2", ->
emblem =
"""
sally
sally thing
p Hello
"""
shouldCompileTo emblem, { thing: "woot" }, '<sally class="none"><sally class="woot"><p>Hello</p></sally></sally>'
# Stub for Ember's `view` helper: renders <param ...hash>content</param>
# so tests can assert on the generated markup. The extra positional
# parameters (a, b, c) are accepted but unused.
Handlebars.registerHelper 'view', (param, a, b, c) ->
  options = arguments[arguments.length - 1]
  # Inline form echoes the param; block form renders the block body.
  content = param
  content = options.fn @ if options.fn
  hashString = ""
  for own k,v of options.hash
    hashString += " #{k}=#{v}"
  # Marker so tests can assert that no hash options were passed.
  hashString = " nohash" unless hashString
  new Handlebars.SafeString """<#{param}#{hashString}>#{content}</#{param}>"""
suite "capitalized line-starter"
test "should invoke `view` helper by default", ->
emblem =
"""
SomeView
"""
shouldEmberPrecompileToHelper emblem, 'view'
#shouldCompileToString emblem, '<SomeView nohash>SomeView</SomeView>'
test "should not invoke `view` helper for vanilla HB", ->
emblem =
"""
SomeView
"""
shouldCompileToString emblem, {SomeView: "ALEX"}, 'ALEX'
test "should support block mode", ->
emblem =
"""
SomeView
p View content
"""
#shouldCompileToString emblem, '<SomeView nohash><p>View content</p></SomeView>'
shouldEmberPrecompileToHelper emblem, 'view'
test "should not kick in if preceded by equal sign", ->
emblem =
"""
= SomeView
"""
shouldCompileTo emblem, { SomeView: 'erp' }, 'erp'
test "should not kick in explicit {{mustache}}", ->
emblem =
"""
p Yeah {{SomeView}}
"""
shouldCompileTo emblem, { SomeView: 'erp' }, '<p>Yeah erp</p>'
# TODO test overriding the default helper name (instead of always "view")
suite "bang syntax defaults to `unbound` helper syntax"
# Stub for Ember's `unbound` helper: renders an <unbound> element whose
# class lists the positional params; block content becomes the body,
# otherwise the joined params are echoed.
Handlebars.registerHelper 'unbound', ->
  options = arguments[arguments.length - 1]
  params = Array::slice.call(arguments, 0, arguments.length - 1)
  joined = params.join(' ')
  body = if options.fn then options.fn(@) else joined
  new Handlebars.SafeString "<unbound class=\"#{joined}\">#{body}</unbound>"
test "bang helper defaults to `unbound` invocation", ->
emblem =
"""
foo! Yar
= foo!
"""
shouldCompileToString emblem, '<unbound class="foo Yar">foo Yar</unbound><unbound class="foo">foo</unbound>'
test "bang helper works with blocks", ->
emblem =
"""
hey! you suck
= foo!
"""
shouldCompileToString emblem, '<unbound class="hey you suck"><unbound class="foo">foo</unbound></unbound>'
suite "question mark syntax defaults to `if` helper syntax"
test "? helper defaults to `if` invocation", ->
emblem =
"""
foo?
p Yeah
"""
shouldCompileTo emblem, { foo: true }, '<p>Yeah</p>'
test "else works", ->
emblem =
"""
foo?
p Yeah
else
p No
"""
shouldCompileTo emblem, { foo: false }, '<p>No</p>'
test "compound", ->
emblem =
"""
p = foo?
| Hooray
else
| No
p = bar?
| Hooray
else
| No
"""
shouldCompileTo emblem, { foo: true, bar: false }, '<p>Hooray</p><p>No</p>'
test "compound", ->
emblem =
"""
p = foo?
bar
else
baz
"""
shouldCompileTo emblem, { foo: true, bar: "borf", baz: "narsty" }, '<p>borf</p>'
suite "conditionals"
test "simple if statement", ->
emblem =
"""
if foo
| Foo
if bar
| Bar
"""
shouldCompileTo emblem, {foo: true, bar: false}, 'Foo'
test "if else ", ->
emblem =
"""
if foo
| Foo
if bar
| Bar
else
| Woot
else
| WRONG
if bar
| WRONG
else
| Hooray
"""
shouldCompileTo emblem, {foo: true, bar: false}, 'FooWootHooray'
test "else with preceding `=`", ->
emblem =
"""
= if foo
p Yeah
= else
p No
= if bar
p Yeah!
= else
p No!
=if bar
p Yeah!
=else
p No!
"""
shouldCompileTo emblem, {foo: true, bar: false}, '<p>Yeah</p><p>No!</p><p>No!</p>'
test "unless", ->
emblem =
"""
unless bar
| Foo
unless foo
| Bar
else
| Woot
else
| WRONG
unless foo
| WRONG
else
| Hooray
"""
shouldCompileTo emblem, {foo: true, bar: false}, 'FooWootHooray'
test "else followed by newline doesn't gobble else content", ->
emblem =
"""
if something
p something
else
if nothing
p nothing
else
p not nothing
"""
shouldCompileTo emblem, {}, '<p>not nothing</p>'
suite "class shorthand and explicit declaration is coalesced"
test "when literal class is used", ->
shouldCompileTo 'p.foo class="bar"', '<p class="foo bar"></p>'
test "when ember expression is used with variable", ->
shouldCompileTo 'p.foo class=bar', {bar: 'baz'}, '<p bind-attr class to :foo bar></p>'
test "when ember expression is used with variable in braces", ->
result = shouldEmberPrecompileToHelper 'p.foo class={ bar }'
ok -1 != result.indexOf '\'class\': (":foo bar")'
test "when ember expression is used with constant in braces", ->
result = shouldEmberPrecompileToHelper 'p.foo class={ :bar }'
ok -1 != result.indexOf '\'class\': (":foo :bar")'
test "when ember expression is used with constant and variable in braces", ->
result = shouldEmberPrecompileToHelper 'p.foo class={ :bar bar }'
ok -1 != result.indexOf '\'class\': (":foo :bar bar")'
test "when ember expression is used with bind-attr", ->
result = shouldEmberPrecompileToHelper 'p.foo{ bind-attr class="bar" }'
ok -1 != result.indexOf '\'class\': (":foo bar")'
test "when ember expression is used with bind-attr and multiple attrs", ->
result = shouldEmberPrecompileToHelper 'p.foo{ bind-attr something=bind class="bar" }'
ok -1 != result.indexOf '\'class\': (":foo bar")'
test "only with bind-attr helper", ->
result = shouldEmberPrecompileToHelper 'p.foo{ someHelper class="bar" }', 'someHelper'
ok -1 != result.indexOf '\'class\': ("bar")'
ok -1 != result.indexOf 'class=\\"foo\\"'
# Stub for Ember's `bind-attr` helper: renders a textual description of
# the requested bindings ("bind-attr k to v ...") so tests can assert on
# output; " narf" marks an empty hash. Registered on both the vanilla and
# Ember Handlebars environments.
bindAttrHelper = ->
  options = arguments[arguments.length - 1]
  bindingString = ""
  for own k,v of options.hash
    bindingString += " #{k} to #{v}"
  bindingString = " narf" unless bindingString
  # Note: the original also computed `params`/`param` locals that were
  # never used; they have been removed.
  "bind-attr#{bindingString}"
Handlebars.registerHelper 'bind-attr', bindAttrHelper
EmberHandlebars.registerHelper 'bind-attr', bindAttrHelper
suite "bind-attr behavior for unquoted attribute values"
test "basic", ->
emblem = 'p class=foo'
shouldCompileTo emblem, {foo:"YEAH"}, '<p class="YEAH"></p>'
shouldEmberPrecompileToHelper emblem
test "basic w/ underscore", ->
emblem = 'p class=foo_urns'
shouldCompileTo emblem, {foo_urns: "YEAH"}, '<p class="YEAH"></p>'
shouldEmberPrecompileToHelper emblem
test "subproperties", ->
emblem = 'p class=foo._death.woot'
shouldCompileTo emblem, {foo: { _death: { woot: "YEAH" } }}, '<p class="YEAH"></p>'
shouldEmberPrecompileToHelper emblem
test "multiple", ->
shouldCompileTo 'p class=foo id="yup" data-thinger=yeah Hooray', { foo: "FOO", yeah: "YEAH" },
'<p class="FOO" id="yup" data-thinger="YEAH">Hooray</p>'
test "class bind-attr special syntax", ->
emblem = 'p class=foo:bar:baz'
shouldEmberPrecompileToHelper emblem
shouldThrow (-> CompilerContext.compile emblem)
test "class bind-attr braced syntax w/ underscores and dashes", ->
shouldEmberPrecompileToHelper 'p class={f-oo:bar :b_az}'
shouldEmberPrecompileToHelper 'p class={ f-oo:bar :b_az }'
shouldEmberPrecompileToHelper 'p class={ f-oo:bar :b_az } Hello'
emblem =
"""
.input-prepend class={ filterOn:input-append }
span.add-on
"""
shouldEmberPrecompileToHelper emblem
test "exclamation modifier (vanilla)", ->
emblem = 'p class=foo!'
# exclamation is no-op in vanilla HB
shouldCompileTo emblem, {foo:"YEAH"}, '<p class="YEAH"></p>'
test "exclamation modifier (ember)", ->
emblem = 'p class=foo!'
result = precompileEmber emblem
ok result.match /p class/
ok result.match /helpers\.unbound.*foo/
suite "in-tag explicit mustache"
# Echoes its argument; used to inject raw content inside a tag's
# opening bracket.
Handlebars.registerHelper 'inTagHelper', (p) ->
  p
test "single", ->
shouldCompileTo 'p{inTagHelper foo}', {foo: "ALEX"}, '<p ALEX></p>'
test "double", ->
shouldCompileTo 'p{inTagHelper foo}', {foo: "ALEX"}, '<p ALEX></p>'
test "triple", ->
shouldCompileTo 'p{inTagHelper foo}', {foo: "ALEX"}, '<p ALEX></p>'
# Renders a literal class="<p>" attribute string for in-tag mustache tests.
Handlebars.registerHelper 'insertClass', (p) ->
  "class=\"#{p}\""
test "with singlestache", ->
shouldCompileTo 'p{insertClass foo} Hello', {foo: "yar"}, '<p class="yar">Hello</p>'
test "singlestache can be used in text nodes", ->
shouldCompileTo 'p Hello {dork}', '<p>Hello {dork}</p>'
test "with doublestache", ->
shouldCompileTo 'p{{insertClass foo}} Hello', {foo: "yar"}, '<p class="yar">Hello</p>'
test "with triplestache", ->
shouldCompileTo 'p{{{insertClass foo}}} Hello', {foo: "yar"}, '<p class="yar">Hello</p>'
test "multiple", ->
shouldCompileTo 'p{{{insertClass foo}}}{{{insertClass boo}}} Hello',
{foo: "yar", boo: "nar"},
'<p class="yar" class="nar">Hello</p>'
test "with nesting", ->
emblem =
"""
p{{bind-attr class="foo"}}
span Hello
"""
shouldCompileTo emblem, {foo: "yar"},
'<p bind-attr class to foo><span>Hello</span></p>'
suite "actions"
# Stub for Ember's `action` helper: renders "action <params>|<hash>" as
# plain text so tests can assert which action, params, and options were
# wired up by the compiler.
Handlebars.registerHelper 'action', ->
  options = arguments[arguments.length - 1]
  params = Array::slice.call arguments, 0, -1
  hashString = ""
  # Positional params are joined with '|' for easy matching in assertions.
  paramsString = params.join('|')
  # TODO: bad because it relies on hash ordering?
  # is this guaranteed? guess it doesn't rreeeeeally
  # matter since order's not being tested.
  for own k,v of options.hash
    hashString += " #{k}=#{v}"
  # Marker so tests can assert that no hash options were passed.
  hashString = " nohash" unless hashString
  "action #{paramsString}#{hashString}"
test "basic (click)", ->
emblem =
"""
button click="submitComment" Submit Comment
"""
shouldCompileToString emblem, '<button action submitComment on=click>Submit Comment</button>'
test "basic (click) followed by attr", ->
emblem =
"""
button click="submitComment" class="foo" Submit Comment
"""
shouldCompileToString emblem, '<button action submitComment on=click class="foo">Submit Comment</button>'
emblem =
"""
button click="submitComment 'omg'" class="foo" Submit Comment
"""
shouldCompileToString emblem, '<button action submitComment|omg on=click class="foo">Submit Comment</button>'
test "nested (mouseEnter)", ->
emblem =
"""
a mouseEnter='submitComment target="view"'
| Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view on=mouseEnter>Submit Comment</a>'
test "nested (mouseEnter, doublequoted)", ->
emblem =
"""
a mouseEnter="submitComment target='view'"
| Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view on=mouseEnter>Submit Comment</a>'
test "manual", ->
emblem =
"""
a{action submitComment target="view"} Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view>Submit Comment</a>'
test "manual nested", ->
emblem =
"""
a{action submitComment target="view"}
p Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view><p>Submit Comment</p></a>'
suite "haml style"
test "basic", ->
emblem =
"""
%borf
"""
shouldCompileToString emblem, '<borf></borf>'
test "nested", ->
emblem =
"""
%borf
%sporf Hello
"""
shouldCompileToString emblem, '<borf><sporf>Hello</sporf></borf>'
test "capitalized", ->
emblem =
"""
%Alex alex
%Alex
%Woot
"""
shouldCompileToString emblem, '<Alex>alex</Alex><Alex><Woot></Woot></Alex>'
test "funky chars", ->
emblem =
"""
%borf:narf
%borf:narf Hello, {{foo}}.
%alex = foo
"""
shouldCompileToString emblem,
{ foo: "Alex" },
'<borf:narf></borf:narf><borf:narf>Hello, Alex.</borf:narf><alex>Alex</alex>'
suite "line-based errors"
test "line number is provided for pegjs error", ->
emblem =
"""
p Hello
p Hello {{narf}
"""
shouldThrow (-> CompilerContext.compile emblem), "line 2"
# https://github.com/machty/emblem.js/issues/6
test "single quote test", ->
emblem =
"""
button click='p' Frank
/ form s='d target="App"'
label I'm a label!
"""
shouldCompileToString emblem, '<button action p on=click>Frank</button>'
test "double quote test", ->
emblem =
"""
button click="p" Frank
/ form s='d target="App"'
label I'm a label!
"""
shouldCompileToString emblem, '<button action p on=click>Frank</button>'
test "no quote test", ->
emblem =
"""
button click=p Frank
/ form s='d target="App"'
label I'm a label!
"""
shouldCompileToString emblem, '<button action p on=click>Frank</button>'
suite "mustache DOM attribute shorthand"
test "tagName w/o space", ->
emblem =
"""
App.FunView%span
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /tagName.*span/
test "tagName w/ space", ->
emblem =
"""
App.FunView %span
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /tagName.*span/
test "tagName block", ->
emblem =
"""
view App.FunView%span
p Hello
"""
shouldCompileToString emblem, '<App.FunView tagName=span><p>Hello</p></App.FunView>'
test "class w/ space (needs space)", ->
emblem =
"""
App.FunView .bork
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /class.*bork/
test "multiple classes", ->
emblem =
"""
App.FunView .bork.snork
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /class.*bork.*snork/
test "elementId", ->
emblem =
"""
App.FunView#ohno
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /elementId.*ohno/
test "mixed w/ hash`", ->
emblem =
"""
App.FunView .bork.snork funbags="yeah"
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /class.*bork.*snork/
ok result.match /hash/
ok result.match /funbags/
ok result.match /yeah/
test "mixture of all`", ->
emblem =
"""
App.FunView%alex#hell.bork.snork funbags="yeah"
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /tagName.*alex/
ok result.match /elementId.*hell/
ok result.match /class.*bork.*snork/
ok result.match /hash/
ok result.match /funbags/
ok result.match /yeah/
suite "self-closing html tags"
test "br", ->
emblem =
"""
br
"""
shouldCompileToString emblem, '<br />'
test "br paragraph example", ->
emblem =
"""
p
| LOL!
br
| BORF!
"""
shouldCompileToString emblem, '<p>LOL!<br />BORF!</p>'
test "input", ->
emblem =
"""
input type="text"
"""
shouldCompileToString emblem, '<input type="text" />'
suite "ember."
test "should precompile with EmberHandlebars", ->
emblem =
"""
input type="text"
"""
result = Emblem.precompile(EmberHandlebars, 'p Hello').toString()
ok result.match '<p>Hello</p>'
suite "old school handlebars"
test "array", ->
emblem =
'''
goodbyes
| #{text}!
| cruel #{world}!
'''
hash = {goodbyes: [{text: "goodbye"}, {text: "Goodbye"}, {text: "GOODBYE"}], world: "world"}
shouldCompileToString emblem, hash, "goodbye! Goodbye! GOODBYE! cruel world!"
hash = {goodbyes: [], world: "world"}
shouldCompileToString emblem, hash, "cruel world!"
Handlebars.registerPartial('hbPartial', '<a href="/people/{{id}}">{{name}}</a>')
test "calling handlebars partial", ->
emblem =
'''
> hbPartial
| Hello #{> hbPartial}
'''
shouldCompileToString emblem,
{ id: 666, name: "Death" },
'<a href="/people/666">Death</a>Hello <a href="/people/666">Death</a>'
Emblem.registerPartial(Handlebars, 'emblemPartial', 'a href="/people/{{id}}" = name')
Emblem.registerPartial(Handlebars, 'emblemPartialB', 'p Grr')
Emblem.registerPartial(Handlebars, 'emblemPartialC', 'p = a')
test "calling emblem partial", ->
shouldCompileToString '> emblemPartial', { id: 666, name: "Death" }, '<a href="/people/666">Death</a>'
test "calling emblem partial with context", ->
shouldCompileToString '> emblemPartialC foo', { foo: { a: "YES" } }, '<p>YES</p>'
test "partials in mustaches", ->
emblem =
"""
| Hello, {{> emblemPartialC foo}}{{>emblemPartialB}}{{>emblemPartialB }}
"""
shouldCompileToString emblem, { foo: { a: "YES" } }, 'Hello, <p>YES</p><p>Grr</p><p>Grr</p>'
test "handlebars dot-separated paths with segment-literal notation", ->
emblem =
'''
p = articles.[3]
'''
shouldCompileTo emblem, { articles: ['zero', 'one', 'two', 'three']}, '<p>three</p>'
test "handlebars dot-separated paths with segment-literal notation, more nesting", ->
emblem =
'''
p = articles.[3].[#comments].[0]
'''
shouldCompileTo emblem, { articles: [{}, {}, {}, {'#comments': ['bazinga']}]}, '<p>bazinga</p>'
test "../path as inMustacheParam recognized correctly as pathIdNode instead of classShorthand", ->
Handlebars.registerHelper 'jumpToParent', (link) ->
new Handlebars.SafeString "<a href='#{link}'>Jump to parent top</a>"
emblem =
'''
each children
jumpToParent ../parentLink
'''
shouldCompileTo emblem, {parentLink: '#anchor', children: [{}]}, '<a href=\'#anchor\'>Jump to parent top</a>'
test "block as #each", ->
emblem =
'''
thangs
p Woot #{yeah}
'''
shouldCompileToString emblem, { thangs: [{yeah: 123}, {yeah:456}] }, '<p>Woot 123</p><p>Woot 456</p>'
if supportsEachHelperDataKeywords
suite "each block helper keywords prefixed by @"
test "#each with @index", ->
emblem =
'''
thangs
p #{@index} Woot #{yeah}
'''
shouldCompileToString emblem, { thangs: [{yeah: 123}, {yeah:456}] }, '<p>0 Woot 123</p><p>1 Woot 456</p>'
test "#each with @key", ->
emblem =
'''
each thangs
p #{@key}: #{this}
'''
shouldCompileTo emblem, { thangs: {'@key': 123, 'works!':456} }, '<p>@key: 123</p><p>works!: 456</p>'
test "#each with @key, @index", ->
emblem =
'''
each thangs
p #{@index} #{@key}: #{this}
'''
shouldCompileTo emblem, { thangs: {'@key': 123, 'works!':456} }, '<p>0 @key: 123</p><p>1 works!: 456</p>'
test "#each with @key, @first", ->
emblem =
'''
each thangs
if @first
p First item
else
p #{@key}: #{this}
'''
shouldCompileTo emblem, { thangs: {'@key': 123, 'works!':456} }, '<p>First item</p><p>works!: 456</p>'
###
test "partial in block", ->
emblem =
"""
ul = people
> link
"""
data =
people: [
{ "name": "Alan", "id": 1 }
{ "name": "Yehuda", "id": 2 }
]
shouldCompileToString emblem, data, '<ul><a href="/people/1">Alan</a><a href="/people/2">Yehuda</a><ul>'
###
#suite "helper hash"
#test "quoteless values get treated as bindings", ->
#emblem =
#"""
#view SomeView a=b
#| Yes
#"""
#shouldCompileToString emblem, '<SomeView aBinding=b>Yes</SomeView>'
#test "more complex", ->
#emblem =
#"""
#view SomeView a=b foo=thing.gnar
#"""
#shouldCompileToString emblem, '<SomeView aBinding=b fooBinding=thing.gnar>SomeView</SomeView>'
suite "inline block helper"
test "text only", ->
emblem =
"""
view SomeView | Hello
"""
shouldCompileToString emblem, '<SomeView nohash>Hello</SomeView>'
test "multiline", ->
emblem =
"""
view SomeView | Hello,
How are you?
Sup?
"""
shouldCompileToString emblem, '<SomeView nohash>Hello, How are you? Sup?</SomeView>'
test "more complicated", ->
emblem =
"""
view SomeView borf="yes" | Hello,
How are you?
Sup?
"""
shouldCompileToString emblem, '<SomeView borf=yes>Hello, How are you? Sup?</SomeView>'
suite "copy paste html"
test "indented", ->
emblem =
"""
<p>
<span>This be some text</span>
<title>Basic HTML Sample Page</title>
</p>
"""
shouldCompileToString emblem, '<p><span>This be some text</span><title>Basic HTML Sample Page</title></p>'
test "flatlina", ->
emblem =
"""
<p>
<span>This be some text</span>
<title>Basic HTML Sample Page</title>
</p>
"""
shouldCompileToString emblem, '<p><span>This be some text</span><title>Basic HTML Sample Page</title></p>'
test "bigass", ->
expect(0)
return "PENDING"
emblem =
"""
<div class="content">
<p>
We design and develop ambitious web and mobile applications,
</p>
<p>
A more official portfolio page is on its way, but in the meantime,
check out
</p>
</div>
"""
expected = '<div class="content"><p> We design and develop ambitious web and mobile applications, </p><p> A more official portfolio page is on its way, but in the meantime, check out</p></div>'
shouldCompileToString emblem, expected
suite "`this` keyword"
test "basic", ->
emblem = '''
each foo
p = this
this
'''
shouldCompileTo emblem,
{ foo: [ "Alex", "Emily" ] },
'<p>Alex</p>Alex<p>Emily</p>Emily'
suite "colon separator"
test "basic", ->
emblem = 'each foo: p Hello, #{this}'
shouldCompileTo emblem,
{ foo: [ "Alex", "Emily", "Nicole" ] },
'<p>Hello, Alex</p><p>Hello, Emily</p><p>Hello, Nicole</p>'
test "html stack", ->
emblem = '.container: .row: .span5: span Hello'
shouldCompileToString emblem,
'<div class="container"><div class="row"><div class="span5"><span>Hello</span></div></div></div>'
test "epic", ->
emblem = '''
.container: .row: .span5
ul#list data-foo="yes": each foo: li
span: this
'''
shouldCompileTo emblem, { foo: ["a","b"] },
'<div class="container"><div class="row"><div class="span5"><ul id="list" data-foo="yes"><li><span>a</span></li><li><span>b</span></li></ul></div></div></div>'
test "html stack elements only", ->
emblem = 'p: span: div: p: foo'
shouldCompileToString emblem, { foo: "alex" },
'<p><span><div><p>alex</p></div></span></p>'
test "mixed separators", ->
emblem = '.fun = each foo: %nork = this'
shouldCompileTo emblem,
{ foo: [ "Alex", "Emily", "Nicole" ] },
'<div class="fun"><nork>Alex</nork><nork>Emily</nork><nork>Nicole</nork></div>'
test "mixed separators rewritten", ->
emblem = '.fun: each foo: %nork: this'
shouldCompileTo emblem,
{ foo: [ "Alex", "Emily", "Nicole" ] },
'<div class="fun"><nork>Alex</nork><nork>Emily</nork><nork>Nicole</nork></div>'
test "with text terminator", ->
emblem = '.fun: view SomeView | Hello'
shouldCompileToString emblem, '<div class="fun"><SomeView nohash>Hello</SomeView></div>'
test "test from heartsentwined", ->
shouldCompileTo 'li data-foo=bar: a', { bar: "abc" }, '<li data-foo="abc"><a></a></li>'
shouldCompileTo "li data-foo='bar': a", '<li data-foo="bar"><a></a></li>'
test "mixture of colon and indentation", ->
emblem = """
li data-foo=bar: a
baz
"""
shouldCompileTo emblem, { bar: "abc", baz: "Hello" }, '<li data-foo="abc"><a>Hello</a></li>'
test "mixture of colon and indentation pt.2", ->
emblem = """
ul
li data-foo=bar: a quux
li data-foo='bar': a quux
li data-foo=bar href='#': a quux
"""
result = precompileEmber emblem
ok(!result.match "a quux")
suite "base indent / predent"
test "predent", ->
emblem = " \n"
s =
"""
pre
` This
` should
` hopefully
` work, and work well.
"""
emblem += s
shouldCompileToString emblem, '<pre>This\n should\n hopefully\n work, and work well.\n</pre>'
test "mixture", ->
emblem = " \n"
emblem += " p Hello\n"
emblem += " p\n"
emblem += " | Woot\n"
emblem += " span yes\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span>'
test "mixture w/o opening blank", ->
emblem = " p Hello\n"
emblem += " p\n"
emblem += " | Woot\n"
emblem += " span yes\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span>'
test "w/ blank lines", ->
emblem = " p Hello\n"
emblem += " p\n"
emblem += "\n"
emblem += " | Woot\n"
emblem += "\n"
emblem += " span yes\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span>'
test "w/ blank whitespaced lines", ->
emblem = " p Hello\n"
emblem += " p\n"
emblem += "\n"
emblem += " | Woot\n"
emblem += " \n"
emblem += " \n"
emblem += " \n"
emblem += "\n"
emblem += " span yes\n"
emblem += "\n"
emblem += " sally\n"
emblem += "\n"
emblem += " \n"
emblem += " | Woot\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span><sally class="none">Woot</sally>'
suite "EOL Whitespace"
test "shouldn't be necessary to insert a space", ->
emblem =
"""
p Hello,
How are you?
p I'm fine, thank you.
"""
shouldCompileToString emblem, "<p>Hello, How are you?</p><p>I'm fine, thank you.</p>"
suite "misc."
test "end with indent", ->
expect(0)
return "PENDING"
emblem =
"""
div
p
span Butts
em fpokasd
iunw
paosdk
"""
shouldCompileToString emblem, '<div><p><span>Buttsem fpokasdiunw paosdk</span></p></div>'
test "capitalized view helper should not kick in if suffix modifiers present", ->
emblem =
"""
Foo!
"""
shouldCompileToString emblem, '<unbound class="Foo">Foo</unbound>'
test "GH-26: no need for space before equal sign", ->
emblem =
"""
span= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<span>YEAH</span>'
emblem =
"""
span.foo= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<span class="foo">YEAH</span>'
emblem =
"""
span#hooray.foo= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<span id="hooray" class="foo">YEAH</span>'
emblem =
"""
#hooray= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<div id="hooray">YEAH</div>'
emblem =
"""
.hooray= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<div class="hooray">YEAH</div>'
test "numbers in shorthand", ->
shouldCompileToString '#4a', '<div id="4a"></div>'
shouldCompileToString '.4a', '<div class="4a"></div>'
shouldCompileToString '.4', '<div class="4"></div>'
shouldCompileToString '#4', '<div id="4"></div>'
shouldCompileToString '%4', '<4></4>'
shouldCompileToString '%4 ermagerd', '<4>ermagerd</4>'
shouldCompileToString '%4#4.4 ermagerd', '<4 id="4" class="4">ermagerd</4>'
test "Emblem has a VERSION defined", ->
ok(Emblem.VERSION, "Emblem.VERSION should be defined")
test "Windows line endings", ->
emblem = ".navigation\r\n p Hello\r\n#main\r\n | hi"
shouldCompileToString emblem, '<div class="navigation"><p>Hello</p></div><div id="main">hi</div>'
test "backslash doesn't cause infinite loop", ->
emblem =
'''
| \\
'''
shouldCompileTo emblem, "\\"
test "backslash doesn't cause infinite loop with letter", ->
emblem =
'''
| \\a
'''
shouldCompileTo emblem, "\\a"
test "self closing tag with forward slash", ->
emblem =
'''
p/
%bork/
.omg/
#hello.boo/
p/ class="asdasd"
'''
shouldCompileTo emblem, '<p /><bork /><div class="omg" /><div id="hello" class="boo" /><p class="asdasd" />'
test "tagnames and attributes with colons", ->
emblem =
'''
%al:ex match:neer="snork" Hello!
'''
shouldCompileTo emblem, '<al:ex match:neer="snork">Hello!</al:ex>'
test "windows newlines", ->
emblem = "\r\n \r\n p Hello\r\n\r\n"
shouldCompileTo emblem, '<p>Hello</p>'
if supportsSubexpressions
suite "subexpressions"
Handlebars.registerHelper 'echo', (param) ->
"ECHO #{param}"
Handlebars.registerHelper 'echofun', ->
options = Array.prototype.pop.call(arguments)
"FUN = #{options.hash.fun}"
Handlebars.registerHelper 'hello', (param) ->
"hello"
Handlebars.registerHelper 'equal', (x, y) ->
x == y
test "arg-less helper", ->
emblem = 'p {{echo (hello)}}'
shouldCompileTo emblem, '<p>ECHO hello</p>'
emblem = '= echo (hello)'
shouldCompileTo emblem, 'ECHO hello'
test "helper w args", ->
emblem = 'p {{echo (equal 1 1)}}'
shouldCompileTo emblem, '<p>ECHO true</p>'
emblem = '= echo (equal 1 1)'
shouldCompileTo emblem, 'ECHO true'
test "supports much nesting", ->
emblem = 'p {{echo (equal (equal 1 1) true)}}'
shouldCompileTo emblem, '<p>ECHO true</p>'
emblem = '= echo (equal (equal 1 1) true)'
shouldCompileTo emblem, 'ECHO true'
test "with hashes", ->
emblem = 'p {{echo (equal (equal 1 1) true fun="yes")}}'
shouldCompileTo emblem, '<p>ECHO true</p>'
emblem = '= echo (equal (equal 1 1) true fun="yes")'
shouldCompileTo emblem, 'ECHO true'
test "as hashes", ->
emblem = 'p {{echofun fun=(equal 1 1)}}'
shouldCompileTo emblem, '<p>FUN = true</p>'
emblem = '= echofun fun=(equal 1 1)'
shouldCompileTo emblem, 'FUN = true'
test "complex expression", ->
emblem = 'p {{echofun true (hello how="are" you=false) 1 not=true fun=(equal "ECHO hello" (echo (hello))) win="yes"}}'
shouldCompileTo emblem, '<p>FUN = true</p>'
emblem = '= echofun true (hello how="are" you=false) 1 not=true fun=(equal "ECHO hello" (echo (hello))) win="yes"'
shouldCompileTo emblem, 'FUN = true'
| 175762 |
# Test Setup: Set up an environment that'll work for both Node and QUnit tests.
# NOTE(review): this reads window?.Emblem but assigns to `Ember` — confirm intended.
Ember = window?.Emblem || @Emblem || {}
# These are needed for the full version ember to load properly
LoadedEmber = LoadedEmber || {}
Ember.Handlebars = LoadedEmber.Handlebars
Ember.warn = LoadedEmber.warn

if Emblem?
  # QUnit testing: wrap equal() so the message argument may be omitted.
  _equal = equal
  equals = equal = (a, b, msg) ->
    # Allow exec with missing message params
    _equal(a, b, msg || '')
  # In QUnit, we use module() instead of nonexistent suite()
  window.suite = module
else
  # Setup for Node package testing
  Handlebars = require('handlebars')
  EmberHandlebars = require('./resources/ember-template-compiler.js').EmberHandlebars
  Emblem = require('../lib/emblem')
  # TODO: replace with real expect()
  `expect = function() {};`
  {equal, equals, ok, throws} = require("assert")

unless CompilerContext?
  # Note that this doesn't have the same context separation as the rspec test.
  # Both should be run for full acceptance of the two library modes.
  CompilerContext =
    compile: (template, options) ->
      Emblem.compile(Handlebars, template, options)

# Feature flags by Handlebars version: @index/@key etc. need >= 1.2,
# subexpressions need >= 1.3.
# NOTE(review): compares a 3-char string slice against a number — works for
# "1.x" versions but would misbehave for e.g. "1.10"; confirm acceptable.
supportsEachHelperDataKeywords = Handlebars.VERSION.slice(0, 3) >= 1.2
supportsSubexpressions = Handlebars.VERSION.slice(0, 3) >= 1.3
# Precompile an Emblem template with the Ember Handlebars compiler and
# return the generated template source as a string.
precompileEmber = (emblem) ->
  "#{Emblem.precompile(EmberHandlebars, emblem)}"
# Assert that Ember-precompiled output references the given helper, either
# as `helpers.<name>` or `helpers['<name>']`. Returns the precompiled
# source so callers can make further assertions on it.
shouldEmberPrecompileToHelper = (emblem, helper = 'bind-attr') ->
  compiled = precompileEmber emblem
  dotForm = compiled.match "helpers.#{helper}"
  bracketForm = compiled.match "helpers\\['#{helper}'\\]"
  ok dotForm or bracketForm
  compiled
# Like shouldCompileTo, but compiles with `stringParams: true`.
# The context hash may be omitted, in which case the second argument is
# taken as the expected output.
shouldCompileToString = (string, hashOrArray, expected) ->
  [context, output] =
    if hashOrArray.constructor == String then [{}, hashOrArray]
    else [hashOrArray, expected]
  shouldCompileToWithPartials(string, context, false, output, null, true)
# Compile `string` and assert the rendered result equals `expected`.
# The context hash may be omitted, in which case the second argument is
# taken as the expected output.
shouldCompileTo = (string, hashOrArray, expected, message) ->
  [context, output] =
    if hashOrArray.constructor == String then [{}, hashOrArray]
    else [hashOrArray, expected]
  shouldCompileToWithPartials(string, context, false, output, message)
# Core assertion: compile `string`, render it against `hashOrArray`, and
# compare the result with `expected`.
#
# string      - Emblem source to compile.
# hashOrArray - template context, or [context, helpers, partials].
# partials    - forwarded to compileWithPartials.
# expected    - the exact rendered output.
# message     - optional extra text appended to the failure message.
# strings     - when true, compile with `stringParams: true`.
shouldCompileToWithPartials = (string, hashOrArray, partials, expected, message, strings) ->
  options = null
  if strings
    options = {}
    options.stringParams = true
  result = compileWithPartials(string, hashOrArray, partials, options)
  # Fix: an omitted `message` previously rendered as a literal ": undefined".
  equal(result, expected, "'" + result + "' should === '" + expected + "': " + (message ? ''))
# Compile `string` and render it. When `hashOrArray` is an array it is
# treated as [context, helpers, partials]; globally-registered helpers are
# merged into the supplied helpers object before rendering.
compileWithPartials = (string, hashOrArray, partials, options = {}) ->
  template = CompilerContext.compile(string, options)
  if Object::toString.call(hashOrArray) == "[object Array]"
    helpers = hashOrArray[1]
    if helpers
      # Fill in any globally-registered helper not already overridden.
      for name of Handlebars.helpers
        helpers[name] = helpers[name] or Handlebars.helpers[name]
    callArgs = [
      hashOrArray[0]
      { helpers: hashOrArray[1], partials: hashOrArray[2] }
    ]
  else
    callArgs = [hashOrArray]
  template.apply(this, callArgs)
# Assert that `fn` throws. When `exMessage` is given, the thrown error's
# message must also match it.
shouldThrow = (fn, exMessage) ->
  threw = false
  try
    fn()
  catch err
    threw = true
    ok(err.message.match(exMessage), "exception message matched") if exMessage
  ok(threw, "an exception was thrown")
# Simple helper used across the suites: prefixes its argument with "ECHO ".
Handlebars.registerHelper 'echo', (param) ->
  "ECHO #{param}"
suite "html one-liners"
test "element only", ->
shouldCompileTo "p", "<p></p>"
test "with text", ->
shouldCompileTo "p Hello", "<p>Hello</p>"
test "with more complex text", ->
shouldCompileTo "p Hello, how's it going with you today?", "<p>Hello, how's it going with you today?</p>"
test "with trailing space", ->
shouldCompileTo "p Hello ", "<p>Hello </p>"
suite "html multi-lines"
test "two lines", ->
emblem =
"""
p This is
pretty cool.
"""
shouldCompileTo emblem, "<p>This is pretty cool.</p>"
test "three lines", ->
emblem =
"""
p This is
pretty damn
cool.
"""
shouldCompileTo emblem, "<p>This is pretty damn cool.</p>"
test "three lines w/ embedded html", ->
emblem =
"""
p This is
pretty <span>damn</span>
cool.
"""
shouldCompileTo emblem, "<p>This is pretty <span>damn</span> cool.</p>"
test "indentation doesn't need to match starting inline content's", ->
emblem =
"""
span Hello,
How are you?
"""
shouldCompileTo emblem, "<span>Hello, How are you?</span>"
test "indentation may vary between parent/child, must be consistent within inline-block", ->
emblem =
"""
div
span Hello,
How are you?
Excellent.
p asd
"""
shouldCompileTo emblem, "<div><span>Hello, How are you? Excellent.</span><p>asd</p></div>"
emblem =
"""
div
span Hello,
How are you?
Excellent.
"""
shouldThrow -> CompilerContext.compile emblem
test "indentation may vary between parent/child, must be consistent within inline-block pt 2", ->
emblem =
"""
div
span Hello,
How are you?
Excellent.
"""
shouldCompileTo emblem, "<div><span>Hello, How are you? Excellent.</span></div>"
test "w/ mustaches", ->
emblem =
"""
div
span Hello,
{{foo}} are you?
Excellent.
"""
shouldCompileTo emblem, { foo: "YEAH" }, "<div><span>Hello, YEAH are you? Excellent.</span></div>"
test "w/ block mustaches", ->
emblem =
'''
p Hello, #{ sally | Hello},
and {{sally: span Hello}}!
'''
shouldCompileTo emblem,
'<p>Hello, <sally class="none">Hello</sally>, and <sally class="none"><span>Hello</span></sally>!</p>'
emblem =
'''
p Hello, #{ sally: span: a Hello}!
'''
shouldCompileTo emblem,
'<p>Hello, <sally class="none"><span><a>Hello</a></span></sally>!</p>'
test "with followup", ->
emblem =
"""
p This is
pretty cool.
p Hello.
"""
shouldCompileTo emblem, "<p>This is pretty cool.</p><p>Hello.</p>"
suite '#{} syntax'
test 'acts like {{}}', ->
emblem =
'''
span Yo #{foo}, I herd.
'''
shouldCompileTo emblem,
{ foo: '<span>123</span>' },
"<span>Yo <span>123</span>, I herd.</span>"
test 'can start inline content', ->
emblem =
'''
span #{foo}, I herd.
'''
shouldCompileTo emblem, { foo: "dawg" }, "<span>dawg, I herd.</span>"
test 'can end inline content', ->
emblem =
'''
span I herd #{foo}
'''
shouldCompileTo emblem, { foo: "dawg" }, "<span>I herd dawg</span>"
test "doesn't screw up parsing when # used in text nodes", ->
emblem =
'''
span OMG #YOLO
'''
shouldCompileTo emblem, "<span>OMG #YOLO</span>"
test "# can be only thing on line", ->
emblem =
'''
span #
'''
shouldCompileTo emblem, "<span>#</span>"
### TODO: this
test "can be escaped", ->
emblem =
'''
span #\\{yes}
'''
shouldCompileTo emblem, '<span>#{yes}</span>'
###
# Generates a suite of tests for text lines introduced by the given marker
# character (`|`, backtick, or `'`). Test templates use `_` as a placeholder
# that is substituted with the marker before compiling, and `\n`/`\t` in the
# expected strings encode the marker's whitespace semantics.
# NOTE(review): the source paste lost all indentation; heredoc fixture
# indentation below is reconstructed from the expected-output strings.
runTextLineSuite = (ch) ->
  # sct = "should compile text": adjust the expected output for this
  # marker's whitespace semantics, then delegate to shouldCompileTo.
  sct = (emblem, obj, expected) ->
    unless expected?
      expected = obj
      obj = {}
    # Only backtick text lines preserve newlines in their output.
    unless ch == '`'
      expected = expected.replace /\n/g, ""
    # Replace tabs with optional trailing whitespace.
    if ch == "'"
      expected = expected.replace /\t/g, " "
    else
      expected = expected.replace /\t/g, ""
    emblem = emblem.replace /_/g, ch
    shouldCompileTo emblem, obj, expected

  suite "text lines starting with '#{ch}'"

  test "basic", -> sct "_ What what", "What what\n\t"

  test "with html", ->
    sct '_ What <span id="woot" data-t="oof" class="f">what</span>!',
        'What <span id="woot" data-t="oof" class="f">what</span>!\n\t'

  test "multiline", ->
    emblem =
      """
      _ Blork
        Snork
      """
    sct emblem, "Blork\nSnork\n\t"

  test "triple multiline", ->
    emblem =
      """
      _ Blork
        Snork
        Bork
      """
    sct emblem, "Blork\nSnork\nBork\n\t"

  test "quadruple multiline", ->
    emblem =
      """
      _ Blork
        Snork
        Bork
        Fork
      """
    sct emblem, "Blork\nSnork\nBork\nFork\n\t"

  test "multiline w/ trailing whitespace", ->
    emblem =
      """
      _ Blork 
        Snork
      """
    sct emblem, "Blork \nSnork\n\t"

  test "secondline", ->
    emblem =
      """
      _
        Good
      """
    sct emblem, "Good\n\t"

  test "secondline multiline", ->
    emblem =
      """
      _
        Good
        Bork
      """
    sct emblem, "Good\nBork\n\t"

  test "with a mustache", ->
    emblem =
      """
      _ Bork {{foo}}!
      """
    sct emblem,
      { foo: "YEAH" },
      'Bork YEAH!\n\t'

  test "with mustaches", ->
    emblem =
      """
      _ Bork {{foo}} {{{bar}}}!
      """
    sct emblem,
      { foo: "YEAH", bar: "<span>NO</span>"},
      'Bork YEAH <span>NO</span>!\n\t'

  test "indented, then in a row", ->
    expect(0)
    return "PENDING"
    # Unreachable (pending); fixture indentation below is a best guess.
    emblem =
      """
      _
        Good
         riddance2
         dude
         gnar
         foo
      """
    sct emblem, "Good\n riddance2\n dude\n gnar\n foo\n\t"

  test "indented, then in a row, then indented", ->
    expect(0)
    return "PENDING"
    # Unreachable (pending); fixture indentation below is a best guess.
    emblem =
      """
      _
        Good
         riddance2
         dude
          gnar
          foo
         far
         faz
      """
    sct emblem, "Good \n riddance2 \n dude \n gnar \n foo \n far \n faz \n\t"

  test "uneven indentation megatest", ->
    expect(0)
    return "PENDING"
    # Unreachable (pending); fixture indentation below is a best guess.
    emblem =
      """
      _
        Good
         riddance
        dude
      """
    sct emblem, "Good\n riddance\ndude\n\t"
    emblem =
      """
      _
        Good
         riddance3
         dude
      """
    sct emblem, "Good\n riddance3\n dude\n\t"
    emblem =
      """
      _ Good
        riddance
         dude
      """
    sct emblem, "Good\nriddance\n dude\n\t"

  test "on each line", ->
    emblem =
      """
      pre
        _ This
        _  should
        _  hopefully
        _  work, and work well.
      """
    sct emblem, '<pre>This\n\t should\n\t hopefully\n\t work, and work well.\n\t</pre>'

  test "with blank", ->
    emblem =
      """
      pre
        _ This
        _  should
        _
        _  hopefully
        _  work, and work well.
      """
    sct emblem, '<pre>This\n\t should\n\t\n\t hopefully\n\t work, and work well.\n\t</pre>'
runTextLineSuite '|'
runTextLineSuite '`'
runTextLineSuite "'"
suite "text line starting with angle bracket"
test "can start with angle bracket html", ->
emblem =
"""
<span>Hello</span>
"""
shouldCompileTo emblem, "<span>Hello</span>"
test "can start with angle bracket html and go to multiple lines", ->
emblem =
"""
<span>Hello dude,
what's up?</span>
"""
shouldCompileTo emblem, "<span>Hello dude, what's up?</span>"
suite "preprocessor"
test "it strips out preceding whitespace", ->
emblem =
"""
p Hello
"""
shouldCompileTo emblem, "<p>Hello</p>"
test "it handles preceding indentation", ->
emblem = " p Woot\n p Ha"
shouldCompileTo emblem, "<p>Woot</p><p>Ha</p>"
test "it handles preceding indentation and newlines", ->
emblem = "\n p Woot\n p Ha"
shouldCompileTo emblem, "<p>Woot</p><p>Ha</p>"
test "it handles preceding indentation and newlines pt 2", ->
emblem = " \n p Woot\n p Ha"
shouldCompileTo emblem, "<p>Woot</p><p>Ha</p>"
suite "comments"
test "it strips out single line '/' comments", ->
emblem =
"""
p Hello
/ A comment
h1 How are you?
"""
shouldCompileTo emblem, "<p>Hello</p><h1>How are you?</h1>"
test "it strips out multi-line '/' comments", ->
emblem =
"""
p Hello
/ A comment
that goes on to two lines
even three!
h1 How are you?
"""
shouldCompileTo emblem, "<p>Hello</p><h1>How are you?</h1>"
test "it strips out multi-line '/' comments without text on the first line", ->
emblem =
"""
p Hello
/
A comment
that goes on to two lines
even three!
h1 How are you?
"""
shouldCompileTo emblem, "<p>Hello</p><h1>How are you?</h1>"
test "mix and match with various indentation", ->
emblem =
"""
/ A test
p Hello
span
/ This is gnarly
p Yessir nope.
/ Nothin but comments
so many comments.
/
p Should not show up
"""
shouldCompileTo emblem, "<p>Hello</p><span><p>Yessir nope.</p></span>"
test "uneven indentation", ->
emblem =
"""
/ nop
nope
nope
"""
shouldCompileTo emblem, ""
test "uneven indentation 2", ->
emblem =
"""
/ n
no
nop
nope
"""
shouldCompileTo emblem, ""
test "uneven indentation 3", ->
emblem =
"""
/ n
no
nop
nope
"""
shouldCompileTo emblem, ""
test "empty first line", ->
emblem =
"""
/
nop
nope
nope
no
"""
shouldCompileTo emblem, ""
test "on same line as html content", ->
emblem =
"""
.container / This comment doesn't show up
.row / Nor does this
p Hello
"""
shouldCompileTo emblem, '<div class="container"><div class="row"><p>Hello</p></div></div>'
test "on same line as mustache content", ->
shouldCompileTo 'frank text="YES" text2="NO" / omg', 'WOO: YES NO'
test "on same line as colon syntax", ->
emblem =
"""
ul: li: span / omg
| Hello
"""
shouldCompileTo emblem, '<ul><li><span>Hello</span></li></ul>'
suite "indentation"
# This test used to make sure the emblem code threw, but now we
# support multi-line syntax.
test "it doesn't throw when indenting after a line with inline content", ->
emblem =
"""
p Hello
p invalid
"""
shouldCompileTo emblem, "<p>Hello p invalid</p>"
test "it throws on half dedent", ->
emblem =
"""
p
span This is ok
span This aint
"""
shouldThrow -> CompilerContext.compile emblem
test "new indentation levels don't have to match parents'", ->
emblem =
"""
p
span
div
span yes
"""
shouldCompileTo emblem, "<p><span><div><span>yes</span></div></span></p>"
suite "whitespace fussiness"
test "spaces after html elements", ->
shouldCompileTo "p \n span asd", "<p><span>asd</span></p>"
shouldCompileTo "p \nspan \n\ndiv\nspan", "<p></p><span></span><div></div><span></span>"
test "spaces after mustaches", ->
shouldCompileTo "each foo \n p \n span", { foo: [1,2] }, "<p></p><span></span><p></p><span></span>"
suite "attribute shorthand"
test "id shorthand", ->
shouldCompileTo "#woot", '<div id="woot"></div>'
shouldCompileTo "span#woot", '<span id="woot"></span>'
test "class shorthand", ->
shouldCompileTo ".woot", '<div class="woot"></div>'
shouldCompileTo "span.woot", '<span class="woot"></span>'
shouldCompileTo "span.woot.loot", '<span class="woot loot"></span>'
test "class can come first", ->
shouldCompileTo ".woot#hello", '<div id="hello" class="woot"></div>'
shouldCompileTo "span.woot#hello", '<span id="hello" class="woot"></span>'
shouldCompileTo "span.woot.loot#hello", '<span id="hello" class="woot loot"></span>'
shouldCompileTo "span.woot.loot#hello.boot", '<span id="hello" class="woot loot boot"></span>'
suite "full attributes - tags with content"
test "class only", ->
shouldCompileTo 'p class="yes" Blork', '<p class="yes">Blork</p>'
test "id only", ->
shouldCompileTo 'p id="yes" Hyeah', '<p id="yes">Hyeah</p>'
test "class and id", ->
shouldCompileTo 'p id="yes" class="no" Blork', '<p id="yes" class="no">Blork</p>'
test "class and id and embedded html one-liner", ->
shouldCompileTo 'p id="yes" class="no" One <b>asd</b>!', '<p id="yes" class="no">One <b>asd</b>!</p>'
test "nesting", ->
emblem =
"""
p class="hello" data-foo="gnarly"
span Yes
"""
shouldCompileTo emblem, '<p class="hello" data-foo="gnarly"><span>Yes</span></p>'
suite "full attributes - mixed quotes"
test "single empty", ->
shouldCompileTo "p class=''", '<p class=""></p>'
test "single full", ->
shouldCompileTo "p class='woot yeah'", '<p class="woot yeah"></p>'
test "mixed", ->
shouldCompileTo "p class='woot \"oof\" yeah'", '<p class="woot "oof" yeah"></p>'
suite "full attributes - tags without content"
test "empty", ->
shouldCompileTo 'p class=""', '<p class=""></p>'
test "class only", ->
shouldCompileTo 'p class="yes"', '<p class="yes"></p>'
test "id only", ->
shouldCompileTo 'p id="yes"', '<p id="yes"></p>'
test "class and id", ->
shouldCompileTo 'p id="yes" class="no"', '<p id="yes" class="no"></p>'
suite "full attributes w/ mustaches"
test "with mustache", ->
shouldCompileTo 'p class="foo {{yes}}"', {yes: "ALEX"}, '<p class="foo ALEX"></p>'
shouldCompileTo 'p class="foo {{yes}}" Hello', {yes: "ALEX"}, '<p class="foo ALEX">Hello</p>'
emblem =
"""
p class="foo {{yes}}"
| Hello
"""
shouldCompileTo emblem, {yes: "ALEX"}, '<p class="foo ALEX">Hello</p>'
test "with mustache calling helper", ->
shouldCompileTo 'p class="foo {{{echo "YES"}}}"', '<p class="foo ECHO YES"></p>'
shouldCompileTo 'p class="foo #{echo "NO"} and {{{echo "YES"}}}" Hello', '<p class="foo ECHO NO and ECHO YES">Hello</p>'
emblem =
"""
p class="foo {{echo "BORF"}}"
| Hello
"""
shouldCompileTo emblem, '<p class="foo ECHO BORF">Hello</p>'
suite "boolean attributes"
test "static", ->
shouldCompileTo 'p borf=true', '<p borf></p>'
shouldCompileTo 'p borf=true Woot', '<p borf>Woot</p>'
shouldCompileTo 'p borf=false', '<p></p>'
shouldCompileTo 'p borf=false Nork', '<p>Nork</p>'
shouldCompileTo 'option selected=true Thingeroo', '<option selected>Thingeroo</option>'
#test "dynamic", ->
## TODO
#shouldCompileTo 'p borf=foo', { foo: true }, '<p borf></p>'
#shouldCompileTo 'p borf=foo', { foo: false }, '<p></p>'
#shouldCompileTo 'p borf=foo Yeah', { foo: true }, '<p borf>Yeah</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: false }, '<p>Naww</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: null }, '<p>Naww</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: undefined }, '<p>Naww</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: 0 }, '<p borf="0">Naww</p>'
suite "html nested"
test "basic", ->
emblem =
"""
p
span Hello
strong Hi
div
p Hooray
"""
shouldCompileTo emblem, '<p><span>Hello</span><strong>Hi</strong></p><div><p>Hooray</p></div>'
test "empty nest", ->
emblem =
"""
p
span
strong
i
"""
shouldCompileTo emblem, '<p><span><strong><i></i></strong></span></p>'
test "empty nest w/ attribute shorthand", ->
emblem =
"""
p.woo
span#yes
strong.no.yes
i
"""
shouldCompileTo emblem, '<p class="woo"><span id="yes"><strong class="no yes"><i></i></strong></span></p>'
suite "simple mustache"
test "various one-liners", ->
emblem =
"""
= foo
arf
p = foo
span.foo
p data-foo="yes" = goo
"""
shouldCompileTo emblem,
{ foo: "ASD", arf: "QWE", goo: "WER" },
'ASDQWE<p>ASD</p><span class="foo"></span><p data-foo="yes">WER</p>'
test "double =='s un-escape", ->
emblem =
"""
== foo
foo
p == foo
"""
shouldCompileTo emblem,
{ foo: '<span>123</span>' },
'<span>123</span><span>123</span><p><span>123</span></p>'
test "nested combo syntax", ->
emblem =
"""
ul = each items
li = foo
"""
shouldCompileTo emblem,
{ items: [ { foo: "YEAH"}, { foo: "BOI" } ] },
'<ul><li>YEAH</li><li>BOI</li></ul>'
suite "mustache helpers"
# Reports "true"/"false" for boolean-literal `what` hash args, "neither"
# for anything else (e.g. the strings "true"/"false").
Handlebars.registerHelper 'booltest', (options) ->
  switch options.hash.what
    when true then "true"
    when false then "false"
    else "neither"

# Reports the JS typeof of the `what` hash argument.
Handlebars.registerHelper 'hashtypetest', (options) ->
  typeof options.hash.what

# Reports the JS typeof of the first positional argument.
Handlebars.registerHelper 'typetest', (num, options) ->
  typeof num

# Echoes the `text` and `text2` hash arguments.
Handlebars.registerHelper 'frank', ->
  options = arguments[arguments.length - 1]
  "WOO: #{options.hash.text} #{options.hash.text2}"

# Wraps block content (or the first param) in <sally class="...">; the
# class attribute falls back to "none" when no param is supplied.
Handlebars.registerHelper 'sally', ->
  options = arguments[arguments.length - 1]
  params = Array::slice.call arguments, 0, -1
  param = params[0] || 'none'
  content = if options.fn then options.fn @ else param
  new Handlebars.SafeString """<sally class="#{param}">#{content}</sally>"""
test "basic", -> shouldCompileTo 'echo foo', {foo: "YES"}, 'ECHO YES'
test "hashed parameters should work", ->
shouldCompileTo 'frank text="YES" text2="NO"', 'WOO: YES NO'
# Renders every hash argument as 'key'='value', sorted and space-joined.
Handlebars.registerHelper 'concatenator', ->
  options = arguments[arguments.length - 1]
  pairs = ("'#{key}'='#{value}'" for key, value of options.hash)
  new Handlebars.SafeString pairs.sort().join(" ")
test "negative integers should work", ->
shouldCompileTo 'concatenator positive=100 negative=-100', "'negative'='-100' 'positive'='100'"
test "booleans", ->
shouldCompileToString 'typetest true', 'boolean'
shouldCompileToString 'typetest false', 'boolean'
shouldCompileTo 'booltest what=false', 'false'
shouldCompileTo 'booltest what=true', 'true'
shouldCompileTo 'booltest what="false"', 'neither'
shouldCompileTo 'booltest what="true"', 'neither'
test "integers", ->
shouldCompileToString 'typetest 200', 'number'
shouldCompileTo 'hashtypetest what=1', 'number'
shouldCompileTo 'hashtypetest what=200', 'number'
test "nesting", ->
emblem =
"""
sally
p Hello
"""
shouldCompileTo emblem, '<sally class="none"><p>Hello</p></sally>'
test "recursive nesting", ->
emblem =
"""
sally
sally
p Hello
"""
shouldCompileTo emblem, '<sally class="none"><sally class="none"><p>Hello</p></sally></sally>'
test "recursive nesting pt 2", ->
emblem =
"""
sally
sally thing
p Hello
"""
shouldCompileTo emblem, { thing: "woot" }, '<sally class="none"><sally class="woot"><p>Hello</p></sally></sally>'
# Stand-in `view` helper: emits <param ...hash>content</param>, using the
# block body as content when one is given. When no hash options were
# supplied, a literal " nohash" marker is emitted instead.
Handlebars.registerHelper 'view', (param, a, b, c) ->
  options = arguments[arguments.length - 1]
  content = if options.fn then options.fn @ else param
  hashString = ""
  hashString += " #{k}=#{v}" for own k, v of options.hash
  hashString = " nohash" unless hashString
  new Handlebars.SafeString """<#{param}#{hashString}>#{content}</#{param}>"""
suite "capitalized line-starter"
test "should invoke `view` helper by default", ->
emblem =
"""
SomeView
"""
shouldEmberPrecompileToHelper emblem, 'view'
#shouldCompileToString emblem, '<SomeView nohash>SomeView</SomeView>'
test "should not invoke `view` helper for vanilla HB", ->
emblem =
"""
SomeView
"""
shouldCompileToString emblem, {SomeView: "ALEX"}, 'ALEX'
test "should support block mode", ->
emblem =
"""
SomeView
p View content
"""
#shouldCompileToString emblem, '<SomeView nohash><p>View content</p></SomeView>'
shouldEmberPrecompileToHelper emblem, 'view'
test "should not kick in if preceded by equal sign", ->
emblem =
"""
= SomeView
"""
shouldCompileTo emblem, { SomeView: 'erp' }, 'erp'
test "should not kick in explicit {{mustache}}", ->
emblem =
"""
p Yeah {{SomeView}}
"""
shouldCompileTo emblem, { SomeView: 'erp' }, '<p>Yeah erp</p>'
# TODO test overriding the default helper name (instead of always "view")
suite "bang syntax defaults to `unbound` helper syntax"
# Fake `unbound` helper for the bang-syntax tests: wraps output in an
# <unbound> element whose class attribute lists the positional params.
Handlebars.registerHelper 'unbound', (args...) ->
  options = args.pop()
  joined = args.join ' '
  # Block form renders the inner template; otherwise echo the joined params.
  body = if options.fn then options.fn(this) else joined
  new Handlebars.SafeString "<unbound class=\"#{joined}\">#{body}</unbound>"
test "bang helper defaults to `unbound` invocation", ->
emblem =
"""
foo! Yar
= foo!
"""
shouldCompileToString emblem, '<unbound class="foo Yar">foo Yar</unbound><unbound class="foo">foo</unbound>'
test "bang helper works with blocks", ->
emblem =
"""
hey! you suck
= foo!
"""
shouldCompileToString emblem, '<unbound class="hey you suck"><unbound class="foo">foo</unbound></unbound>'
suite "question mark syntax defaults to `if` helper syntax"
test "? helper defaults to `if` invocation", ->
emblem =
"""
foo?
p Yeah
"""
shouldCompileTo emblem, { foo: true }, '<p>Yeah</p>'
test "else works", ->
emblem =
"""
foo?
p Yeah
else
p No
"""
shouldCompileTo emblem, { foo: false }, '<p>No</p>'
test "compound", ->
emblem =
"""
p = foo?
| Hooray
else
| No
p = bar?
| Hooray
else
| No
"""
shouldCompileTo emblem, { foo: true, bar: false }, '<p>Hooray</p><p>No</p>'
test "compound", ->
emblem =
"""
p = foo?
bar
else
baz
"""
shouldCompileTo emblem, { foo: true, bar: "borf", baz: "narsty" }, '<p>borf</p>'
suite "conditionals"
test "simple if statement", ->
emblem =
"""
if foo
| Foo
if bar
| Bar
"""
shouldCompileTo emblem, {foo: true, bar: false}, 'Foo'
test "if else ", ->
emblem =
"""
if foo
| Foo
if bar
| Bar
else
| Woot
else
| WRONG
if bar
| WRONG
else
| Hooray
"""
shouldCompileTo emblem, {foo: true, bar: false}, 'FooWootHooray'
test "else with preceding `=`", ->
emblem =
"""
= if foo
p Yeah
= else
p No
= if bar
p Yeah!
= else
p No!
=if bar
p Yeah!
=else
p No!
"""
shouldCompileTo emblem, {foo: true, bar: false}, '<p>Yeah</p><p>No!</p><p>No!</p>'
test "unless", ->
emblem =
"""
unless bar
| Foo
unless foo
| Bar
else
| Woot
else
| WRONG
unless foo
| WRONG
else
| Hooray
"""
shouldCompileTo emblem, {foo: true, bar: false}, 'FooWootHooray'
test "else followed by newline doesn't gobble else content", ->
emblem =
"""
if something
p something
else
if nothing
p nothing
else
p not nothing
"""
shouldCompileTo emblem, {}, '<p>not nothing</p>'
suite "class shorthand and explicit declaration is coalesced"
test "when literal class is used", ->
shouldCompileTo 'p.foo class="bar"', '<p class="foo bar"></p>'
test "when ember expression is used with variable", ->
shouldCompileTo 'p.foo class=bar', {bar: 'baz'}, '<p bind-attr class to :foo bar></p>'
test "when ember expression is used with variable in braces", ->
result = shouldEmberPrecompileToHelper 'p.foo class={ bar }'
ok -1 != result.indexOf '\'class\': (":foo bar")'
test "when ember expression is used with constant in braces", ->
result = shouldEmberPrecompileToHelper 'p.foo class={ :bar }'
ok -1 != result.indexOf '\'class\': (":foo :bar")'
test "when ember expression is used with constant and variable in braces", ->
result = shouldEmberPrecompileToHelper 'p.foo class={ :bar bar }'
ok -1 != result.indexOf '\'class\': (":foo :bar bar")'
test "when ember expression is used with bind-attr", ->
result = shouldEmberPrecompileToHelper 'p.foo{ bind-attr class="bar" }'
ok -1 != result.indexOf '\'class\': (":foo bar")'
test "when ember expression is used with bind-attr and multiple attrs", ->
result = shouldEmberPrecompileToHelper 'p.foo{ bind-attr something=bind class="bar" }'
ok -1 != result.indexOf '\'class\': (":foo bar")'
test "only with bind-attr helper", ->
result = shouldEmberPrecompileToHelper 'p.foo{ someHelper class="bar" }', 'someHelper'
ok -1 != result.indexOf '\'class\': ("bar")'
ok -1 != result.indexOf 'class=\\"foo\\"'
# Fake bind-attr helper shared by the vanilla and Ember compile paths: renders
# a literal "bind-attr k to v ..." string so tests can assert on the binding
# declarations Emblem generated. Emits " narf" when the hash is empty.
# (Fix: the original also computed `params`/`param` locals that were never
# used — dead code removed.)
bindAttrHelper = ->
  options = arguments[arguments.length - 1]
  bindingString = ""
  for own k, v of options.hash
    bindingString += " #{k} to #{v}"
  # Marker so tests can detect a bind-attr invocation with no hash options.
  bindingString = " narf" unless bindingString
  "bind-attr#{bindingString}"
Handlebars.registerHelper 'bind-attr', bindAttrHelper
EmberHandlebars.registerHelper 'bind-attr', bindAttrHelper
suite "bind-attr behavior for unquoted attribute values"
test "basic", ->
emblem = 'p class=foo'
shouldCompileTo emblem, {foo:"YEAH"}, '<p class="YEAH"></p>'
shouldEmberPrecompileToHelper emblem
test "basic w/ underscore", ->
emblem = 'p class=foo_urns'
shouldCompileTo emblem, {foo_urns: "YEAH"}, '<p class="YEAH"></p>'
shouldEmberPrecompileToHelper emblem
test "subproperties", ->
emblem = 'p class=foo._death.woot'
shouldCompileTo emblem, {foo: { _death: { woot: "YEAH" } }}, '<p class="YEAH"></p>'
shouldEmberPrecompileToHelper emblem
test "multiple", ->
shouldCompileTo 'p class=foo id="yup" data-thinger=yeah Hooray', { foo: "FOO", yeah: "YEAH" },
'<p class="FOO" id="yup" data-thinger="YEAH">Hooray</p>'
test "class bind-attr special syntax", ->
emblem = 'p class=foo:bar:baz'
shouldEmberPrecompileToHelper emblem
shouldThrow (-> CompilerContext.compile emblem)
test "class bind-attr braced syntax w/ underscores and dashes", ->
shouldEmberPrecompileToHelper 'p class={f-oo:bar :b_az}'
shouldEmberPrecompileToHelper 'p class={ f-oo:bar :b_az }'
shouldEmberPrecompileToHelper 'p class={ f-oo:bar :b_az } Hello'
emblem =
"""
.input-prepend class={ filterOn:input-append }
span.add-on
"""
shouldEmberPrecompileToHelper emblem
test "exclamation modifier (vanilla)", ->
emblem = 'p class=foo!'
# exclamation is no-op in vanilla HB
shouldCompileTo emblem, {foo:"YEAH"}, '<p class="YEAH"></p>'
test "exclamation modifier (ember)", ->
emblem = 'p class=foo!'
result = precompileEmber emblem
ok result.match /p class/
ok result.match /helpers\.unbound.*foo/
suite "in-tag explicit mustache"
Handlebars.registerHelper 'inTagHelper', (p) ->
return p;
test "single", ->
shouldCompileTo 'p{inTagHelper foo}', {foo: "ALEX"}, '<p ALEX></p>'
test "double", ->
shouldCompileTo 'p{inTagHelper foo}', {foo: "ALEX"}, '<p ALEX></p>'
test "triple", ->
shouldCompileTo 'p{inTagHelper foo}', {foo: "ALEX"}, '<p ALEX></p>'
Handlebars.registerHelper 'insertClass', (p) ->
return 'class="' + p + '"'
test "with singlestache", ->
shouldCompileTo 'p{insertClass foo} Hello', {foo: "yar"}, '<p class="yar">Hello</p>'
test "singlestache can be used in text nodes", ->
shouldCompileTo 'p Hello {dork}', '<p>Hello {dork}</p>'
test "with doublestache", ->
shouldCompileTo 'p{{insertClass foo}} Hello', {foo: "yar"}, '<p class="yar">Hello</p>'
test "with triplestache", ->
shouldCompileTo 'p{{{insertClass foo}}} Hello', {foo: "yar"}, '<p class="yar">Hello</p>'
test "multiple", ->
shouldCompileTo 'p{{{insertClass foo}}}{{{insertClass boo}}} Hello',
{foo: "yar", boo: "nar"},
'<p class="yar" class="nar">Hello</p>'
test "with nesting", ->
emblem =
"""
p{{bind-attr class="foo"}}
span Hello
"""
shouldCompileTo emblem, {foo: "yar"},
'<p bind-attr class to foo><span>Hello</span></p>'
suite "actions"
# Fake `action` helper: renders "action p1|p2 k=v ..." so the action-syntax
# tests can assert on the params and hash that Emblem passed through.
Handlebars.registerHelper 'action', (args...) ->
  options = args.pop()
  paramsString = args.join '|'
  # NOTE: iterates hash insertion order; the tests don't assert on ordering.
  pairs = (" #{name}=#{val}" for own name, val of options.hash)
  hashString = pairs.join('')
  hashString = " nohash" if hashString is ''
  "action #{paramsString}#{hashString}"
test "basic (click)", ->
emblem =
"""
button click="submitComment" Submit Comment
"""
shouldCompileToString emblem, '<button action submitComment on=click>Submit Comment</button>'
test "basic (click) followed by attr", ->
emblem =
"""
button click="submitComment" class="foo" Submit Comment
"""
shouldCompileToString emblem, '<button action submitComment on=click class="foo">Submit Comment</button>'
emblem =
"""
button click="submitComment 'omg'" class="foo" Submit Comment
"""
shouldCompileToString emblem, '<button action submitComment|omg on=click class="foo">Submit Comment</button>'
test "nested (mouseEnter)", ->
emblem =
"""
a mouseEnter='submitComment target="view"'
| Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view on=mouseEnter>Submit Comment</a>'
test "nested (mouseEnter, doublequoted)", ->
emblem =
"""
a mouseEnter="submitComment target='view'"
| Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view on=mouseEnter>Submit Comment</a>'
test "manual", ->
emblem =
"""
a{action submitComment target="view"} Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view>Submit Comment</a>'
test "manual nested", ->
emblem =
"""
a{action submitComment target="view"}
p Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view><p>Submit Comment</p></a>'
suite "haml style"
test "basic", ->
emblem =
"""
%borf
"""
shouldCompileToString emblem, '<borf></borf>'
test "nested", ->
emblem =
"""
%borf
%sporf Hello
"""
shouldCompileToString emblem, '<borf><sporf>Hello</sporf></borf>'
test "capitalized", ->
emblem =
"""
%Alex <NAME> <NAME>
%Alex
%Woot
"""
shouldCompileToString emblem, '<Alex>alex</Alex><Alex><Woot></Woot></Alex>'
test "funky chars", ->
emblem =
"""
%borf:narf
%borf:narf Hello, {{foo}}.
%alex = foo
"""
shouldCompileToString emblem,
{ foo: "<NAME>" },
'<borf:narf></borf:narf><borf:narf>Hello, <NAME>.</borf:narf><alex><NAME></alex>'
suite "line-based errors"
test "line number is provided for pegjs error", ->
emblem =
"""
p Hello
p Hello {{narf}
"""
shouldThrow (-> CompilerContext.compile emblem), "line 2"
# https://github.com/machty/emblem.js/issues/6
test "single quote test", ->
emblem =
"""
button click='p' <NAME>
/ form s='d target="App"'
label I'm a label!
"""
shouldCompileToString emblem, '<button action p on=click><NAME></button>'
test "double quote test", ->
emblem =
"""
button click="p" <NAME>
/ form s='d target="App"'
label I'm a label!
"""
shouldCompileToString emblem, '<button action p on=click><NAME></button>'
test "no quote test", ->
emblem =
"""
button click=p <NAME>
/ form s='d target="App"'
label I'm a label!
"""
shouldCompileToString emblem, '<button action p on=click><NAME></button>'
suite "mustache DOM attribute shorthand"
test "tagName w/o space", ->
emblem =
"""
App.FunView%span
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /tagName.*span/
test "tagName w/ space", ->
emblem =
"""
App.FunView %span
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /tagName.*span/
test "tagName block", ->
emblem =
"""
view App.FunView%span
p Hello
"""
shouldCompileToString emblem, '<App.FunView tagName=span><p>Hello</p></App.FunView>'
test "class w/ space (needs space)", ->
emblem =
"""
App.FunView .bork
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /class.*bork/
test "multiple classes", ->
emblem =
"""
App.FunView .bork.snork
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /class.*bork.*snork/
test "elementId", ->
emblem =
"""
App.FunView#ohno
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /elementId.*ohno/
test "mixed w/ hash`", ->
emblem =
"""
App.FunView .bork.snork funbags="yeah"
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /class.*bork.*snork/
ok result.match /hash/
ok result.match /funbags/
ok result.match /yeah/
test "mixture of all`", ->
emblem =
"""
App.FunView%alex#hell.bork.snork funbags="yeah"
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /tagName.*alex/
ok result.match /elementId.*hell/
ok result.match /class.*bork.*snork/
ok result.match /hash/
ok result.match /funbags/
ok result.match /yeah/
suite "self-closing html tags"
test "br", ->
emblem =
"""
br
"""
shouldCompileToString emblem, '<br />'
test "br paragraph example", ->
emblem =
"""
p
| LOL!
br
| BORF!
"""
shouldCompileToString emblem, '<p>LOL!<br />BORF!</p>'
test "input", ->
emblem =
"""
input type="text"
"""
shouldCompileToString emblem, '<input type="text" />'
suite "ember."
test "should precompile with EmberHandlebars", ->
emblem =
"""
input type="text"
"""
result = Emblem.precompile(EmberHandlebars, 'p Hello').toString()
ok result.match '<p>Hello</p>'
suite "old school handlebars"
test "array", ->
emblem =
'''
goodbyes
| #{text}!
| cruel #{world}!
'''
hash = {goodbyes: [{text: "goodbye"}, {text: "Goodbye"}, {text: "GOODBYE"}], world: "world"}
shouldCompileToString emblem, hash, "goodbye! Goodbye! GOODBYE! cruel world!"
hash = {goodbyes: [], world: "world"}
shouldCompileToString emblem, hash, "cruel world!"
# Partial invocation: `> name` at line start, and #{> name} inside text runs.
Handlebars.registerPartial('hbPartial', '<a href="/people/{{id}}">{{name}}</a>')
test "calling handlebars partial", ->
emblem =
'''
> hbPartial
| Hello #{> hbPartial}
'''
shouldCompileToString emblem,
{ id: 666, name: "<NAME>" },
'<a href="/people/666">Death</a>Hello <a href="/people/666">Death</a>'
Emblem.registerPartial(Handlebars, 'emblemPartial', 'a href="/people/{{id}}" = name')
Emblem.registerPartial(Handlebars, 'emblemPartialB', 'p Grr')
Emblem.registerPartial(Handlebars, 'emblemPartialC', 'p = a')
test "calling emblem partial", ->
shouldCompileToString '> emblemPartial', { id: 666, name: "<NAME>" }, '<a href="/people/666">Death</a>'
test "calling emblem partial with context", ->
shouldCompileToString '> emblemPartialC foo', { foo: { a: "YES" } }, '<p>YES</p>'
test "partials in mustaches", ->
emblem =
"""
| Hello, {{> emblemPartialC foo}}{{>emblemPartialB}}{{>emblemPartialB }}
"""
shouldCompileToString emblem, { foo: { a: "YES" } }, 'Hello, <p>YES</p><p>Grr</p><p>Grr</p>'
test "handlebars dot-separated paths with segment-literal notation", ->
emblem =
'''
p = articles.[3]
'''
shouldCompileTo emblem, { articles: ['zero', 'one', 'two', 'three']}, '<p>three</p>'
test "handlebars dot-separated paths with segment-literal notation, more nesting", ->
emblem =
'''
p = articles.[3].[#comments].[0]
'''
shouldCompileTo emblem, { articles: [{}, {}, {}, {'#comments': ['bazinga']}]}, '<p>bazinga</p>'
test "../path as inMustacheParam recognized correctly as pathIdNode instead of classShorthand", ->
Handlebars.registerHelper 'jumpToParent', (link) ->
new Handlebars.SafeString "<a href='#{link}'>Jump to parent top</a>"
emblem =
'''
each children
jumpToParent ../parentLink
'''
shouldCompileTo emblem, {parentLink: '#anchor', children: [{}]}, '<a href=\'#anchor\'>Jump to parent top</a>'
test "block as #each", ->
emblem =
'''
thangs
p Woot #{yeah}
'''
shouldCompileToString emblem, { thangs: [{yeah: 123}, {yeah:456}] }, '<p>Woot 123</p><p>Woot 456</p>'
# @index/@key/@first data keywords require Handlebars >= 1.2 (feature flag
# computed in the setup section).
if supportsEachHelperDataKeywords
suite "each block helper keywords prefixed by @"
test "#each with @index", ->
emblem =
'''
thangs
p #{@index} Woot #{yeah}
'''
shouldCompileToString emblem, { thangs: [{yeah: 123}, {yeah:456}] }, '<p>0 Woot 123</p><p>1 Woot 456</p>'
test "#each with @key", ->
emblem =
'''
each thangs
p #{@key}: #{this}
'''
shouldCompileTo emblem, { thangs: {'@key': 123, 'works!':456} }, '<p>@key: 123</p><p>works!: 456</p>'
test "#each with @key, @index", ->
emblem =
'''
each thangs
p #{@index} #{@key}: #{this}
'''
shouldCompileTo emblem, { thangs: {'@key': 123, 'works!':456} }, '<p>0 @key: 123</p><p>1 works!: 456</p>'
test "#each with @key, @first", ->
emblem =
'''
each thangs
if @first
p First item
else
p #{@key}: #{this}
'''
shouldCompileTo emblem, { thangs: {'@key': 123, 'works!':456} }, '<p>First item</p><p>works!: 456</p>'
###
test "partial in block", ->
emblem =
"""
ul = people
> link
"""
data =
people: [
{ "name": "<NAME>", "id": 1 }
{ "name": "<NAME>", "id": 2 }
]
shouldCompileToString emblem, data, '<ul><a href="/people/1"><NAME></a><a href="/people/2">Y<NAME>uda</a><ul>'
###
#suite "helper hash"
#test "quoteless values get treated as bindings", ->
#emblem =
#"""
#view SomeView a=b
#| Yes
#"""
#shouldCompileToString emblem, '<SomeView aBinding=b>Yes</SomeView>'
#test "more complex", ->
#emblem =
#"""
#view SomeView a=b foo=thing.gnar
#"""
#shouldCompileToString emblem, '<SomeView aBinding=b fooBinding=thing.gnar>SomeView</SomeView>'
suite "inline block helper"
test "text only", ->
emblem =
"""
view SomeView | Hello
"""
shouldCompileToString emblem, '<SomeView nohash>Hello</SomeView>'
test "multiline", ->
emblem =
"""
view SomeView | Hello,
How are you?
Sup?
"""
shouldCompileToString emblem, '<SomeView nohash>Hello, How are you? Sup?</SomeView>'
test "more complicated", ->
emblem =
"""
view SomeView borf="yes" | Hello,
How are you?
Sup?
"""
shouldCompileToString emblem, '<SomeView borf=yes>Hello, How are you? Sup?</SomeView>'
suite "copy paste html"
test "indented", ->
emblem =
"""
<p>
<span>This be some text</span>
<title>Basic HTML Sample Page</title>
</p>
"""
shouldCompileToString emblem, '<p><span>This be some text</span><title>Basic HTML Sample Page</title></p>'
test "flatlina", ->
emblem =
"""
<p>
<span>This be some text</span>
<title>Basic HTML Sample Page</title>
</p>
"""
shouldCompileToString emblem, '<p><span>This be some text</span><title>Basic HTML Sample Page</title></p>'
test "bigass", ->
expect(0)
return "PENDING"
emblem =
"""
<div class="content">
<p>
We design and develop ambitious web and mobile applications,
</p>
<p>
A more official portfolio page is on its way, but in the meantime,
check out
</p>
</div>
"""
expected = '<div class="content"><p> We design and develop ambitious web and mobile applications, </p><p> A more official portfolio page is on its way, but in the meantime, check out</p></div>'
shouldCompileToString emblem, expected
suite "`this` keyword"
test "basic", ->
emblem = '''
each foo
p = this
this
'''
shouldCompileTo emblem,
{ foo: [ "<NAME>", "<NAME>" ] },
'<p><NAME></p><NAME><p><NAME></p><NAME>'
suite "colon separator"
test "basic", ->
emblem = 'each foo: p Hello, #{this}'
shouldCompileTo emblem,
{ foo: [ "<NAME>", "<NAME>", "<NAME>" ] },
'<p>Hello, <NAME></p><p>Hello, <NAME></p><p>Hello, <NAME></p>'
test "html stack", ->
emblem = '.container: .row: .span5: span Hello'
shouldCompileToString emblem,
'<div class="container"><div class="row"><div class="span5"><span>Hello</span></div></div></div>'
test "epic", ->
emblem = '''
.container: .row: .span5
ul#list data-foo="yes": each foo: li
span: this
'''
shouldCompileTo emblem, { foo: ["a","b"] },
'<div class="container"><div class="row"><div class="span5"><ul id="list" data-foo="yes"><li><span>a</span></li><li><span>b</span></li></ul></div></div></div>'
test "html stack elements only", ->
emblem = 'p: span: div: p: foo'
shouldCompileToString emblem, { foo: "alex" },
'<p><span><div><p>alex</p></div></span></p>'
test "mixed separators", ->
emblem = '.fun = each foo: %nork = this'
shouldCompileTo emblem,
{ foo: [ "<NAME>", "<NAME>", "<NAME>" ] },
'<div class="fun"><nork><NAME></nork><nork><NAME></nork><nork><NAME></nork></div>'
test "mixed separators rewritten", ->
emblem = '.fun: each foo: %nork: this'
shouldCompileTo emblem,
{ foo: [ "<NAME>", "<NAME>", "<NAME>" ] },
'<div class="fun"><nork><NAME></nork><nork><NAME></nork><nork><NAME></nork></div>'
test "with text terminator", ->
emblem = '.fun: view SomeView | Hello'
shouldCompileToString emblem, '<div class="fun"><SomeView nohash>Hello</SomeView></div>'
test "test from heartsentwined", ->
shouldCompileTo 'li data-foo=bar: a', { bar: "abc" }, '<li data-foo="abc"><a></a></li>'
shouldCompileTo "li data-foo='bar': a", '<li data-foo="bar"><a></a></li>'
test "mixture of colon and indentation", ->
emblem = """
li data-foo=bar: a
baz
"""
shouldCompileTo emblem, { bar: "abc", baz: "Hello" }, '<li data-foo="abc"><a>Hello</a></li>'
test "mixture of colon and indentation pt.2", ->
emblem = """
ul
li data-foo=bar: a quux
li data-foo='bar': a quux
li data-foo=bar href='#': a quux
"""
result = precompileEmber emblem
ok(!result.match "a quux")
suite "base indent / predent"
test "predent", ->
emblem = " \n"
s =
"""
pre
` This
` should
` hopefully
` work, and work well.
"""
emblem += s
shouldCompileToString emblem, '<pre>This\n should\n hopefully\n work, and work well.\n</pre>'
test "mixture", ->
emblem = " \n"
emblem += " p Hello\n"
emblem += " p\n"
emblem += " | Woot\n"
emblem += " span yes\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span>'
test "mixture w/o opening blank", ->
emblem = " p Hello\n"
emblem += " p\n"
emblem += " | Woot\n"
emblem += " span yes\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span>'
test "w/ blank lines", ->
emblem = " p Hello\n"
emblem += " p\n"
emblem += "\n"
emblem += " | Woot\n"
emblem += "\n"
emblem += " span yes\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span>'
test "w/ blank whitespaced lines", ->
emblem = " p Hello\n"
emblem += " p\n"
emblem += "\n"
emblem += " | Woot\n"
emblem += " \n"
emblem += " \n"
emblem += " \n"
emblem += "\n"
emblem += " span yes\n"
emblem += "\n"
emblem += " sally\n"
emblem += "\n"
emblem += " \n"
emblem += " | Woot\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span><sally class="none">Woot</sally>'
suite "EOL Whitespace"
test "shouldn't be necessary to insert a space", ->
emblem =
"""
p Hello,
How are you?
p I'm fine, thank you.
"""
shouldCompileToString emblem, "<p>Hello, How are you?</p><p>I'm fine, thank you.</p>"
suite "misc."
test "end with indent", ->
expect(0)
return "PENDING"
emblem =
"""
div
p
span Butts
em fpokasd
iunw
paosdk
"""
shouldCompileToString emblem, '<div><p><span>Buttsem fpokasdiunw paosdk</span></p></div>'
test "capitalized view helper should not kick in if suffix modifiers present", ->
emblem =
"""
Foo!
"""
shouldCompileToString emblem, '<unbound class="Foo">Foo</unbound>'
test "GH-26: no need for space before equal sign", ->
emblem =
"""
span= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<span>YEAH</span>'
emblem =
"""
span.foo= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<span class="foo">YEAH</span>'
emblem =
"""
span#hooray.foo= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<span id="hooray" class="foo">YEAH</span>'
emblem =
"""
#hooray= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<div id="hooray">YEAH</div>'
emblem =
"""
.hooray= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<div class="hooray">YEAH</div>'
test "numbers in shorthand", ->
shouldCompileToString '#4a', '<div id="4a"></div>'
shouldCompileToString '.4a', '<div class="4a"></div>'
shouldCompileToString '.4', '<div class="4"></div>'
shouldCompileToString '#4', '<div id="4"></div>'
shouldCompileToString '%4', '<4></4>'
shouldCompileToString '%4 ermagerd', '<4>ermagerd</4>'
shouldCompileToString '%4#4.4 ermagerd', '<4 id="4" class="4">ermagerd</4>'
test "Emblem has a VERSION defined", ->
ok(Emblem.VERSION, "Emblem.VERSION should be defined")
test "Windows line endings", ->
emblem = ".navigation\r\n p Hello\r\n#main\r\n | hi"
shouldCompileToString emblem, '<div class="navigation"><p>Hello</p></div><div id="main">hi</div>'
test "backslash doesn't cause infinite loop", ->
emblem =
'''
| \\
'''
shouldCompileTo emblem, "\\"
test "backslash doesn't cause infinite loop with letter", ->
emblem =
'''
| \\a
'''
shouldCompileTo emblem, "\\a"
test "self closing tag with forward slash", ->
emblem =
'''
p/
%bork/
.omg/
#hello.boo/
p/ class="asdasd"
'''
shouldCompileTo emblem, '<p /><bork /><div class="omg" /><div id="hello" class="boo" /><p class="asdasd" />'
test "tagnames and attributes with colons", ->
emblem =
'''
%al:ex match:neer="snork" Hello!
'''
shouldCompileTo emblem, '<al:ex match:neer="snork">Hello!</al:ex>'
test "windows newlines", ->
emblem = "\r\n \r\n p Hello\r\n\r\n"
shouldCompileTo emblem, '<p>Hello</p>'
# Subexpressions `(helper args)` require Handlebars >= 1.3 (feature flag from
# the setup section). Note this re-registers 'echo', shadowing the earlier one.
if supportsSubexpressions
suite "subexpressions"
Handlebars.registerHelper 'echo', (param) ->
"ECHO #{param}"
Handlebars.registerHelper 'echofun', ->
options = Array.prototype.pop.call(arguments)
"FUN = #{options.hash.fun}"
Handlebars.registerHelper 'hello', (param) ->
"hello"
Handlebars.registerHelper 'equal', (x, y) ->
x == y
test "arg-less helper", ->
emblem = 'p {{echo (hello)}}'
shouldCompileTo emblem, '<p>ECHO hello</p>'
emblem = '= echo (hello)'
shouldCompileTo emblem, 'ECHO hello'
test "helper w args", ->
emblem = 'p {{echo (equal 1 1)}}'
shouldCompileTo emblem, '<p>ECHO true</p>'
emblem = '= echo (equal 1 1)'
shouldCompileTo emblem, 'ECHO true'
test "supports much nesting", ->
emblem = 'p {{echo (equal (equal 1 1) true)}}'
shouldCompileTo emblem, '<p>ECHO true</p>'
emblem = '= echo (equal (equal 1 1) true)'
shouldCompileTo emblem, 'ECHO true'
test "with hashes", ->
emblem = 'p {{echo (equal (equal 1 1) true fun="yes")}}'
shouldCompileTo emblem, '<p>ECHO true</p>'
emblem = '= echo (equal (equal 1 1) true fun="yes")'
shouldCompileTo emblem, 'ECHO true'
test "as hashes", ->
emblem = 'p {{echofun fun=(equal 1 1)}}'
shouldCompileTo emblem, '<p>FUN = true</p>'
emblem = '= echofun fun=(equal 1 1)'
shouldCompileTo emblem, 'FUN = true'
test "complex expression", ->
emblem = 'p {{echofun true (hello how="are" you=false) 1 not=true fun=(equal "ECHO hello" (echo (hello))) win="yes"}}'
shouldCompileTo emblem, '<p>FUN = true</p>'
emblem = '= echofun true (hello how="are" you=false) 1 not=true fun=(equal "ECHO hello" (echo (hello))) win="yes"'
shouldCompileTo emblem, 'FUN = true'
| true |
# Test Setup: Set up an environment that'll work for both Node and Qunit tests.
# NOTE(review): this reads window.Emblem into a variable named `Ember` —
# looks intentional (Ember namespace shim for the browser build), but confirm.
Ember = window?.Emblem || @Emblem || {}
# These are needed for the full version ember to load properly
LoadedEmber = LoadedEmber || {}
Ember.Handlebars = LoadedEmber.Handlebars
Ember.warn = LoadedEmber.warn
if Emblem?
# Qunit testing
_equal = equal
equals = equal = (a, b, msg) ->
# Allow exec with missing message params
_equal(a, b, msg || '')
# In QUnit, we use module() instead of nonexistent suite()
window.suite = module
else
# Setup for Node package testing
Handlebars = require('handlebars')
EmberHandlebars = require('./resources/ember-template-compiler.js').EmberHandlebars
Emblem = require('../lib/emblem')
# TODO: replace with real expect()
`expect = function() {};`
{equal, equals, ok, throws} = require("assert")
unless CompilerContext?
# Note that this doesn't have the same context separation as the rspec test.
# Both should be run for full acceptance of the two library modes.
# Default compile context: vanilla-Handlebars compilation via Emblem.
CompilerContext =
compile: (template, options) ->
Emblem.compile(Handlebars, template, options)
# Feature-detect Handlebars capabilities by comparing major.minor numerically.
# (Fix: the old `VERSION.slice(0, 3) >= x` trick coerces a string prefix and
# breaks on two-digit minors — e.g. "1.10.0".slice(0, 3) is "1.1".)
[hbMajor, hbMinor] = (parseInt(part, 10) for part in Handlebars.VERSION.split('.'))
supportsEachHelperDataKeywords = hbMajor > 1 or (hbMajor is 1 and hbMinor >= 2)
supportsSubexpressions = hbMajor > 1 or (hbMajor is 1 and hbMinor >= 3)
# Precompile `emblem` through the Ember template compiler and return the
# generated JavaScript source as a string.
precompileEmber = (emblem) ->
Emblem.precompile(EmberHandlebars, emblem).toString()
# Assert that the Ember-precompiled output invokes `helper` (dot or bracket
# lookup form) and return the generated source for further assertions.
shouldEmberPrecompileToHelper = (emblem, helper = 'bind-attr') ->
result = precompileEmber emblem
ok (result.match "helpers.#{helper}") or (result.match "helpers\\['#{helper}'\\]")
result
# Compile `string` and assert equality, in stringParams mode. The second
# argument is overloaded: a String means "expected output, empty context";
# anything else is the render context, with `expected` third.
shouldCompileToString = (string, hashOrArray, expected) ->
  [context, wanted] =
    if hashOrArray.constructor is String then [{}, hashOrArray]
    else [hashOrArray, expected]
  shouldCompileToWithPartials string, context, false, wanted, null, true
# Same overloading as above, but compiles without stringParams and threads
# through an optional assertion message.
shouldCompileTo = (string, hashOrArray, expected, message) ->
  [context, wanted] =
    if hashOrArray.constructor is String then [{}, hashOrArray]
    else [hashOrArray, expected]
  shouldCompileToWithPartials string, context, false, wanted, message
# Core assertion: compile `string` (with stringParams when `strings` is set),
# render it against `hashOrArray`, and compare the result to `expected`.
# `partials` is forwarded to compileWithPartials; `message` is an optional
# extra label appended to the failure output.
shouldCompileToWithPartials = (string, hashOrArray, partials, expected, message, strings) ->
  options = null
  if strings
    options = {}
    options.stringParams = true
  result = compileWithPartials(string, hashOrArray, partials, options)
  # Fix: only append the caller's message when one was given, so failures
  # no longer end in ": undefined".
  suffix = if message then ": " + message else ""
  equal(result, expected, "'" + result + "' should === '" + expected + "'" + suffix)
# Compile `string` and render it. `hashOrArray` is either a plain context
# object, or an array of [context, helpers, partials]; in the array form the
# caller's helpers are back-filled with the globally registered ones.
compileWithPartials = (string, hashOrArray, partials, options = {}) ->
template = CompilerContext.compile(string, options)
if Object::toString.call(hashOrArray) == "[object Array]"
if helpers = hashOrArray[1]
# Merge: caller-supplied helpers win; fall back to global registrations.
for prop of Handlebars.helpers
helpers[prop] = helpers[prop] || Handlebars.helpers[prop]
ary = []
ary.push(hashOrArray[0])
ary.push
helpers: hashOrArray[1]
partials: hashOrArray[2]
else
ary = [hashOrArray]
# Invoke as template(context[, runtimeOptions]).
template.apply(this, ary)
# Assert that `fn` throws. When `exMessage` is given, additionally assert
# the thrown error's message matches it.
shouldThrow = (fn, exMessage) ->
  threw = false
  try
    fn()
  catch err
    threw = true
    if exMessage
      ok err.message.match(exMessage), "exception message matched"
  ok(threw, "an exception was thrown")
# Trivial helper used across the suite: echoes its argument with a prefix.
Handlebars.registerHelper 'echo', (param) ->
  "ECHO #{param}"
suite "html one-liners"
test "element only", ->
shouldCompileTo "p", "<p></p>"
test "with text", ->
shouldCompileTo "p Hello", "<p>Hello</p>"
test "with more complex text", ->
shouldCompileTo "p Hello, how's it going with you today?", "<p>Hello, how's it going with you today?</p>"
test "with trailing space", ->
shouldCompileTo "p Hello ", "<p>Hello </p>"
suite "html multi-lines"
test "two lines", ->
emblem =
"""
p This is
pretty cool.
"""
shouldCompileTo emblem, "<p>This is pretty cool.</p>"
test "three lines", ->
emblem =
"""
p This is
pretty damn
cool.
"""
shouldCompileTo emblem, "<p>This is pretty damn cool.</p>"
test "three lines w/ embedded html", ->
emblem =
"""
p This is
pretty <span>damn</span>
cool.
"""
shouldCompileTo emblem, "<p>This is pretty <span>damn</span> cool.</p>"
test "indentation doesn't need to match starting inline content's", ->
emblem =
"""
span Hello,
How are you?
"""
shouldCompileTo emblem, "<span>Hello, How are you?</span>"
test "indentation may vary between parent/child, must be consistent within inline-block", ->
emblem =
"""
div
span Hello,
How are you?
Excellent.
p asd
"""
shouldCompileTo emblem, "<div><span>Hello, How are you? Excellent.</span><p>asd</p></div>"
emblem =
"""
div
span Hello,
How are you?
Excellent.
"""
shouldThrow -> CompilerContext.compile emblem
test "indentation may vary between parent/child, must be consistent within inline-block pt 2", ->
emblem =
"""
div
span Hello,
How are you?
Excellent.
"""
shouldCompileTo emblem, "<div><span>Hello, How are you? Excellent.</span></div>"
test "w/ mustaches", ->
emblem =
"""
div
span Hello,
{{foo}} are you?
Excellent.
"""
shouldCompileTo emblem, { foo: "YEAH" }, "<div><span>Hello, YEAH are you? Excellent.</span></div>"
test "w/ block mustaches", ->
emblem =
'''
p Hello, #{ sally | Hello},
and {{sally: span Hello}}!
'''
shouldCompileTo emblem,
'<p>Hello, <sally class="none">Hello</sally>, and <sally class="none"><span>Hello</span></sally>!</p>'
emblem =
'''
p Hello, #{ sally: span: a Hello}!
'''
shouldCompileTo emblem,
'<p>Hello, <sally class="none"><span><a>Hello</a></span></sally>!</p>'
test "with followup", ->
emblem =
"""
p This is
pretty cool.
p Hello.
"""
shouldCompileTo emblem, "<p>This is pretty cool.</p><p>Hello.</p>"
suite '#{} syntax'
test 'acts like {{}}', ->
emblem =
'''
span Yo #{foo}, I herd.
'''
shouldCompileTo emblem,
{ foo: '<span>123</span>' },
"<span>Yo <span>123</span>, I herd.</span>"
test 'can start inline content', ->
emblem =
'''
span #{foo}, I herd.
'''
shouldCompileTo emblem, { foo: "dawg" }, "<span>dawg, I herd.</span>"
test 'can end inline content', ->
emblem =
'''
span I herd #{foo}
'''
shouldCompileTo emblem, { foo: "dawg" }, "<span>I herd dawg</span>"
test "doesn't screw up parsing when # used in text nodes", ->
emblem =
'''
span OMG #YOLO
'''
shouldCompileTo emblem, "<span>OMG #YOLO</span>"
test "# can be only thing on line", ->
emblem =
'''
span #
'''
shouldCompileTo emblem, "<span>#</span>"
### TODO: this
test "can be escaped", ->
emblem =
'''
span #\\{yes}
'''
shouldCompileTo emblem, '<span>#{yes}</span>'
###
runTextLineSuite = (ch) ->
sct = (emblem, obj, expected) ->
unless expected?
expected = obj
obj = {}
unless ch == '`'
expected = expected.replace /\n/g, ""
# Replace tabs with optional trailing whitespace.
if ch == "'"
expected = expected.replace /\t/g, " "
else
expected = expected.replace /\t/g, ""
emblem = emblem.replace /_/g, ch
shouldCompileTo emblem, obj, expected
suite "text lines starting with '#{ch}'"
test "basic", -> sct "_ What what", "What what\n\t"
test "with html", ->
sct '_ What <span id="woot" data-t="oof" class="f">what</span>!',
'What <span id="woot" data-t="oof" class="f">what</span>!\n\t'
test "multiline", ->
emblem =
"""
_ Blork
Snork
"""
sct emblem, "Blork\nSnork\n\t"
test "triple multiline", ->
emblem =
"""
_ Blork
Snork
Bork
"""
sct emblem, "Blork\nSnork\nBork\n\t"
test "quadruple multiline", ->
emblem =
"""
_ Blork
Snork
Bork
Fork
"""
sct emblem, "Blork\nSnork\nBork\nFork\n\t"
test "multiline w/ trailing whitespace", ->
emblem =
"""
_ Blork
Snork
"""
sct emblem, "Blork \nSnork\n\t"
test "secondline", ->
emblem =
"""
_
Good
"""
sct emblem, "Good\n\t"
test "secondline multiline", ->
emblem =
"""
_
Good
Bork
"""
sct emblem, "Good\nBork\n\t"
test "with a mustache", ->
emblem =
"""
_ Bork {{foo}}!
"""
sct emblem,
{ foo: "YEAH" },
'Bork YEAH!\n\t'
test "with mustaches", ->
emblem =
"""
_ Bork {{foo}} {{{bar}}}!
"""
sct emblem,
{ foo: "YEAH", bar: "<span>NO</span>"},
'Bork YEAH <span>NO</span>!\n\t'
test "indented, then in a row", ->
expect(0)
return "PENDING"
emblem =
"""
_
Good
riddance2
dude
gnar
foo
"""
sct emblem, "Good\n riddance2\n dude\n gnar\n foo\n\t"
test "indented, then in a row, then indented", ->
expect(0)
return "PENDING"
emblem =
"""
_
Good
riddance2
dude
gnar
foo
far
faz
"""
sct emblem, "Good \n riddance2 \n dude \n gnar \n foo \n far \n faz \n\t"
test "uneven indentation megatest", ->
expect(0)
return "PENDING"
emblem =
"""
_
Good
riddance
dude
"""
sct emblem, "Good\n riddance\ndude\n\t"
emblem =
"""
_
Good
riddance3
dude
"""
sct emblem, "Good\n riddance3\n dude\n\t"
emblem =
"""
_ Good
riddance
dude
"""
sct emblem, "Good\nriddance\n dude\n\t"
test "on each line", ->
emblem =
"""
pre
_ This
_ should
_ hopefully
_ work, and work well.
"""
sct emblem, '<pre>This\n\t should\n\t hopefully\n\t work, and work well.\n\t</pre>'
test "with blank", ->
emblem =
"""
pre
_ This
_ should
_
_ hopefully
_ work, and work well.
"""
sct emblem, '<pre>This\n\t should\n\t\n\t hopefully\n\t work, and work well.\n\t</pre>'
runTextLineSuite '|'
runTextLineSuite '`'
runTextLineSuite "'"
suite "text line starting with angle bracket"
test "can start with angle bracket html", ->
emblem =
"""
<span>Hello</span>
"""
shouldCompileTo emblem, "<span>Hello</span>"
test "can start with angle bracket html and go to multiple lines", ->
emblem =
"""
<span>Hello dude,
what's up?</span>
"""
shouldCompileTo emblem, "<span>Hello dude, what's up?</span>"
suite "preprocessor"
test "it strips out preceding whitespace", ->
emblem =
"""
p Hello
"""
shouldCompileTo emblem, "<p>Hello</p>"
test "it handles preceding indentation", ->
emblem = " p Woot\n p Ha"
shouldCompileTo emblem, "<p>Woot</p><p>Ha</p>"
test "it handles preceding indentation and newlines", ->
emblem = "\n p Woot\n p Ha"
shouldCompileTo emblem, "<p>Woot</p><p>Ha</p>"
test "it handles preceding indentation and newlines pt 2", ->
emblem = " \n p Woot\n p Ha"
shouldCompileTo emblem, "<p>Woot</p><p>Ha</p>"
suite "comments"
test "it strips out single line '/' comments", ->
emblem =
"""
p Hello
/ A comment
h1 How are you?
"""
shouldCompileTo emblem, "<p>Hello</p><h1>How are you?</h1>"
test "it strips out multi-line '/' comments", ->
emblem =
"""
p Hello
/ A comment
that goes on to two lines
even three!
h1 How are you?
"""
shouldCompileTo emblem, "<p>Hello</p><h1>How are you?</h1>"
test "it strips out multi-line '/' comments without text on the first line", ->
emblem =
"""
p Hello
/
A comment
that goes on to two lines
even three!
h1 How are you?
"""
shouldCompileTo emblem, "<p>Hello</p><h1>How are you?</h1>"
test "mix and match with various indentation", ->
emblem =
"""
/ A test
p Hello
span
/ This is gnarly
p Yessir nope.
/ Nothin but comments
so many comments.
/
p Should not show up
"""
shouldCompileTo emblem, "<p>Hello</p><span><p>Yessir nope.</p></span>"
test "uneven indentation", ->
emblem =
"""
/ nop
nope
nope
"""
shouldCompileTo emblem, ""
test "uneven indentation 2", ->
emblem =
"""
/ n
no
nop
nope
"""
shouldCompileTo emblem, ""
test "uneven indentation 3", ->
emblem =
"""
/ n
no
nop
nope
"""
shouldCompileTo emblem, ""
test "empty first line", ->
emblem =
"""
/
nop
nope
nope
no
"""
shouldCompileTo emblem, ""
test "on same line as html content", ->
emblem =
"""
.container / This comment doesn't show up
.row / Nor does this
p Hello
"""
shouldCompileTo emblem, '<div class="container"><div class="row"><p>Hello</p></div></div>'
test "on same line as mustache content", ->
shouldCompileTo 'frank text="YES" text2="NO" / omg', 'WOO: YES NO'
test "on same line as colon syntax", ->
emblem =
"""
ul: li: span / omg
| Hello
"""
shouldCompileTo emblem, '<ul><li><span>Hello</span></li></ul>'
suite "indentation"
# This test used to make sure the emblem code threw, but now we
# support multi-line syntax.
test "it doesn't throw when indenting after a line with inline content", ->
emblem =
"""
p Hello
p invalid
"""
shouldCompileTo emblem, "<p>Hello p invalid</p>"
test "it throws on half dedent", ->
emblem =
"""
p
span This is ok
span This aint
"""
shouldThrow -> CompilerContext.compile emblem
test "new indentation levels don't have to match parents'", ->
emblem =
"""
p
span
div
span yes
"""
shouldCompileTo emblem, "<p><span><div><span>yes</span></div></span></p>"
suite "whitespace fussiness"
test "spaces after html elements", ->
shouldCompileTo "p \n span asd", "<p><span>asd</span></p>"
shouldCompileTo "p \nspan \n\ndiv\nspan", "<p></p><span></span><div></div><span></span>"
test "spaces after mustaches", ->
shouldCompileTo "each foo \n p \n span", { foo: [1,2] }, "<p></p><span></span><p></p><span></span>"
suite "attribute shorthand"
test "id shorthand", ->
shouldCompileTo "#woot", '<div id="woot"></div>'
shouldCompileTo "span#woot", '<span id="woot"></span>'
test "class shorthand", ->
shouldCompileTo ".woot", '<div class="woot"></div>'
shouldCompileTo "span.woot", '<span class="woot"></span>'
shouldCompileTo "span.woot.loot", '<span class="woot loot"></span>'
test "class can come first", ->
shouldCompileTo ".woot#hello", '<div id="hello" class="woot"></div>'
shouldCompileTo "span.woot#hello", '<span id="hello" class="woot"></span>'
shouldCompileTo "span.woot.loot#hello", '<span id="hello" class="woot loot"></span>'
shouldCompileTo "span.woot.loot#hello.boot", '<span id="hello" class="woot loot boot"></span>'
suite "full attributes - tags with content"
test "class only", ->
shouldCompileTo 'p class="yes" Blork', '<p class="yes">Blork</p>'
test "id only", ->
shouldCompileTo 'p id="yes" Hyeah', '<p id="yes">Hyeah</p>'
test "class and id", ->
shouldCompileTo 'p id="yes" class="no" Blork', '<p id="yes" class="no">Blork</p>'
test "class and id and embedded html one-liner", ->
shouldCompileTo 'p id="yes" class="no" One <b>asd</b>!', '<p id="yes" class="no">One <b>asd</b>!</p>'
test "nesting", ->
emblem =
"""
p class="hello" data-foo="gnarly"
span Yes
"""
shouldCompileTo emblem, '<p class="hello" data-foo="gnarly"><span>Yes</span></p>'
suite "full attributes - mixed quotes"
test "single empty", ->
shouldCompileTo "p class=''", '<p class=""></p>'
test "single full", ->
shouldCompileTo "p class='woot yeah'", '<p class="woot yeah"></p>'
test "mixed", ->
shouldCompileTo "p class='woot \"oof\" yeah'", '<p class="woot "oof" yeah"></p>'
suite "full attributes - tags without content"
test "empty", ->
shouldCompileTo 'p class=""', '<p class=""></p>'
test "class only", ->
shouldCompileTo 'p class="yes"', '<p class="yes"></p>'
test "id only", ->
shouldCompileTo 'p id="yes"', '<p id="yes"></p>'
test "class and id", ->
shouldCompileTo 'p id="yes" class="no"', '<p id="yes" class="no"></p>'
suite "full attributes w/ mustaches"
test "with mustache", ->
shouldCompileTo 'p class="foo {{yes}}"', {yes: "ALEX"}, '<p class="foo ALEX"></p>'
shouldCompileTo 'p class="foo {{yes}}" Hello', {yes: "ALEX"}, '<p class="foo ALEX">Hello</p>'
emblem =
"""
p class="foo {{yes}}"
| Hello
"""
shouldCompileTo emblem, {yes: "ALEX"}, '<p class="foo ALEX">Hello</p>'
test "with mustache calling helper", ->
shouldCompileTo 'p class="foo {{{echo "YES"}}}"', '<p class="foo ECHO YES"></p>'
shouldCompileTo 'p class="foo #{echo "NO"} and {{{echo "YES"}}}" Hello', '<p class="foo ECHO NO and ECHO YES">Hello</p>'
emblem =
"""
p class="foo {{echo "BORF"}}"
| Hello
"""
shouldCompileTo emblem, '<p class="foo ECHO BORF">Hello</p>'
suite "boolean attributes"
test "static", ->
shouldCompileTo 'p borf=true', '<p borf></p>'
shouldCompileTo 'p borf=true Woot', '<p borf>Woot</p>'
shouldCompileTo 'p borf=false', '<p></p>'
shouldCompileTo 'p borf=false Nork', '<p>Nork</p>'
shouldCompileTo 'option selected=true Thingeroo', '<option selected>Thingeroo</option>'
#test "dynamic", ->
## TODO
#shouldCompileTo 'p borf=foo', { foo: true }, '<p borf></p>'
#shouldCompileTo 'p borf=foo', { foo: false }, '<p></p>'
#shouldCompileTo 'p borf=foo Yeah', { foo: true }, '<p borf>Yeah</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: false }, '<p>Naww</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: null }, '<p>Naww</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: undefined }, '<p>Naww</p>'
#shouldCompileTo 'p borf=foo Naww', { foo: 0 }, '<p borf="0">Naww</p>'
suite "html nested"
test "basic", ->
emblem =
"""
p
span Hello
strong Hi
div
p Hooray
"""
shouldCompileTo emblem, '<p><span>Hello</span><strong>Hi</strong></p><div><p>Hooray</p></div>'
test "empty nest", ->
emblem =
"""
p
span
strong
i
"""
shouldCompileTo emblem, '<p><span><strong><i></i></strong></span></p>'
test "empty nest w/ attribute shorthand", ->
emblem =
"""
p.woo
span#yes
strong.no.yes
i
"""
shouldCompileTo emblem, '<p class="woo"><span id="yes"><strong class="no yes"><i></i></strong></span></p>'
suite "simple mustache"
test "various one-liners", ->
emblem =
"""
= foo
arf
p = foo
span.foo
p data-foo="yes" = goo
"""
shouldCompileTo emblem,
{ foo: "ASD", arf: "QWE", goo: "WER" },
'ASDQWE<p>ASD</p><span class="foo"></span><p data-foo="yes">WER</p>'
test "double =='s un-escape", ->
emblem =
"""
== foo
foo
p == foo
"""
shouldCompileTo emblem,
{ foo: '<span>123</span>' },
'<span>123</span><span>123</span><p><span>123</span></p>'
test "nested combo syntax", ->
emblem =
"""
ul = each items
li = foo
"""
shouldCompileTo emblem,
{ items: [ { foo: "YEAH"}, { foo: "BOI" } ] },
'<ul><li>YEAH</li><li>BOI</li></ul>'
suite "mustache helpers"
# Reports whether the `what` hash value is literally boolean true, boolean
# false, or anything else (e.g. the strings "true"/"false").
Handlebars.registerHelper 'booltest', (options) ->
  switch options.hash.what
    when true then "true"
    when false then "false"
    else "neither"
# Report the JS typeof of the `what` hash value (hash-parameter typing).
Handlebars.registerHelper 'hashtypetest', (options) ->
  typeof options.hash.what
# Report the JS typeof of the first positional parameter.
Handlebars.registerHelper 'typetest', (num, options) ->
  typeof num
# Renders its `text` and `text2` hash parameters, for hash-parsing tests.
Handlebars.registerHelper 'frank', ->
  hash = arguments[arguments.length - 1].hash
  "WOO: #{hash.text} #{hash.text2}"
# Wraps content in <sally class="firstParam">…</sally>. In block form the
# block body is the content; inline, the first parameter doubles as content.
# Defaults the class to 'none' when no parameter is given.
Handlebars.registerHelper 'sally', ->
  options = arguments[arguments.length - 1]
  params = Array::slice.call arguments, 0, -1
  param = params[0] || 'none'
  content = if options.fn then options.fn @ else param
  new Handlebars.SafeString """<sally class="#{param}">#{content}</sally>"""
test "basic", -> shouldCompileTo 'echo foo', {foo: "YES"}, 'ECHO YES'
test "hashed parameters should work", ->
shouldCompileTo 'frank text="YES" text2="NO"', 'WOO: YES NO'
# Renders every hash pair as 'key'='value', sorted and space-joined, so hash
# parameter parsing can be asserted deterministically regardless of hash order.
Handlebars.registerHelper 'concatenator', ->
  options = arguments[arguments.length - 1]
  new Handlebars.SafeString ("'#{key}'='#{value}'" for key, value of options.hash).sort().join( " " )
test "negative integers should work", ->
shouldCompileTo 'concatenator positive=100 negative=-100', "'negative'='-100' 'positive'='100'"
test "booleans", ->
shouldCompileToString 'typetest true', 'boolean'
shouldCompileToString 'typetest false', 'boolean'
shouldCompileTo 'booltest what=false', 'false'
shouldCompileTo 'booltest what=true', 'true'
shouldCompileTo 'booltest what="false"', 'neither'
shouldCompileTo 'booltest what="true"', 'neither'
test "integers", ->
shouldCompileToString 'typetest 200', 'number'
shouldCompileTo 'hashtypetest what=1', 'number'
shouldCompileTo 'hashtypetest what=200', 'number'
test "nesting", ->
emblem =
"""
sally
p Hello
"""
shouldCompileTo emblem, '<sally class="none"><p>Hello</p></sally>'
test "recursive nesting", ->
emblem =
"""
sally
sally
p Hello
"""
shouldCompileTo emblem, '<sally class="none"><sally class="none"><p>Hello</p></sally></sally>'
test "recursive nesting pt 2", ->
emblem =
"""
sally
sally thing
p Hello
"""
shouldCompileTo emblem, { thing: "woot" }, '<sally class="none"><sally class="woot"><p>Hello</p></sally></sally>'
# Stand-in for Ember's `view` helper so vanilla-Handlebars runs can assert on
# rendered strings. Emits <param hash-pairs>content</param>, where content is
# the block body when present (otherwise the param itself), and " nohash"
# marks an empty hash.
Handlebars.registerHelper 'view', (param, a, b, c) ->
  options = arguments[arguments.length - 1]
  content = param
  content = options.fn @ if options.fn
  hashString = ""
  for own k,v of options.hash
    hashString += " #{k}=#{v}"
  hashString = " nohash" unless hashString
  new Handlebars.SafeString """<#{param}#{hashString}>#{content}</#{param}>"""
suite "capitalized line-starter"
test "should invoke `view` helper by default", ->
emblem =
"""
SomeView
"""
shouldEmberPrecompileToHelper emblem, 'view'
#shouldCompileToString emblem, '<SomeView nohash>SomeView</SomeView>'
test "should not invoke `view` helper for vanilla HB", ->
emblem =
"""
SomeView
"""
shouldCompileToString emblem, {SomeView: "ALEX"}, 'ALEX'
test "should support block mode", ->
emblem =
"""
SomeView
p View content
"""
#shouldCompileToString emblem, '<SomeView nohash><p>View content</p></SomeView>'
shouldEmberPrecompileToHelper emblem, 'view'
test "should not kick in if preceded by equal sign", ->
emblem =
"""
= SomeView
"""
shouldCompileTo emblem, { SomeView: 'erp' }, 'erp'
test "should not kick in explicit {{mustache}}", ->
emblem =
"""
p Yeah {{SomeView}}
"""
shouldCompileTo emblem, { SomeView: 'erp' }, '<p>Yeah erp</p>'
# TODO test overriding the default helper name (instead of always "view")
suite "bang syntax defaults to `unbound` helper syntax"
# Stand-in for Ember's `unbound` helper: wraps content in
# <unbound class="space-joined params">…</unbound> so the bang syntax
# (`foo!`) can be asserted on. Block content wins over the joined params.
Handlebars.registerHelper 'unbound', ->
  options = arguments[arguments.length - 1]
  positional = Array::slice.call arguments, 0, -1
  joined = positional.join(' ')
  body = if options.fn then options.fn @ else joined
  new Handlebars.SafeString """<unbound class="#{joined}">#{body}</unbound>"""
test "bang helper defaults to `unbound` invocation", ->
emblem =
"""
foo! Yar
= foo!
"""
shouldCompileToString emblem, '<unbound class="foo Yar">foo Yar</unbound><unbound class="foo">foo</unbound>'
test "bang helper works with blocks", ->
emblem =
"""
hey! you suck
= foo!
"""
shouldCompileToString emblem, '<unbound class="hey you suck"><unbound class="foo">foo</unbound></unbound>'
suite "question mark syntax defaults to `if` helper syntax"
test "? helper defaults to `if` invocation", ->
emblem =
"""
foo?
p Yeah
"""
shouldCompileTo emblem, { foo: true }, '<p>Yeah</p>'
test "else works", ->
emblem =
"""
foo?
p Yeah
else
p No
"""
shouldCompileTo emblem, { foo: false }, '<p>No</p>'
test "compound", ->
emblem =
"""
p = foo?
| Hooray
else
| No
p = bar?
| Hooray
else
| No
"""
shouldCompileTo emblem, { foo: true, bar: false }, '<p>Hooray</p><p>No</p>'
test "compound", ->
emblem =
"""
p = foo?
bar
else
baz
"""
shouldCompileTo emblem, { foo: true, bar: "borf", baz: "narsty" }, '<p>borf</p>'
suite "conditionals"
test "simple if statement", ->
emblem =
"""
if foo
| Foo
if bar
| Bar
"""
shouldCompileTo emblem, {foo: true, bar: false}, 'Foo'
test "if else ", ->
emblem =
"""
if foo
| Foo
if bar
| Bar
else
| Woot
else
| WRONG
if bar
| WRONG
else
| Hooray
"""
shouldCompileTo emblem, {foo: true, bar: false}, 'FooWootHooray'
test "else with preceding `=`", ->
emblem =
"""
= if foo
p Yeah
= else
p No
= if bar
p Yeah!
= else
p No!
=if bar
p Yeah!
=else
p No!
"""
shouldCompileTo emblem, {foo: true, bar: false}, '<p>Yeah</p><p>No!</p><p>No!</p>'
test "unless", ->
emblem =
"""
unless bar
| Foo
unless foo
| Bar
else
| Woot
else
| WRONG
unless foo
| WRONG
else
| Hooray
"""
shouldCompileTo emblem, {foo: true, bar: false}, 'FooWootHooray'
test "else followed by newline doesn't gobble else content", ->
emblem =
"""
if something
p something
else
if nothing
p nothing
else
p not nothing
"""
shouldCompileTo emblem, {}, '<p>not nothing</p>'
suite "class shorthand and explicit declaration is coalesced"
test "when literal class is used", ->
shouldCompileTo 'p.foo class="bar"', '<p class="foo bar"></p>'
test "when ember expression is used with variable", ->
shouldCompileTo 'p.foo class=bar', {bar: 'baz'}, '<p bind-attr class to :foo bar></p>'
test "when ember expression is used with variable in braces", ->
result = shouldEmberPrecompileToHelper 'p.foo class={ bar }'
ok -1 != result.indexOf '\'class\': (":foo bar")'
test "when ember expression is used with constant in braces", ->
result = shouldEmberPrecompileToHelper 'p.foo class={ :bar }'
ok -1 != result.indexOf '\'class\': (":foo :bar")'
test "when ember expression is used with constant and variable in braces", ->
result = shouldEmberPrecompileToHelper 'p.foo class={ :bar bar }'
ok -1 != result.indexOf '\'class\': (":foo :bar bar")'
test "when ember expression is used with bind-attr", ->
result = shouldEmberPrecompileToHelper 'p.foo{ bind-attr class="bar" }'
ok -1 != result.indexOf '\'class\': (":foo bar")'
test "when ember expression is used with bind-attr and multiple attrs", ->
result = shouldEmberPrecompileToHelper 'p.foo{ bind-attr something=bind class="bar" }'
ok -1 != result.indexOf '\'class\': (":foo bar")'
test "only with bind-attr helper", ->
result = shouldEmberPrecompileToHelper 'p.foo{ someHelper class="bar" }', 'someHelper'
ok -1 != result.indexOf '\'class\': ("bar")'
ok -1 != result.indexOf 'class=\\"foo\\"'
# Fake `bind-attr` helper: renders "bind-attr k to v ..." (or "bind-attr narf"
# when the hash is empty) so tests can assert which bindings were parsed.
# Registered on both vanilla and Ember Handlebars.
bindAttrHelper = ->
  options = arguments[arguments.length - 1]
  params = Array::slice.call arguments, 0, -1
  bindingString = ""
  for own k,v of options.hash
    bindingString += " #{k} to #{v}"
  bindingString = " narf" unless bindingString
  param = params[0] || 'none'  # NOTE(review): computed but never used
  "bind-attr#{bindingString}"
Handlebars.registerHelper 'bind-attr', bindAttrHelper
EmberHandlebars.registerHelper 'bind-attr', bindAttrHelper
suite "bind-attr behavior for unquoted attribute values"
test "basic", ->
emblem = 'p class=foo'
shouldCompileTo emblem, {foo:"YEAH"}, '<p class="YEAH"></p>'
shouldEmberPrecompileToHelper emblem
test "basic w/ underscore", ->
emblem = 'p class=foo_urns'
shouldCompileTo emblem, {foo_urns: "YEAH"}, '<p class="YEAH"></p>'
shouldEmberPrecompileToHelper emblem
test "subproperties", ->
emblem = 'p class=foo._death.woot'
shouldCompileTo emblem, {foo: { _death: { woot: "YEAH" } }}, '<p class="YEAH"></p>'
shouldEmberPrecompileToHelper emblem
test "multiple", ->
shouldCompileTo 'p class=foo id="yup" data-thinger=yeah Hooray', { foo: "FOO", yeah: "YEAH" },
'<p class="FOO" id="yup" data-thinger="YEAH">Hooray</p>'
test "class bind-attr special syntax", ->
emblem = 'p class=foo:bar:baz'
shouldEmberPrecompileToHelper emblem
shouldThrow (-> CompilerContext.compile emblem)
test "class bind-attr braced syntax w/ underscores and dashes", ->
shouldEmberPrecompileToHelper 'p class={f-oo:bar :b_az}'
shouldEmberPrecompileToHelper 'p class={ f-oo:bar :b_az }'
shouldEmberPrecompileToHelper 'p class={ f-oo:bar :b_az } Hello'
emblem =
"""
.input-prepend class={ filterOn:input-append }
span.add-on
"""
shouldEmberPrecompileToHelper emblem
test "exclamation modifier (vanilla)", ->
emblem = 'p class=foo!'
# exclamation is no-op in vanilla HB
shouldCompileTo emblem, {foo:"YEAH"}, '<p class="YEAH"></p>'
test "exclamation modifier (ember)", ->
emblem = 'p class=foo!'
result = precompileEmber emblem
ok result.match /p class/
ok result.match /helpers\.unbound.*foo/
suite "in-tag explicit mustache"
Handlebars.registerHelper 'inTagHelper', (p) ->
return p;
test "single", ->
shouldCompileTo 'p{inTagHelper foo}', {foo: "ALEX"}, '<p ALEX></p>'
test "double", ->
shouldCompileTo 'p{inTagHelper foo}', {foo: "ALEX"}, '<p ALEX></p>'
test "triple", ->
shouldCompileTo 'p{inTagHelper foo}', {foo: "ALEX"}, '<p ALEX></p>'
Handlebars.registerHelper 'insertClass', (p) ->
return 'class="' + p + '"'
test "with singlestache", ->
shouldCompileTo 'p{insertClass foo} Hello', {foo: "yar"}, '<p class="yar">Hello</p>'
test "singlestache can be used in text nodes", ->
shouldCompileTo 'p Hello {dork}', '<p>Hello {dork}</p>'
test "with doublestache", ->
shouldCompileTo 'p{{insertClass foo}} Hello', {foo: "yar"}, '<p class="yar">Hello</p>'
test "with triplestache", ->
shouldCompileTo 'p{{{insertClass foo}}} Hello', {foo: "yar"}, '<p class="yar">Hello</p>'
test "multiple", ->
shouldCompileTo 'p{{{insertClass foo}}}{{{insertClass boo}}} Hello',
{foo: "yar", boo: "nar"},
'<p class="yar" class="nar">Hello</p>'
test "with nesting", ->
emblem =
"""
p{{bind-attr class="foo"}}
span Hello
"""
shouldCompileTo emblem, {foo: "yar"},
'<p bind-attr class to foo><span>Hello</span></p>'
suite "actions"
# Fake `action` helper: renders "action p1|p2 k=v ..." (" nohash" when the
# hash is empty) so action parsing can be asserted as a plain string.
Handlebars.registerHelper 'action', ->
  options = arguments[arguments.length - 1]
  params = Array::slice.call arguments, 0, -1
  hashString = ""
  paramsString = params.join('|')
  # TODO: relies on hash iteration order; not guaranteed, but harmless here
  # since ordering is not what these tests assert.
  for own k,v of options.hash
    hashString += " #{k}=#{v}"
  hashString = " nohash" unless hashString
  "action #{paramsString}#{hashString}"
test "basic (click)", ->
emblem =
"""
button click="submitComment" Submit Comment
"""
shouldCompileToString emblem, '<button action submitComment on=click>Submit Comment</button>'
test "basic (click) followed by attr", ->
emblem =
"""
button click="submitComment" class="foo" Submit Comment
"""
shouldCompileToString emblem, '<button action submitComment on=click class="foo">Submit Comment</button>'
emblem =
"""
button click="submitComment 'omg'" class="foo" Submit Comment
"""
shouldCompileToString emblem, '<button action submitComment|omg on=click class="foo">Submit Comment</button>'
test "nested (mouseEnter)", ->
emblem =
"""
a mouseEnter='submitComment target="view"'
| Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view on=mouseEnter>Submit Comment</a>'
test "nested (mouseEnter, doublequoted)", ->
emblem =
"""
a mouseEnter="submitComment target='view'"
| Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view on=mouseEnter>Submit Comment</a>'
test "manual", ->
emblem =
"""
a{action submitComment target="view"} Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view>Submit Comment</a>'
test "manual nested", ->
emblem =
"""
a{action submitComment target="view"}
p Submit Comment
"""
shouldCompileToString emblem, '<a action submitComment target=view><p>Submit Comment</p></a>'
suite "haml style"
test "basic", ->
emblem =
"""
%borf
"""
shouldCompileToString emblem, '<borf></borf>'
test "nested", ->
emblem =
"""
%borf
%sporf Hello
"""
shouldCompileToString emblem, '<borf><sporf>Hello</sporf></borf>'
test "capitalized", ->
emblem =
"""
%Alex PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI
%Alex
%Woot
"""
shouldCompileToString emblem, '<Alex>alex</Alex><Alex><Woot></Woot></Alex>'
test "funky chars", ->
emblem =
"""
%borf:narf
%borf:narf Hello, {{foo}}.
%alex = foo
"""
shouldCompileToString emblem,
{ foo: "PI:NAME:<NAME>END_PI" },
'<borf:narf></borf:narf><borf:narf>Hello, PI:NAME:<NAME>END_PI.</borf:narf><alex>PI:NAME:<NAME>END_PI</alex>'
suite "line-based errors"
test "line number is provided for pegjs error", ->
emblem =
"""
p Hello
p Hello {{narf}
"""
shouldThrow (-> CompilerContext.compile emblem), "line 2"
# https://github.com/machty/emblem.js/issues/6
test "single quote test", ->
emblem =
"""
button click='p' PI:NAME:<NAME>END_PI
/ form s='d target="App"'
label I'm a label!
"""
shouldCompileToString emblem, '<button action p on=click>PI:NAME:<NAME>END_PI</button>'
test "double quote test", ->
emblem =
"""
button click="p" PI:NAME:<NAME>END_PI
/ form s='d target="App"'
label I'm a label!
"""
shouldCompileToString emblem, '<button action p on=click>PI:NAME:<NAME>END_PI</button>'
test "no quote test", ->
emblem =
"""
button click=p PI:NAME:<NAME>END_PI
/ form s='d target="App"'
label I'm a label!
"""
shouldCompileToString emblem, '<button action p on=click>PI:NAME:<NAME>END_PI</button>'
suite "mustache DOM attribute shorthand"
test "tagName w/o space", ->
emblem =
"""
App.FunView%span
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /tagName.*span/
test "tagName w/ space", ->
emblem =
"""
App.FunView %span
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /tagName.*span/
test "tagName block", ->
emblem =
"""
view App.FunView%span
p Hello
"""
shouldCompileToString emblem, '<App.FunView tagName=span><p>Hello</p></App.FunView>'
test "class w/ space (needs space)", ->
emblem =
"""
App.FunView .bork
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /class.*bork/
test "multiple classes", ->
emblem =
"""
App.FunView .bork.snork
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /class.*bork.*snork/
test "elementId", ->
emblem =
"""
App.FunView#ohno
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /elementId.*ohno/
test "mixed w/ hash`", ->
emblem =
"""
App.FunView .bork.snork funbags="yeah"
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /class.*bork.*snork/
ok result.match /hash/
ok result.match /funbags/
ok result.match /yeah/
test "mixture of all`", ->
emblem =
"""
App.FunView%alex#hell.bork.snork funbags="yeah"
"""
result = precompileEmber emblem
ok result.match /helpers\.view/
ok result.match /App\.FunView/
ok result.match /tagName.*alex/
ok result.match /elementId.*hell/
ok result.match /class.*bork.*snork/
ok result.match /hash/
ok result.match /funbags/
ok result.match /yeah/
suite "self-closing html tags"
test "br", ->
emblem =
"""
br
"""
shouldCompileToString emblem, '<br />'
test "br paragraph example", ->
emblem =
"""
p
| LOL!
br
| BORF!
"""
shouldCompileToString emblem, '<p>LOL!<br />BORF!</p>'
test "input", ->
emblem =
"""
input type="text"
"""
shouldCompileToString emblem, '<input type="text" />'
suite "ember."
test "should precompile with EmberHandlebars", ->
emblem =
"""
input type="text"
"""
result = Emblem.precompile(EmberHandlebars, 'p Hello').toString()
ok result.match '<p>Hello</p>'
suite "old school handlebars"
test "array", ->
emblem =
'''
goodbyes
| #{text}!
| cruel #{world}!
'''
hash = {goodbyes: [{text: "goodbye"}, {text: "Goodbye"}, {text: "GOODBYE"}], world: "world"}
shouldCompileToString emblem, hash, "goodbye! Goodbye! GOODBYE! cruel world!"
hash = {goodbyes: [], world: "world"}
shouldCompileToString emblem, hash, "cruel world!"
Handlebars.registerPartial('hbPartial', '<a href="/people/{{id}}">{{name}}</a>')
test "calling handlebars partial", ->
emblem =
'''
> hbPartial
| Hello #{> hbPartial}
'''
shouldCompileToString emblem,
{ id: 666, name: "PI:NAME:<NAME>END_PI" },
'<a href="/people/666">Death</a>Hello <a href="/people/666">Death</a>'
Emblem.registerPartial(Handlebars, 'emblemPartial', 'a href="/people/{{id}}" = name')
Emblem.registerPartial(Handlebars, 'emblemPartialB', 'p Grr')
Emblem.registerPartial(Handlebars, 'emblemPartialC', 'p = a')
test "calling emblem partial", ->
shouldCompileToString '> emblemPartial', { id: 666, name: "PI:NAME:<NAME>END_PI" }, '<a href="/people/666">Death</a>'
test "calling emblem partial with context", ->
shouldCompileToString '> emblemPartialC foo', { foo: { a: "YES" } }, '<p>YES</p>'
test "partials in mustaches", ->
emblem =
"""
| Hello, {{> emblemPartialC foo}}{{>emblemPartialB}}{{>emblemPartialB }}
"""
shouldCompileToString emblem, { foo: { a: "YES" } }, 'Hello, <p>YES</p><p>Grr</p><p>Grr</p>'
test "handlebars dot-separated paths with segment-literal notation", ->
emblem =
'''
p = articles.[3]
'''
shouldCompileTo emblem, { articles: ['zero', 'one', 'two', 'three']}, '<p>three</p>'
test "handlebars dot-separated paths with segment-literal notation, more nesting", ->
emblem =
'''
p = articles.[3].[#comments].[0]
'''
shouldCompileTo emblem, { articles: [{}, {}, {}, {'#comments': ['bazinga']}]}, '<p>bazinga</p>'
test "../path as inMustacheParam recognized correctly as pathIdNode instead of classShorthand", ->
Handlebars.registerHelper 'jumpToParent', (link) ->
new Handlebars.SafeString "<a href='#{link}'>Jump to parent top</a>"
emblem =
'''
each children
jumpToParent ../parentLink
'''
shouldCompileTo emblem, {parentLink: '#anchor', children: [{}]}, '<a href=\'#anchor\'>Jump to parent top</a>'
test "block as #each", ->
emblem =
'''
thangs
p Woot #{yeah}
'''
shouldCompileToString emblem, { thangs: [{yeah: 123}, {yeah:456}] }, '<p>Woot 123</p><p>Woot 456</p>'
if supportsEachHelperDataKeywords
suite "each block helper keywords prefixed by @"
test "#each with @index", ->
emblem =
'''
thangs
p #{@index} Woot #{yeah}
'''
shouldCompileToString emblem, { thangs: [{yeah: 123}, {yeah:456}] }, '<p>0 Woot 123</p><p>1 Woot 456</p>'
test "#each with @key", ->
emblem =
'''
each thangs
p #{@key}: #{this}
'''
shouldCompileTo emblem, { thangs: {'@key': 123, 'works!':456} }, '<p>@key: 123</p><p>works!: 456</p>'
test "#each with @key, @index", ->
emblem =
'''
each thangs
p #{@index} #{@key}: #{this}
'''
shouldCompileTo emblem, { thangs: {'@key': 123, 'works!':456} }, '<p>0 @key: 123</p><p>1 works!: 456</p>'
test "#each with @key, @first", ->
emblem =
'''
each thangs
if @first
p First item
else
p #{@key}: #{this}
'''
shouldCompileTo emblem, { thangs: {'@key': 123, 'works!':456} }, '<p>First item</p><p>works!: 456</p>'
###
test "partial in block", ->
emblem =
"""
ul = people
> link
"""
data =
people: [
{ "name": "PI:NAME:<NAME>END_PI", "id": 1 }
{ "name": "PI:NAME:<NAME>END_PI", "id": 2 }
]
shouldCompileToString emblem, data, '<ul><a href="/people/1">PI:NAME:<NAME>END_PI</a><a href="/people/2">YPI:NAME:<NAME>END_PIuda</a><ul>'
###
#suite "helper hash"
#test "quoteless values get treated as bindings", ->
#emblem =
#"""
#view SomeView a=b
#| Yes
#"""
#shouldCompileToString emblem, '<SomeView aBinding=b>Yes</SomeView>'
#test "more complex", ->
#emblem =
#"""
#view SomeView a=b foo=thing.gnar
#"""
#shouldCompileToString emblem, '<SomeView aBinding=b fooBinding=thing.gnar>SomeView</SomeView>'
suite "inline block helper"
test "text only", ->
emblem =
"""
view SomeView | Hello
"""
shouldCompileToString emblem, '<SomeView nohash>Hello</SomeView>'
test "multiline", ->
emblem =
"""
view SomeView | Hello,
How are you?
Sup?
"""
shouldCompileToString emblem, '<SomeView nohash>Hello, How are you? Sup?</SomeView>'
test "more complicated", ->
emblem =
"""
view SomeView borf="yes" | Hello,
How are you?
Sup?
"""
shouldCompileToString emblem, '<SomeView borf=yes>Hello, How are you? Sup?</SomeView>'
suite "copy paste html"
test "indented", ->
emblem =
"""
<p>
<span>This be some text</span>
<title>Basic HTML Sample Page</title>
</p>
"""
shouldCompileToString emblem, '<p><span>This be some text</span><title>Basic HTML Sample Page</title></p>'
test "flatlina", ->
emblem =
"""
<p>
<span>This be some text</span>
<title>Basic HTML Sample Page</title>
</p>
"""
shouldCompileToString emblem, '<p><span>This be some text</span><title>Basic HTML Sample Page</title></p>'
test "bigass", ->
expect(0)
return "PENDING"
emblem =
"""
<div class="content">
<p>
We design and develop ambitious web and mobile applications,
</p>
<p>
A more official portfolio page is on its way, but in the meantime,
check out
</p>
</div>
"""
expected = '<div class="content"><p> We design and develop ambitious web and mobile applications, </p><p> A more official portfolio page is on its way, but in the meantime, check out</p></div>'
shouldCompileToString emblem, expected
suite "`this` keyword"
test "basic", ->
emblem = '''
each foo
p = this
this
'''
shouldCompileTo emblem,
{ foo: [ "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI" ] },
'<p>PI:NAME:<NAME>END_PI</p>PI:NAME:<NAME>END_PI<p>PI:NAME:<NAME>END_PI</p>PI:NAME:<NAME>END_PI'
suite "colon separator"
test "basic", ->
emblem = 'each foo: p Hello, #{this}'
shouldCompileTo emblem,
{ foo: [ "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI" ] },
'<p>Hello, PI:NAME:<NAME>END_PI</p><p>Hello, PI:NAME:<NAME>END_PI</p><p>Hello, PI:NAME:<NAME>END_PI</p>'
test "html stack", ->
emblem = '.container: .row: .span5: span Hello'
shouldCompileToString emblem,
'<div class="container"><div class="row"><div class="span5"><span>Hello</span></div></div></div>'
test "epic", ->
emblem = '''
.container: .row: .span5
ul#list data-foo="yes": each foo: li
span: this
'''
shouldCompileTo emblem, { foo: ["a","b"] },
'<div class="container"><div class="row"><div class="span5"><ul id="list" data-foo="yes"><li><span>a</span></li><li><span>b</span></li></ul></div></div></div>'
test "html stack elements only", ->
emblem = 'p: span: div: p: foo'
shouldCompileToString emblem, { foo: "alex" },
'<p><span><div><p>alex</p></div></span></p>'
test "mixed separators", ->
emblem = '.fun = each foo: %nork = this'
shouldCompileTo emblem,
{ foo: [ "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI" ] },
'<div class="fun"><nork>PI:NAME:<NAME>END_PI</nork><nork>PI:NAME:<NAME>END_PI</nork><nork>PI:NAME:<NAME>END_PI</nork></div>'
test "mixed separators rewritten", ->
emblem = '.fun: each foo: %nork: this'
shouldCompileTo emblem,
{ foo: [ "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI" ] },
'<div class="fun"><nork>PI:NAME:<NAME>END_PI</nork><nork>PI:NAME:<NAME>END_PI</nork><nork>PI:NAME:<NAME>END_PI</nork></div>'
test "with text terminator", ->
emblem = '.fun: view SomeView | Hello'
shouldCompileToString emblem, '<div class="fun"><SomeView nohash>Hello</SomeView></div>'
test "test from heartsentwined", ->
shouldCompileTo 'li data-foo=bar: a', { bar: "abc" }, '<li data-foo="abc"><a></a></li>'
shouldCompileTo "li data-foo='bar': a", '<li data-foo="bar"><a></a></li>'
test "mixture of colon and indentation", ->
emblem = """
li data-foo=bar: a
baz
"""
shouldCompileTo emblem, { bar: "abc", baz: "Hello" }, '<li data-foo="abc"><a>Hello</a></li>'
test "mixture of colon and indentation pt.2", ->
emblem = """
ul
li data-foo=bar: a quux
li data-foo='bar': a quux
li data-foo=bar href='#': a quux
"""
result = precompileEmber emblem
ok(!result.match "a quux")
suite "base indent / predent"
test "predent", ->
emblem = " \n"
s =
"""
pre
` This
` should
` hopefully
` work, and work well.
"""
emblem += s
shouldCompileToString emblem, '<pre>This\n should\n hopefully\n work, and work well.\n</pre>'
test "mixture", ->
emblem = " \n"
emblem += " p Hello\n"
emblem += " p\n"
emblem += " | Woot\n"
emblem += " span yes\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span>'
test "mixture w/o opening blank", ->
emblem = " p Hello\n"
emblem += " p\n"
emblem += " | Woot\n"
emblem += " span yes\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span>'
test "w/ blank lines", ->
emblem = " p Hello\n"
emblem += " p\n"
emblem += "\n"
emblem += " | Woot\n"
emblem += "\n"
emblem += " span yes\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span>'
test "w/ blank whitespaced lines", ->
emblem = " p Hello\n"
emblem += " p\n"
emblem += "\n"
emblem += " | Woot\n"
emblem += " \n"
emblem += " \n"
emblem += " \n"
emblem += "\n"
emblem += " span yes\n"
emblem += "\n"
emblem += " sally\n"
emblem += "\n"
emblem += " \n"
emblem += " | Woot\n"
shouldCompileToString emblem, '<p>Hello</p><p>Woot</p><span>yes</span><sally class="none">Woot</sally>'
suite "EOL Whitespace"
test "shouldn't be necessary to insert a space", ->
emblem =
"""
p Hello,
How are you?
p I'm fine, thank you.
"""
shouldCompileToString emblem, "<p>Hello, How are you?</p><p>I'm fine, thank you.</p>"
suite "misc."
test "end with indent", ->
expect(0)
return "PENDING"
emblem =
"""
div
p
span Butts
em fpokasd
iunw
paosdk
"""
shouldCompileToString emblem, '<div><p><span>Buttsem fpokasdiunw paosdk</span></p></div>'
test "capitalized view helper should not kick in if suffix modifiers present", ->
emblem =
"""
Foo!
"""
shouldCompileToString emblem, '<unbound class="Foo">Foo</unbound>'
test "GH-26: no need for space before equal sign", ->
emblem =
"""
span= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<span>YEAH</span>'
emblem =
"""
span.foo= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<span class="foo">YEAH</span>'
emblem =
"""
span#hooray.foo= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<span id="hooray" class="foo">YEAH</span>'
emblem =
"""
#hooray= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<div id="hooray">YEAH</div>'
emblem =
"""
.hooray= foo
"""
shouldCompileToString emblem, {foo: "YEAH"}, '<div class="hooray">YEAH</div>'
test "numbers in shorthand", ->
shouldCompileToString '#4a', '<div id="4a"></div>'
shouldCompileToString '.4a', '<div class="4a"></div>'
shouldCompileToString '.4', '<div class="4"></div>'
shouldCompileToString '#4', '<div id="4"></div>'
shouldCompileToString '%4', '<4></4>'
shouldCompileToString '%4 ermagerd', '<4>ermagerd</4>'
shouldCompileToString '%4#4.4 ermagerd', '<4 id="4" class="4">ermagerd</4>'
test "Emblem has a VERSION defined", ->
ok(Emblem.VERSION, "Emblem.VERSION should be defined")
test "Windows line endings", ->
emblem = ".navigation\r\n p Hello\r\n#main\r\n | hi"
shouldCompileToString emblem, '<div class="navigation"><p>Hello</p></div><div id="main">hi</div>'
test "backslash doesn't cause infinite loop", ->
emblem =
'''
| \\
'''
shouldCompileTo emblem, "\\"
test "backslash doesn't cause infinite loop with letter", ->
emblem =
'''
| \\a
'''
shouldCompileTo emblem, "\\a"
test "self closing tag with forward slash", ->
emblem =
'''
p/
%bork/
.omg/
#hello.boo/
p/ class="asdasd"
'''
shouldCompileTo emblem, '<p /><bork /><div class="omg" /><div id="hello" class="boo" /><p class="asdasd" />'
test "tagnames and attributes with colons", ->
emblem =
'''
%al:ex match:neer="snork" Hello!
'''
shouldCompileTo emblem, '<al:ex match:neer="snork">Hello!</al:ex>'
test "windows newlines", ->
emblem = "\r\n \r\n p Hello\r\n\r\n"
shouldCompileTo emblem, '<p>Hello</p>'
if supportsSubexpressions
suite "subexpressions"
Handlebars.registerHelper 'echo', (param) ->
"ECHO #{param}"
Handlebars.registerHelper 'echofun', ->
options = Array.prototype.pop.call(arguments)
"FUN = #{options.hash.fun}"
Handlebars.registerHelper 'hello', (param) ->
"hello"
Handlebars.registerHelper 'equal', (x, y) ->
x == y
test "arg-less helper", ->
emblem = 'p {{echo (hello)}}'
shouldCompileTo emblem, '<p>ECHO hello</p>'
emblem = '= echo (hello)'
shouldCompileTo emblem, 'ECHO hello'
test "helper w args", ->
emblem = 'p {{echo (equal 1 1)}}'
shouldCompileTo emblem, '<p>ECHO true</p>'
emblem = '= echo (equal 1 1)'
shouldCompileTo emblem, 'ECHO true'
test "supports much nesting", ->
emblem = 'p {{echo (equal (equal 1 1) true)}}'
shouldCompileTo emblem, '<p>ECHO true</p>'
emblem = '= echo (equal (equal 1 1) true)'
shouldCompileTo emblem, 'ECHO true'
test "with hashes", ->
emblem = 'p {{echo (equal (equal 1 1) true fun="yes")}}'
shouldCompileTo emblem, '<p>ECHO true</p>'
emblem = '= echo (equal (equal 1 1) true fun="yes")'
shouldCompileTo emblem, 'ECHO true'
test "as hashes", ->
emblem = 'p {{echofun fun=(equal 1 1)}}'
shouldCompileTo emblem, '<p>FUN = true</p>'
emblem = '= echofun fun=(equal 1 1)'
shouldCompileTo emblem, 'FUN = true'
test "complex expression", ->
emblem = 'p {{echofun true (hello how="are" you=false) 1 not=true fun=(equal "ECHO hello" (echo (hello))) win="yes"}}'
shouldCompileTo emblem, '<p>FUN = true</p>'
emblem = '= echofun true (hello how="are" you=false) 1 not=true fun=(equal "ECHO hello" (echo (hello))) win="yes"'
shouldCompileTo emblem, 'FUN = true'
|
[
{
"context": "ROUND HERE <text> - The Big Lebowski\n# Author:\n# skalnik\n\ninspect = require('util').inspect\nrequest = requ",
"end": 660,
"score": 0.99927818775177,
"start": 653,
"tag": "USERNAME",
"value": "skalnik"
},
{
"context": " = process.env.HUBOT_MEMEGEN_USERNAME\n passw... | src/meme-generator.coffee | benderTheCrime/hubot-meme-generator | 0 | # Description:
# Integrates with imgflip.net
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_MEMEGEN_USERNAME
# HUBOT_MEMEGEN_PASSWORD
#
# Commands:
# hubot memegen <text> (SUCCESS|NAILED IT) - Generates success kid with the top caption of <text>
# hubot memegen Not sure if <text> or <text> - Generates Futurama Fry
# hubot memegen Yo dawg <text> so <text> - Generates Yo Dawg
# hubot memegen if <text>, <word that can start a question> <text>? - Generates Philosoraptor
# hubot memegen one does not simply <text> - Lord of the Rings Boromir
# hubot memegen AM I THE ONLY ONE AROUND HERE <text> - The Big Lebowski
# Author:
# skalnik
inspect = require('util').inspect
request = require 'request'
url = 'https://api.imgflip.com/caption_image'
memes = [
{
regex: /(memegen )?(.*)(SUCCESS|NAILED IT.*)/i
generatorID: 61544
}
{
regex: /(memegen )?(NOT SURE IF .*) (OR .*)/i
generatorID: 61520
}
{
regex: /(memegen )?(YO DAWG .*) (SO .*)/i
generatorID: 101716
}
{
regex: /(memegen )?(one does not simply) (.*)/i
generatorID: 61579
}
{
regex: /(memegen )?(AM I THE ONLY ONE AROUND HERE) (.*)/i
generatorID: 259680
}
]
module.exports = (robot) ->
memeResponder(robot, meme) for meme in memes
robot.respond /(memegen )?(IF .*), ((ARE|CAN|DO|DOES|HOW|IS|MAY|MIGHT|SHOULD|THEN|WHAT|WHEN|WHERE|WHICH|WHO|WHY|WILL|WON\'T|WOULD)[ \'N].*)/i, (msg) ->
memeGenerator msg, 61516, msg.match[2], msg.match[3] + (if msg.match[3].search(/\?$/)==(-1) then '?' else ''), (img) ->
msg.send img
memeResponder = (robot, meme) ->
robot.respond meme.regex, (msg) ->
memeGenerator msg, meme.generatorID, msg.match[2], msg.match[3], (img) ->
msg.send img
memeGenerator = (msg, generatorID, text0, text1, cb) ->
username = process.env.HUBOT_MEMEGEN_USERNAME
password = process.env.HUBOT_MEMEGEN_PASSWORD
imgFlipUrl = url + objectToQueryString
template_id: generatorID
username: username
password: password
text0: text0
text1: text1
request.get imgFlipUrl, (e, res, body) ->
return if e
jsonBody = JSON.parse(body)
success = jsonBody?.success
return unless success
img = jsonBody.data?.url
unless img
msg.reply "Ugh, I got back weird results from imgflip.net. Expected an image URL, but couldn't find it in the result. Here's what I got:", inspect(jsonBody)
return
cb img
objectToQueryString = (obj) -> '?' + (
"#{k}=#{encodeURIComponent(v)}&" for k, v of obj
).join '' | 107152 | # Description:
# Integrates with imgflip.net
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_MEMEGEN_USERNAME
# HUBOT_MEMEGEN_PASSWORD
#
# Commands:
# hubot memegen <text> (SUCCESS|NAILED IT) - Generates success kid with the top caption of <text>
# hubot memegen Not sure if <text> or <text> - Generates Futurama Fry
# hubot memegen Yo dawg <text> so <text> - Generates Yo Dawg
# hubot memegen if <text>, <word that can start a question> <text>? - Generates Philosoraptor
# hubot memegen one does not simply <text> - Lord of the Rings Boromir
# hubot memegen AM I THE ONLY ONE AROUND HERE <text> - The Big Lebowski
# Author:
# skalnik
inspect = require('util').inspect
request = require 'request'
url = 'https://api.imgflip.com/caption_image'
memes = [
{
regex: /(memegen )?(.*)(SUCCESS|NAILED IT.*)/i
generatorID: 61544
}
{
regex: /(memegen )?(NOT SURE IF .*) (OR .*)/i
generatorID: 61520
}
{
regex: /(memegen )?(YO DAWG .*) (SO .*)/i
generatorID: 101716
}
{
regex: /(memegen )?(one does not simply) (.*)/i
generatorID: 61579
}
{
regex: /(memegen )?(AM I THE ONLY ONE AROUND HERE) (.*)/i
generatorID: 259680
}
]
module.exports = (robot) ->
memeResponder(robot, meme) for meme in memes
robot.respond /(memegen )?(IF .*), ((ARE|CAN|DO|DOES|HOW|IS|MAY|MIGHT|SHOULD|THEN|WHAT|WHEN|WHERE|WHICH|WHO|WHY|WILL|WON\'T|WOULD)[ \'N].*)/i, (msg) ->
memeGenerator msg, 61516, msg.match[2], msg.match[3] + (if msg.match[3].search(/\?$/)==(-1) then '?' else ''), (img) ->
msg.send img
memeResponder = (robot, meme) ->
robot.respond meme.regex, (msg) ->
memeGenerator msg, meme.generatorID, msg.match[2], msg.match[3], (img) ->
msg.send img
memeGenerator = (msg, generatorID, text0, text1, cb) ->
username = process.env.HUBOT_MEMEGEN_USERNAME
password = <PASSWORD>.<PASSWORD>
imgFlipUrl = url + objectToQueryString
template_id: generatorID
username: username
password: <PASSWORD>
text0: text0
text1: text1
request.get imgFlipUrl, (e, res, body) ->
return if e
jsonBody = JSON.parse(body)
success = jsonBody?.success
return unless success
img = jsonBody.data?.url
unless img
msg.reply "Ugh, I got back weird results from imgflip.net. Expected an image URL, but couldn't find it in the result. Here's what I got:", inspect(jsonBody)
return
cb img
objectToQueryString = (obj) -> '?' + (
"#{k}=#{encodeURIComponent(v)}&" for k, v of obj
).join '' | true | # Description:
# Integrates with imgflip.net
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_MEMEGEN_USERNAME
# HUBOT_MEMEGEN_PASSWORD
#
# Commands:
# hubot memegen <text> (SUCCESS|NAILED IT) - Generates success kid with the top caption of <text>
# hubot memegen Not sure if <text> or <text> - Generates Futurama Fry
# hubot memegen Yo dawg <text> so <text> - Generates Yo Dawg
# hubot memegen if <text>, <word that can start a question> <text>? - Generates Philosoraptor
# hubot memegen one does not simply <text> - Lord of the Rings Boromir
# hubot memegen AM I THE ONLY ONE AROUND HERE <text> - The Big Lebowski
# Author:
# skalnik
inspect = require('util').inspect
request = require 'request'
url = 'https://api.imgflip.com/caption_image'
memes = [
{
regex: /(memegen )?(.*)(SUCCESS|NAILED IT.*)/i
generatorID: 61544
}
{
regex: /(memegen )?(NOT SURE IF .*) (OR .*)/i
generatorID: 61520
}
{
regex: /(memegen )?(YO DAWG .*) (SO .*)/i
generatorID: 101716
}
{
regex: /(memegen )?(one does not simply) (.*)/i
generatorID: 61579
}
{
regex: /(memegen )?(AM I THE ONLY ONE AROUND HERE) (.*)/i
generatorID: 259680
}
]
module.exports = (robot) ->
memeResponder(robot, meme) for meme in memes
robot.respond /(memegen )?(IF .*), ((ARE|CAN|DO|DOES|HOW|IS|MAY|MIGHT|SHOULD|THEN|WHAT|WHEN|WHERE|WHICH|WHO|WHY|WILL|WON\'T|WOULD)[ \'N].*)/i, (msg) ->
memeGenerator msg, 61516, msg.match[2], msg.match[3] + (if msg.match[3].search(/\?$/)==(-1) then '?' else ''), (img) ->
msg.send img
memeResponder = (robot, meme) ->
robot.respond meme.regex, (msg) ->
memeGenerator msg, meme.generatorID, msg.match[2], msg.match[3], (img) ->
msg.send img
memeGenerator = (msg, generatorID, text0, text1, cb) ->
username = process.env.HUBOT_MEMEGEN_USERNAME
password = PI:PASSWORD:<PASSWORD>END_PI.PI:PASSWORD:<PASSWORD>END_PI
imgFlipUrl = url + objectToQueryString
template_id: generatorID
username: username
password: PI:PASSWORD:<PASSWORD>END_PI
text0: text0
text1: text1
request.get imgFlipUrl, (e, res, body) ->
return if e
jsonBody = JSON.parse(body)
success = jsonBody?.success
return unless success
img = jsonBody.data?.url
unless img
msg.reply "Ugh, I got back weird results from imgflip.net. Expected an image URL, but couldn't find it in the result. Here's what I got:", inspect(jsonBody)
return
cb img
objectToQueryString = (obj) -> '?' + (
"#{k}=#{encodeURIComponent(v)}&" for k, v of obj
).join '' |
[
{
"context": ")=>\n @timeout 15000\n o = \n name:\"A Test\"\n description: \"Testing Object create",
"end": 2198,
"score": 0.583860456943512,
"start": 2197,
"tag": "NAME",
"value": "A"
},
{
"context": " it 'Should create a new user', (done)=>\n (@testUse... | test/server/node_modules/api-hero/node_modules/rikki-tikki-client/test/_bak/tests.coffee | vancarney/apihero-module-socket.io | 0 | fs = require 'fs'
(chai = require 'chai').should()
_ = (require 'underscore')._
Backbone = require 'backbone'
Backbone.$ = require 'jQuery'
{sparse} = require '../src/sparse.coffee'
jsonData = require './data.json'
server = true
if (typeof process.env.PARSE_APP_ID == 'undefined' or typeof process.env.PARSE_REST_KEY == 'undefined')
console.error 'Failure: PARSE_APP_ID and PARSE_REST_KEY are required to be set in your env vars to run tests'
process.exit 1
describe 'sParse Test Suite', ->
it 'should exist', =>
(sparse).should.be.a 'object'
sparse.APP_ID = process.env.PARSE_APP_ID
sparse.REST_KEY = process.env.PARSE_REST_KEY
describe 'sparse Inflection', =>
it 'should have PLURALIZATION', =>
(sparse).Inflection.pluralize('Man').should.equal 'Men'
(sparse).Inflection.pluralize('Person').should.not.equal 'Persons'
(sparse).Inflection.pluralize('Person').should.equal 'People'
(sparse).Inflection.pluralize('Ox').should.equal 'Oxen'
(sparse).Inflection.pluralize('Mouse').should.equal 'Mice'
(sparse).Inflection.pluralize('Deer').should.equal 'Deer'
(sparse).Inflection.pluralize('Child').should.equal 'Children'
(sparse).Inflection.pluralize('Life').should.equal 'Lives'
(sparse).Inflection.pluralize('Lens').should.equal 'Lenses'
(sparse).Inflection.pluralize('Mine').should.equal 'Mines'
(sparse).Inflection.pluralize('Business').should.equal 'Businesses'
(sparse).Inflection.pluralize('Octopus').should.equal 'Octopi'
describe 'sparse.Model lifecycle', ->
it 'sparse.Model.saveAll should be STATIC', =>
sparse.Model.saveAll.should.be.a 'function'
it 'Model should be extensable', =>
(@clazz = class Test extends (sparse.Model)).should.be.a 'function'
it 'should safely get it\'s constructor.name', =>
(sparse.getConstructorName @testModel = new @clazz()).should.equal 'Test'
it 'should have a pluralized Parse API Class Name', =>
(@testModel).className.should.equal 'Tests'
it 'should save Data to the Parse API', (done)=>
@timeout 15000
o =
name:"A Test"
description: "Testing Object create via Parse API"
h =
success:(m,r,o)=>
done()
@testModel.save o, h
it 'should have an ObjectID after saving', =>
(@testModel.get 'objectId').should.not.equal null
it 'should update Data to the Parse API', (done)=>
@timeout 15000
o =
active:true
h =
success:(m,r,o)=>
done()
@testModel.save o, h
it 'should delete it\'s self from the Parse API', (done)=>
h =
success:(m,r,o)=>
done()
@testModel.destroy h
describe 'sparse.User lifecycle', ->
@timeout 15000
it 'Should create a new user', (done)=>
(@testUser = new sparse.User).save {username:'test.user',password:'sParseTest'},
success:(m,r,o)=>
done()
it 'Should be able to login', (done)=>
@testUser.login (@testUser.get 'username'), (@testUser.get 'password'),
success:(m,r,o)=>
done()
it 'Should have set SESSION_TOKEN after login', ->
sparse.SESSION_TOKEN.should.be.a 'string'
it 'Should be able to update itself', (done)=>
@testUser.save email: 'a.user+changed@email.com',
success:(m,r,o)=>
done()
error:(m,r,o)=>
console.log r
it 'Should be able to logout', (done)=>
@testUser.logout()
@testUser.save email: 'a.user@email.com',
error:(m,r,o)=>
done()
it 'Should be able to be destroyed', (done)=>
@testUser.login 'test.user', 'sParseTest',
success:(m,r,o)=>
@testUser.destroy
success:(m,r,o)=>
done()
describe 'sparse.Batch and sparse.Collections', ->
@timeout 15000
@data = new (class TestCompanies extends sparse.Collection
model: class TestCompany extends sparse.Model
defaults:
name:""
contact_email:""
tagline:""
)
@data.set jsonData.TestCompanies
@batch = new sparse.Batch
@batch.save @data.models
it 'Should Batch Save', (done)=>
@batch.exec
complete:(m,r,o)=>
done()
success:(m,r,o)=>
error:(m,r,o)=>
console.log m
it 'Should Query Records on the Server', (done)=>
@data.reset {}
@data.query active:true,
success:(m,r,o)=>
@data.models.length.should.equal 51
done()
it 'Should mark items for deletion', (done)=>
@data.reset {}
@data.fetch
success: (m,r,o)=>
@batch.destroy @data.models
done()
error: (m,r,o)=>
console.log r
it 'Should have a count of Records on the Server', =>
@data.count().should.equal 101
it 'Should Batch Delete', (done)=>
@batch.exec
complete:(m,r,o)=>
done()
error: (m,r,o)=>
console.log m
it 'Should have deleted all data records', (done)=>
@data.reset {}
@data.fetch
success: (m,r,o)=>
@data.count().should.equal 0
done()
error: (m,r,o)=>
console.log r | 16658 | fs = require 'fs'
(chai = require 'chai').should()
_ = (require 'underscore')._
Backbone = require 'backbone'
Backbone.$ = require 'jQuery'
{sparse} = require '../src/sparse.coffee'
jsonData = require './data.json'
server = true
if (typeof process.env.PARSE_APP_ID == 'undefined' or typeof process.env.PARSE_REST_KEY == 'undefined')
console.error 'Failure: PARSE_APP_ID and PARSE_REST_KEY are required to be set in your env vars to run tests'
process.exit 1
describe 'sParse Test Suite', ->
it 'should exist', =>
(sparse).should.be.a 'object'
sparse.APP_ID = process.env.PARSE_APP_ID
sparse.REST_KEY = process.env.PARSE_REST_KEY
describe 'sparse Inflection', =>
it 'should have PLURALIZATION', =>
(sparse).Inflection.pluralize('Man').should.equal 'Men'
(sparse).Inflection.pluralize('Person').should.not.equal 'Persons'
(sparse).Inflection.pluralize('Person').should.equal 'People'
(sparse).Inflection.pluralize('Ox').should.equal 'Oxen'
(sparse).Inflection.pluralize('Mouse').should.equal 'Mice'
(sparse).Inflection.pluralize('Deer').should.equal 'Deer'
(sparse).Inflection.pluralize('Child').should.equal 'Children'
(sparse).Inflection.pluralize('Life').should.equal 'Lives'
(sparse).Inflection.pluralize('Lens').should.equal 'Lenses'
(sparse).Inflection.pluralize('Mine').should.equal 'Mines'
(sparse).Inflection.pluralize('Business').should.equal 'Businesses'
(sparse).Inflection.pluralize('Octopus').should.equal 'Octopi'
describe 'sparse.Model lifecycle', ->
it 'sparse.Model.saveAll should be STATIC', =>
sparse.Model.saveAll.should.be.a 'function'
it 'Model should be extensable', =>
(@clazz = class Test extends (sparse.Model)).should.be.a 'function'
it 'should safely get it\'s constructor.name', =>
(sparse.getConstructorName @testModel = new @clazz()).should.equal 'Test'
it 'should have a pluralized Parse API Class Name', =>
(@testModel).className.should.equal 'Tests'
it 'should save Data to the Parse API', (done)=>
@timeout 15000
o =
name:"<NAME> Test"
description: "Testing Object create via Parse API"
h =
success:(m,r,o)=>
done()
@testModel.save o, h
it 'should have an ObjectID after saving', =>
(@testModel.get 'objectId').should.not.equal null
it 'should update Data to the Parse API', (done)=>
@timeout 15000
o =
active:true
h =
success:(m,r,o)=>
done()
@testModel.save o, h
it 'should delete it\'s self from the Parse API', (done)=>
h =
success:(m,r,o)=>
done()
@testModel.destroy h
describe 'sparse.User lifecycle', ->
@timeout 15000
it 'Should create a new user', (done)=>
(@testUser = new sparse.User).save {username:'test.user',password:'<PASSWORD>'},
success:(m,r,o)=>
done()
it 'Should be able to login', (done)=>
@testUser.login (@testUser.get 'username'), (@testUser.get 'password'),
success:(m,r,o)=>
done()
it 'Should have set SESSION_TOKEN after login', ->
sparse.SESSION_TOKEN.should.be.a 'string'
it 'Should be able to update itself', (done)=>
@testUser.save email: '<EMAIL>',
success:(m,r,o)=>
done()
error:(m,r,o)=>
console.log r
it 'Should be able to logout', (done)=>
@testUser.logout()
@testUser.save email: '<EMAIL>',
error:(m,r,o)=>
done()
it 'Should be able to be destroyed', (done)=>
@testUser.login 'test.user', 'sParseTest',
success:(m,r,o)=>
@testUser.destroy
success:(m,r,o)=>
done()
describe 'sparse.Batch and sparse.Collections', ->
@timeout 15000
@data = new (class TestCompanies extends sparse.Collection
model: class TestCompany extends sparse.Model
defaults:
name:""
contact_email:""
tagline:""
)
@data.set jsonData.TestCompanies
@batch = new sparse.Batch
@batch.save @data.models
it 'Should Batch Save', (done)=>
@batch.exec
complete:(m,r,o)=>
done()
success:(m,r,o)=>
error:(m,r,o)=>
console.log m
it 'Should Query Records on the Server', (done)=>
@data.reset {}
@data.query active:true,
success:(m,r,o)=>
@data.models.length.should.equal 51
done()
it 'Should mark items for deletion', (done)=>
@data.reset {}
@data.fetch
success: (m,r,o)=>
@batch.destroy @data.models
done()
error: (m,r,o)=>
console.log r
it 'Should have a count of Records on the Server', =>
@data.count().should.equal 101
it 'Should Batch Delete', (done)=>
@batch.exec
complete:(m,r,o)=>
done()
error: (m,r,o)=>
console.log m
it 'Should have deleted all data records', (done)=>
@data.reset {}
@data.fetch
success: (m,r,o)=>
@data.count().should.equal 0
done()
error: (m,r,o)=>
console.log r | true | fs = require 'fs'
(chai = require 'chai').should()
_ = (require 'underscore')._
Backbone = require 'backbone'
Backbone.$ = require 'jQuery'
{sparse} = require '../src/sparse.coffee'
jsonData = require './data.json'
server = true
if (typeof process.env.PARSE_APP_ID == 'undefined' or typeof process.env.PARSE_REST_KEY == 'undefined')
console.error 'Failure: PARSE_APP_ID and PARSE_REST_KEY are required to be set in your env vars to run tests'
process.exit 1
describe 'sParse Test Suite', ->
it 'should exist', =>
(sparse).should.be.a 'object'
sparse.APP_ID = process.env.PARSE_APP_ID
sparse.REST_KEY = process.env.PARSE_REST_KEY
describe 'sparse Inflection', =>
it 'should have PLURALIZATION', =>
(sparse).Inflection.pluralize('Man').should.equal 'Men'
(sparse).Inflection.pluralize('Person').should.not.equal 'Persons'
(sparse).Inflection.pluralize('Person').should.equal 'People'
(sparse).Inflection.pluralize('Ox').should.equal 'Oxen'
(sparse).Inflection.pluralize('Mouse').should.equal 'Mice'
(sparse).Inflection.pluralize('Deer').should.equal 'Deer'
(sparse).Inflection.pluralize('Child').should.equal 'Children'
(sparse).Inflection.pluralize('Life').should.equal 'Lives'
(sparse).Inflection.pluralize('Lens').should.equal 'Lenses'
(sparse).Inflection.pluralize('Mine').should.equal 'Mines'
(sparse).Inflection.pluralize('Business').should.equal 'Businesses'
(sparse).Inflection.pluralize('Octopus').should.equal 'Octopi'
describe 'sparse.Model lifecycle', ->
it 'sparse.Model.saveAll should be STATIC', =>
sparse.Model.saveAll.should.be.a 'function'
it 'Model should be extensable', =>
(@clazz = class Test extends (sparse.Model)).should.be.a 'function'
it 'should safely get it\'s constructor.name', =>
(sparse.getConstructorName @testModel = new @clazz()).should.equal 'Test'
it 'should have a pluralized Parse API Class Name', =>
(@testModel).className.should.equal 'Tests'
it 'should save Data to the Parse API', (done)=>
@timeout 15000
o =
name:"PI:NAME:<NAME>END_PI Test"
description: "Testing Object create via Parse API"
h =
success:(m,r,o)=>
done()
@testModel.save o, h
it 'should have an ObjectID after saving', =>
(@testModel.get 'objectId').should.not.equal null
it 'should update Data to the Parse API', (done)=>
@timeout 15000
o =
active:true
h =
success:(m,r,o)=>
done()
@testModel.save o, h
it 'should delete it\'s self from the Parse API', (done)=>
h =
success:(m,r,o)=>
done()
@testModel.destroy h
describe 'sparse.User lifecycle', ->
@timeout 15000
it 'Should create a new user', (done)=>
(@testUser = new sparse.User).save {username:'test.user',password:'PI:PASSWORD:<PASSWORD>END_PI'},
success:(m,r,o)=>
done()
it 'Should be able to login', (done)=>
@testUser.login (@testUser.get 'username'), (@testUser.get 'password'),
success:(m,r,o)=>
done()
it 'Should have set SESSION_TOKEN after login', ->
sparse.SESSION_TOKEN.should.be.a 'string'
it 'Should be able to update itself', (done)=>
@testUser.save email: 'PI:EMAIL:<EMAIL>END_PI',
success:(m,r,o)=>
done()
error:(m,r,o)=>
console.log r
it 'Should be able to logout', (done)=>
@testUser.logout()
@testUser.save email: 'PI:EMAIL:<EMAIL>END_PI',
error:(m,r,o)=>
done()
it 'Should be able to be destroyed', (done)=>
@testUser.login 'test.user', 'sParseTest',
success:(m,r,o)=>
@testUser.destroy
success:(m,r,o)=>
done()
describe 'sparse.Batch and sparse.Collections', ->
@timeout 15000
@data = new (class TestCompanies extends sparse.Collection
model: class TestCompany extends sparse.Model
defaults:
name:""
contact_email:""
tagline:""
)
@data.set jsonData.TestCompanies
@batch = new sparse.Batch
@batch.save @data.models
it 'Should Batch Save', (done)=>
@batch.exec
complete:(m,r,o)=>
done()
success:(m,r,o)=>
error:(m,r,o)=>
console.log m
it 'Should Query Records on the Server', (done)=>
@data.reset {}
@data.query active:true,
success:(m,r,o)=>
@data.models.length.should.equal 51
done()
it 'Should mark items for deletion', (done)=>
@data.reset {}
@data.fetch
success: (m,r,o)=>
@batch.destroy @data.models
done()
error: (m,r,o)=>
console.log r
it 'Should have a count of Records on the Server', =>
@data.count().should.equal 101
it 'Should Batch Delete', (done)=>
@batch.exec
complete:(m,r,o)=>
done()
error: (m,r,o)=>
console.log m
it 'Should have deleted all data records', (done)=>
@data.reset {}
@data.fetch
success: (m,r,o)=>
@data.count().should.equal 0
done()
error: (m,r,o)=>
console.log r |
[
{
"context": "###\nCopyright 2013 Marco Braak\n\nLicensed under the Apache License, Version 2.0 (",
"end": 30,
"score": 0.9998483657836914,
"start": 19,
"tag": "NAME",
"value": "Marco Braak"
}
] | evidencePrijmu/src/main/webapp/vendor/simple-data-grid-master/simple.widget.coffee | Matesyi/pb138-evidence-prijmu-web | 0 | ###
Copyright 2013 Marco Braak
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
$ = jQuery
class SimpleWidget
defaults: {}
constructor: (el, options) ->
@$el = $(el)
@options = $.extend({}, @defaults, options)
destroy: ->
@_deinit()
_init: ->
null
_deinit: ->
null
@register = (widget_class, widget_name) ->
getDataKey = ->
return "simple_widget_#{widget_name}"
getWidgetData = (el, data_key) ->
widget = $.data(el, data_key)
if widget and (widget instanceof SimpleWidget)
return widget
else
return null
createWidget = ($el, options) ->
data_key = getDataKey()
for el in $el
existing_widget = getWidgetData(el, data_key)
if not existing_widget
widget = new widget_class(el, options)
if not $.data(el, data_key)
$.data(el, data_key, widget)
# Call init after setting data, so we can call methods
widget._init()
return $el
destroyWidget = ($el) ->
data_key = getDataKey()
for el in $el
widget = getWidgetData(el, data_key)
if widget
widget.destroy()
$.removeData(el, data_key)
callFunction = ($el, function_name, args) ->
result = null
for el in $el
widget = $.data(el, getDataKey())
if widget and (widget instanceof SimpleWidget)
widget_function = widget[function_name]
if widget_function and (typeof widget_function == 'function')
result = widget_function.apply(widget, args)
return result
$.fn[widget_name] = (argument1, args...) ->
$el = this
if argument1 is undefined or typeof argument1 == 'object'
options = argument1
return createWidget($el, options)
else if typeof argument1 == 'string' and argument1[0] != '_'
function_name = argument1
if function_name == 'destroy'
return destroyWidget($el)
else
return callFunction($el, function_name, args)
module.exports = SimpleWidget
| 12635 | ###
Copyright 2013 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
$ = jQuery
class SimpleWidget
defaults: {}
constructor: (el, options) ->
@$el = $(el)
@options = $.extend({}, @defaults, options)
destroy: ->
@_deinit()
_init: ->
null
_deinit: ->
null
@register = (widget_class, widget_name) ->
getDataKey = ->
return "simple_widget_#{widget_name}"
getWidgetData = (el, data_key) ->
widget = $.data(el, data_key)
if widget and (widget instanceof SimpleWidget)
return widget
else
return null
createWidget = ($el, options) ->
data_key = getDataKey()
for el in $el
existing_widget = getWidgetData(el, data_key)
if not existing_widget
widget = new widget_class(el, options)
if not $.data(el, data_key)
$.data(el, data_key, widget)
# Call init after setting data, so we can call methods
widget._init()
return $el
destroyWidget = ($el) ->
data_key = getDataKey()
for el in $el
widget = getWidgetData(el, data_key)
if widget
widget.destroy()
$.removeData(el, data_key)
callFunction = ($el, function_name, args) ->
result = null
for el in $el
widget = $.data(el, getDataKey())
if widget and (widget instanceof SimpleWidget)
widget_function = widget[function_name]
if widget_function and (typeof widget_function == 'function')
result = widget_function.apply(widget, args)
return result
$.fn[widget_name] = (argument1, args...) ->
$el = this
if argument1 is undefined or typeof argument1 == 'object'
options = argument1
return createWidget($el, options)
else if typeof argument1 == 'string' and argument1[0] != '_'
function_name = argument1
if function_name == 'destroy'
return destroyWidget($el)
else
return callFunction($el, function_name, args)
module.exports = SimpleWidget
| true | ###
Copyright 2013 PI:NAME:<NAME>END_PI
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
$ = jQuery
class SimpleWidget
defaults: {}
constructor: (el, options) ->
@$el = $(el)
@options = $.extend({}, @defaults, options)
destroy: ->
@_deinit()
_init: ->
null
_deinit: ->
null
@register = (widget_class, widget_name) ->
getDataKey = ->
return "simple_widget_#{widget_name}"
getWidgetData = (el, data_key) ->
widget = $.data(el, data_key)
if widget and (widget instanceof SimpleWidget)
return widget
else
return null
createWidget = ($el, options) ->
data_key = getDataKey()
for el in $el
existing_widget = getWidgetData(el, data_key)
if not existing_widget
widget = new widget_class(el, options)
if not $.data(el, data_key)
$.data(el, data_key, widget)
# Call init after setting data, so we can call methods
widget._init()
return $el
destroyWidget = ($el) ->
data_key = getDataKey()
for el in $el
widget = getWidgetData(el, data_key)
if widget
widget.destroy()
$.removeData(el, data_key)
callFunction = ($el, function_name, args) ->
result = null
for el in $el
widget = $.data(el, getDataKey())
if widget and (widget instanceof SimpleWidget)
widget_function = widget[function_name]
if widget_function and (typeof widget_function == 'function')
result = widget_function.apply(widget, args)
return result
$.fn[widget_name] = (argument1, args...) ->
$el = this
if argument1 is undefined or typeof argument1 == 'object'
options = argument1
return createWidget($el, options)
else if typeof argument1 == 'string' and argument1[0] != '_'
function_name = argument1
if function_name == 'destroy'
return destroyWidget($el)
else
return callFunction($el, function_name, args)
module.exports = SimpleWidget
|
[
{
"context": "\ntestjson = \"test/test.json\"\ntestobj = {\n name: \"test\"\n othername: \"test2\"\n number: 3\n nested:\n n",
"end": 185,
"score": 0.9754127264022827,
"start": 181,
"tag": "NAME",
"value": "test"
},
{
"context": "st.json\"\ntestobj = {\n name: \"test\"\n oth... | test/index.coffee | paulpflug/pkg-json | 1 | chai = require "chai"
fs = require "fs"
should = chai.should()
util = require "core-util-is"
script = require "../src/index.coffee"
testjson = "test/test.json"
testobj = {
name: "test"
othername: "test2"
number: 3
nested:
nested2: "test3"
nested3: test4: "test5"
nested4: [4,5,6]
array: [1,2,3]
array2: ["a","b","c"]
}
wraperr = (err, arg) ->
(->script(arg)).should.throw err
describe "pkg-json", ->
before (done) ->
fs.writeFile testjson, JSON.stringify(testobj), -> done()
describe "script", ->
it "should be a function", ->
script.should.be.a.function
it "should throw when more than one option is given", ->
err = "only one of set, get, push or splice is allowed"
wraperr err, set:true, get:true
wraperr err, set:true, push:true
wraperr err, set:true, splice:true
wraperr err, push:true, get:true
wraperr err, splice:true, get:true
wraperr err, splice:true, push:true
it "should throw when type is unvalid", ->
err = "type: something is invalid"
wraperr err, type:"something"
it "should throw when no command is given", ->
err = "no command given, try set, get push or splice"
wraperr err
it "should throw when path is no string", ->
err = "path needs to be a string"
wraperr err, type:"set", path: 0
it "should throw when no value is provided with push or splice", ->
wraperr "no value provided for splice", splice:""
wraperr "no value provided for push", push:""
it "should throw when no value is provided with set and isn't forced", ->
wraperr "no value provided for set, to remove a key use --force", set:""
it "should throw when invalid json file is provided", ->
wraperr "failed to load invalid.json", get:"", in:"invalid.json"
it "should throw when data is no object", ->
wraperr "data needs to be an object", get: "", data: ""
it "should throw when path is not found", ->
wraperr 'path test not found in {"test2":"test2"}', get: "test", data: test2: "test2"
it "should throw when push or splice is called on something other then array", ->
wraperr "there is no array at the given path", splice: "test", value:"1", data: test: "test"
wraperr "there is no array at the given path", push: "test", value:"1", data: test: "test"
it "should throw when splice is not found", ->
wraperr "d not found in array", splice: "test", value:"d", data: test: ["a","b","c"]
it "should throw when splice is out of boundary", ->
wraperr "index out of array boundary", splice: "test", value:"4", data: test: ["a","b","c"]
it "should throw when trying to overwrite a object/array with something other", ->
wraperr "you are trying to overwrite an array by a number. Use -f to force.",
set:"test",value:1,data: test: []
wraperr "you are trying to overwrite an object by a number. Use -f to force.",
set:"test",value:1,data: test: {}
it "should work with get and package.json", ->
script(get:"name").should.equal '"pkg-json"'
script(type:"get", path:"name").should.equal '"pkg-json"'
script(get:"bin.pkg-json").should.equal '"./index.js"'
it "should work with get and testjson", ->
script(get:"name",in:testjson).should.equal "\"#{testobj.name}\""
it "should work with bare", ->
script(get:"name",in:testjson,bare:true).should.equal testobj.name
it "should work nested", ->
script(get:"nested.nested3.test4",in:testjson,bare:true).should.equal testobj.nested.nested3.test4
it "should work with set", (done) ->
script set:"name",value:"newName",in:testjson,cb: ->
script(get:"name",in:testjson,bare:true).should.equal "newName"
done()
it "should work with deep set", (done) ->
script set:"nested.added.again",value:"something",in:testjson,cb: ->
testobj.nested ?= {}
testobj.nested.added ?= {}
testobj.nested.added.again = "something"
script(get:"nested.added.again",in:testjson,bare:true).should.equal "something"
done()
it "should work with return", ->
clone = JSON.parse(JSON.stringify(testobj))
clone.name = "newName2"
script(set:"name",value:"newName2",in:testjson,return: true).should.equal JSON.stringify clone,null,'\t'
it "should work with push", (done) ->
script push:"array",value:4,in:testjson,cb: ->
script(get:"array",in:testjson).should.equal JSON.stringify [1,2,3,4],null,'\t'
done()
it "should work with splice", (done) ->
script splice:"array",value:0,in:testjson,cb: ->
script(get:"array",in:testjson).should.equal JSON.stringify [2,3,4],null,'\t'
script splice:"array2",value:"b",in:testjson,cb: ->
script(get:"array2",in:testjson).should.equal JSON.stringify ["a","c"],null,'\t'
done()
describe "cli", ->
it "should be tested"
after (done) ->
fs.unlink testjson, -> done()
| 95265 | chai = require "chai"
fs = require "fs"
should = chai.should()
util = require "core-util-is"
script = require "../src/index.coffee"
testjson = "test/test.json"
testobj = {
name: "<NAME>"
othername: "<NAME>"
number: 3
nested:
nested2: "test3"
nested3: test4: "test5"
nested4: [4,5,6]
array: [1,2,3]
array2: ["a","b","c"]
}
wraperr = (err, arg) ->
(->script(arg)).should.throw err
describe "pkg-json", ->
before (done) ->
fs.writeFile testjson, JSON.stringify(testobj), -> done()
describe "script", ->
it "should be a function", ->
script.should.be.a.function
it "should throw when more than one option is given", ->
err = "only one of set, get, push or splice is allowed"
wraperr err, set:true, get:true
wraperr err, set:true, push:true
wraperr err, set:true, splice:true
wraperr err, push:true, get:true
wraperr err, splice:true, get:true
wraperr err, splice:true, push:true
it "should throw when type is unvalid", ->
err = "type: something is invalid"
wraperr err, type:"something"
it "should throw when no command is given", ->
err = "no command given, try set, get push or splice"
wraperr err
it "should throw when path is no string", ->
err = "path needs to be a string"
wraperr err, type:"set", path: 0
it "should throw when no value is provided with push or splice", ->
wraperr "no value provided for splice", splice:""
wraperr "no value provided for push", push:""
it "should throw when no value is provided with set and isn't forced", ->
wraperr "no value provided for set, to remove a key use --force", set:""
it "should throw when invalid json file is provided", ->
wraperr "failed to load invalid.json", get:"", in:"invalid.json"
it "should throw when data is no object", ->
wraperr "data needs to be an object", get: "", data: ""
it "should throw when path is not found", ->
wraperr 'path test not found in {"test2":"test2"}', get: "test", data: test2: "test2"
it "should throw when push or splice is called on something other then array", ->
wraperr "there is no array at the given path", splice: "test", value:"1", data: test: "test"
wraperr "there is no array at the given path", push: "test", value:"1", data: test: "test"
it "should throw when splice is not found", ->
wraperr "d not found in array", splice: "test", value:"d", data: test: ["a","b","c"]
it "should throw when splice is out of boundary", ->
wraperr "index out of array boundary", splice: "test", value:"4", data: test: ["a","b","c"]
it "should throw when trying to overwrite a object/array with something other", ->
wraperr "you are trying to overwrite an array by a number. Use -f to force.",
set:"test",value:1,data: test: []
wraperr "you are trying to overwrite an object by a number. Use -f to force.",
set:"test",value:1,data: test: {}
it "should work with get and package.json", ->
script(get:"name").should.equal '"pkg-json"'
script(type:"get", path:"name").should.equal '"pkg-json"'
script(get:"bin.pkg-json").should.equal '"./index.js"'
it "should work with get and testjson", ->
script(get:"name",in:testjson).should.equal "\"#{testobj.name}\""
it "should work with bare", ->
script(get:"name",in:testjson,bare:true).should.equal testobj.name
it "should work nested", ->
script(get:"nested.nested3.test4",in:testjson,bare:true).should.equal testobj.nested.nested3.test4
it "should work with set", (done) ->
script set:"name",value:"<NAME>",in:testjson,cb: ->
script(get:"name",in:testjson,bare:true).should.equal "newName"
done()
it "should work with deep set", (done) ->
script set:"nested.added.again",value:"something",in:testjson,cb: ->
testobj.nested ?= {}
testobj.nested.added ?= {}
testobj.nested.added.again = "something"
script(get:"nested.added.again",in:testjson,bare:true).should.equal "something"
done()
it "should work with return", ->
clone = JSON.parse(JSON.stringify(testobj))
clone.name = "<NAME>Name2"
script(set:"name",value:"<NAME>",in:testjson,return: true).should.equal JSON.stringify clone,null,'\t'
it "should work with push", (done) ->
script push:"array",value:4,in:testjson,cb: ->
script(get:"array",in:testjson).should.equal JSON.stringify [1,2,3,4],null,'\t'
done()
it "should work with splice", (done) ->
script splice:"array",value:0,in:testjson,cb: ->
script(get:"array",in:testjson).should.equal JSON.stringify [2,3,4],null,'\t'
script splice:"array2",value:"b",in:testjson,cb: ->
script(get:"array2",in:testjson).should.equal JSON.stringify ["a","c"],null,'\t'
done()
describe "cli", ->
it "should be tested"
after (done) ->
fs.unlink testjson, -> done()
| true | chai = require "chai"
fs = require "fs"
should = chai.should()
util = require "core-util-is"
script = require "../src/index.coffee"
testjson = "test/test.json"
testobj = {
name: "PI:NAME:<NAME>END_PI"
othername: "PI:NAME:<NAME>END_PI"
number: 3
nested:
nested2: "test3"
nested3: test4: "test5"
nested4: [4,5,6]
array: [1,2,3]
array2: ["a","b","c"]
}
wraperr = (err, arg) ->
(->script(arg)).should.throw err
describe "pkg-json", ->
before (done) ->
fs.writeFile testjson, JSON.stringify(testobj), -> done()
describe "script", ->
it "should be a function", ->
script.should.be.a.function
it "should throw when more than one option is given", ->
err = "only one of set, get, push or splice is allowed"
wraperr err, set:true, get:true
wraperr err, set:true, push:true
wraperr err, set:true, splice:true
wraperr err, push:true, get:true
wraperr err, splice:true, get:true
wraperr err, splice:true, push:true
it "should throw when type is unvalid", ->
err = "type: something is invalid"
wraperr err, type:"something"
it "should throw when no command is given", ->
err = "no command given, try set, get push or splice"
wraperr err
it "should throw when path is no string", ->
err = "path needs to be a string"
wraperr err, type:"set", path: 0
it "should throw when no value is provided with push or splice", ->
wraperr "no value provided for splice", splice:""
wraperr "no value provided for push", push:""
it "should throw when no value is provided with set and isn't forced", ->
wraperr "no value provided for set, to remove a key use --force", set:""
it "should throw when invalid json file is provided", ->
wraperr "failed to load invalid.json", get:"", in:"invalid.json"
it "should throw when data is no object", ->
wraperr "data needs to be an object", get: "", data: ""
it "should throw when path is not found", ->
wraperr 'path test not found in {"test2":"test2"}', get: "test", data: test2: "test2"
it "should throw when push or splice is called on something other then array", ->
wraperr "there is no array at the given path", splice: "test", value:"1", data: test: "test"
wraperr "there is no array at the given path", push: "test", value:"1", data: test: "test"
it "should throw when splice is not found", ->
wraperr "d not found in array", splice: "test", value:"d", data: test: ["a","b","c"]
it "should throw when splice is out of boundary", ->
wraperr "index out of array boundary", splice: "test", value:"4", data: test: ["a","b","c"]
it "should throw when trying to overwrite a object/array with something other", ->
wraperr "you are trying to overwrite an array by a number. Use -f to force.",
set:"test",value:1,data: test: []
wraperr "you are trying to overwrite an object by a number. Use -f to force.",
set:"test",value:1,data: test: {}
it "should work with get and package.json", ->
script(get:"name").should.equal '"pkg-json"'
script(type:"get", path:"name").should.equal '"pkg-json"'
script(get:"bin.pkg-json").should.equal '"./index.js"'
it "should work with get and testjson", ->
script(get:"name",in:testjson).should.equal "\"#{testobj.name}\""
it "should work with bare", ->
script(get:"name",in:testjson,bare:true).should.equal testobj.name
it "should work nested", ->
script(get:"nested.nested3.test4",in:testjson,bare:true).should.equal testobj.nested.nested3.test4
it "should work with set", (done) ->
script set:"name",value:"PI:NAME:<NAME>END_PI",in:testjson,cb: ->
script(get:"name",in:testjson,bare:true).should.equal "newName"
done()
it "should work with deep set", (done) ->
script set:"nested.added.again",value:"something",in:testjson,cb: ->
testobj.nested ?= {}
testobj.nested.added ?= {}
testobj.nested.added.again = "something"
script(get:"nested.added.again",in:testjson,bare:true).should.equal "something"
done()
it "should work with return", ->
clone = JSON.parse(JSON.stringify(testobj))
clone.name = "PI:NAME:<NAME>END_PIName2"
script(set:"name",value:"PI:NAME:<NAME>END_PI",in:testjson,return: true).should.equal JSON.stringify clone,null,'\t'
it "should work with push", (done) ->
script push:"array",value:4,in:testjson,cb: ->
script(get:"array",in:testjson).should.equal JSON.stringify [1,2,3,4],null,'\t'
done()
it "should work with splice", (done) ->
script splice:"array",value:0,in:testjson,cb: ->
script(get:"array",in:testjson).should.equal JSON.stringify [2,3,4],null,'\t'
script splice:"array2",value:"b",in:testjson,cb: ->
script(get:"array2",in:testjson).should.equal JSON.stringify ["a","c"],null,'\t'
done()
describe "cli", ->
it "should be tested"
after (done) ->
fs.unlink testjson, -> done()
|
[
{
"context": "LED = 'true'\n process.env.RADBUS_API_KEYS = '1234,4321'\n\n afterEach ->\n delete process.env.RADBU",
"end": 769,
"score": 0.9993504285812378,
"start": 760,
"tag": "KEY",
"value": "1234,4321"
}
] | test/api/test-root.coffee | twistedstream/radbus-api | 2 | # coffeelint: disable=max_line_length
chai = require 'chai'
chai.use require 'chai-as-promised'
should = chai.should()
request = require 'super-request'
helpers = require './helpers'
# build server
server = helpers.buildServer '../../api/resources/root'
describe "GET / (root)", ->
describe "api-key disabled", ->
beforeEach ->
process.env.RADBUS_API_KEYS_ENABLED = 'false'
afterEach ->
delete process.env.RADBUS_API_KEYS_ENABLED
it "should return 200 with expected application/version structure", ->
r = request(server)
.get('/')
helpers.assertAppVersionResponse r
describe "api-key enabled", ->
beforeEach ->
process.env.RADBUS_API_KEYS_ENABLED = 'true'
process.env.RADBUS_API_KEYS = '1234,4321'
afterEach ->
delete process.env.RADBUS_API_KEYS_ENABLED
delete process.env.RADBUS_API_KEYS
it "should return 200 with expected application/version structure", ->
r = request(server)
.get('/')
helpers.assertAppVersionResponse r
| 219609 | # coffeelint: disable=max_line_length
chai = require 'chai'
chai.use require 'chai-as-promised'
should = chai.should()
request = require 'super-request'
helpers = require './helpers'
# build server
server = helpers.buildServer '../../api/resources/root'
describe "GET / (root)", ->
describe "api-key disabled", ->
beforeEach ->
process.env.RADBUS_API_KEYS_ENABLED = 'false'
afterEach ->
delete process.env.RADBUS_API_KEYS_ENABLED
it "should return 200 with expected application/version structure", ->
r = request(server)
.get('/')
helpers.assertAppVersionResponse r
describe "api-key enabled", ->
beforeEach ->
process.env.RADBUS_API_KEYS_ENABLED = 'true'
process.env.RADBUS_API_KEYS = '<KEY>'
afterEach ->
delete process.env.RADBUS_API_KEYS_ENABLED
delete process.env.RADBUS_API_KEYS
it "should return 200 with expected application/version structure", ->
r = request(server)
.get('/')
helpers.assertAppVersionResponse r
| true | # coffeelint: disable=max_line_length
chai = require 'chai'
chai.use require 'chai-as-promised'
should = chai.should()
request = require 'super-request'
helpers = require './helpers'
# build server
server = helpers.buildServer '../../api/resources/root'
describe "GET / (root)", ->
describe "api-key disabled", ->
beforeEach ->
process.env.RADBUS_API_KEYS_ENABLED = 'false'
afterEach ->
delete process.env.RADBUS_API_KEYS_ENABLED
it "should return 200 with expected application/version structure", ->
r = request(server)
.get('/')
helpers.assertAppVersionResponse r
describe "api-key enabled", ->
beforeEach ->
process.env.RADBUS_API_KEYS_ENABLED = 'true'
process.env.RADBUS_API_KEYS = 'PI:KEY:<KEY>END_PI'
afterEach ->
delete process.env.RADBUS_API_KEYS_ENABLED
delete process.env.RADBUS_API_KEYS
it "should return 200 with expected application/version structure", ->
r = request(server)
.get('/')
helpers.assertAppVersionResponse r
|
[
{
"context": "y Wills. Anything happen to pop and I got you like Uncle Phil.\",\n \"You got your resolutions, we just got reser",
"end": 4704,
"score": 0.9936043620109558,
"start": 4694,
"tag": "NAME",
"value": "Uncle Phil"
}
] | src/drizzy.coffee | curtiscook/hubot-drizzy | 1 | # Description
# A Drake Quote generator for Hubots.
#
# Configuration:
# None
#
# Commands:
# hubot drizzy quote me - displays a random quote from drake
#
quotes = [
"When you call too much, then I call less. You would always complain about my small texts, but what you call conversation, I call stress.",
"You don't love me, you just say that s*** to get to me.",
"We all have our nights though, don't be so ashamed. I've had mine, you've had yours, we both know.",
"Never thoughts of suicide, I'm too alive. But I still treat it like it's do or die.",
"I gotta feel alive, even if it kills me. Promise to always give you me, the real me.",
"F*** with me, I'll buy the shots. Live a little, cause n***as die a lot.",
"I hate getting messages from you, it's like you know the perfect s*** to say. F***s up my whole day, thought all these feelings went away.",
"Watch me, going out of the way, when I should've went home. Only time of the day I get to spend on my own.",
"Makin' major changes to the life I'm livin'. I had no choice, I had to prove I made the right decision.",
"Can't deny that I want you, but I'll lie if I have to.",
"No new n***as, n***a we don't feel that. F*** a fake friend, where ya real friends at?",
"I say f*** ya, less I'm with ya. If I take you out of the picture, I know real n***as won't miss ya, no lie.",
"I finally get a moment to myself, I would realize you were everything I'm missing, and you'll tell me you're in love with someone else.",
"I just need some closure. Ain't no turnin' back for me, I'm in it till it's over.",
"I was stayin' home when they was havin' fun, so please don't be surprised when they announce that I won.",
"Feelin' good, livin' better. I think maybe I was numb to it last year, but you know I feel it now more than ever.",
"You let me go, little did you know, I could've made it all on my own.",
"I got my eyes on you, you're everything that I see. I want your hot love and emotion endlessly.",
"Accept yourself. You don't have to prove s*** to no one except yourself.",
"Don't get impatient when it takes too long, and drink it all even when it tastes too strong.",
"All so convinced that you're following your heart, cause your mind don't control what it does sometimes.",
"They say real girls ain't never perfect, perfect girls ain't real.",
"Ain't no wishing over on this side. Y'all don't f*** with us then we don't f*** with y'all, it's no different over on this side.",
"We walk the same path, but got on different shoes. Live in the same building, but we got different views.",
"I'd rather be with you, but you are not around. So I'ma call somebody up and see if they'd be down, cause I hate sleeping alone.",
"You were the one. You've been the one from the moment I seen ya.",
"My family gets all of my loyalty, all of my patience. My life for your life, man I wouldn't trade it.",
"Don't ever forget the moment you began to doubt, transitioning from fitting in to standing out.",
"I guess that's how it goes. They hear about your cons, but focus on your pros, and love you for who you are from the bottom of their soul.",
"Make sure you don't fall in love, cause I don't have the time for that.",
"Too many times I've been wrong, I guess being right takes too long.",
"Isn't it amazing how you talk all this s*** and we still lack communication.",
"May your neighbors respect you, trouble neglect you. Angels protect you, and heaven accept you.",
"How I put it on you, crazy. That's when you start to laugh, saying I just want what I can't have.",
"I'm still fly, I'm sky high, and I dare anybody to try and cut my wings.",
"You love me and I love you, and your heart hurts, mine does too. And it's just words and they cut deep but it's our world, it's just us two.",
"My life is moving fast, the time will often past. And something will remind me, I can't put this behind me.",
"I'm just doin' me and you could never understand it.",
"We sure make it feel like we're together, cause we're scared to see each other with somebody else.",
"I was a cold dude, I'm gettin' back to my ways.",
"I hate the s*** you do, but I still text you back, say I miss you too." ,
"The weekend's here, started it right, even if you only get part of it right.",
"If you end up needin some extra help then I can help.You know, back on ya feet and s***.Tryna get my karma up, f*** the guilty &; greedy s***",
"Holla if you got me and f*** you if you had me.",
"Young, dumb, lookin' in the mirror tryin' to figure out who I'm gonna be this year.",
"All them boys in my will, all them boys is my Wills. Anything happen to pop and I got you like Uncle Phil.",
"You got your resolutions, we just got reservations.",
"Feelin' good, livin' better. But it's evident that nothing can prepare us for certain moments that you just gotta cherish.",
"Got everything I'm asking for but you.",
"I'm slowly running out of all the time that I invested, making all the same mistakes and I'm just trying to correct it.",
"It's such a small place, not much to do but talk and listen. The men are jealous and the women all in competition.",
"Next time I stand tall, I wanna be standing for you. And next time I spend I want it all to be for you.",
"This here is something personal, highly doubt this feeling is reversible. Knowledge is pain and that's why it hurts to know.",
"I'm lucky that you picked up, lucky that you stayed on. I need someone to put this weight on.",
"I've been avoiding commitment, that's why I'm in this position. I'm scared to let somebody in on this.",
"I'm thinkin' about my goals, and how we used to say a goal is just a dream with a deadline.",
"I've asked about you and they told me things. But my mind didn't change, I still feel the same.",
"Sweat pants, hair tied, chillin' with no makeup on. That's when you're the prettiest, I hope that you don't take it wrong.",
"I'm honest, I make mistakes, I'd be the second to admit it." ,
"I'm really tryna make it more than what it is, cause everybody dies but not everybody lives.",
"The good ones go, if you wait too long. So you should go, before you stay too long.",
"That's why I pick and choose. I don't get s*** confused. I got a small circle, I'm not with different crews.",
"Make it sooner than later, we won't be here forever. And I realize I waited too long, but please don't move on. You dont need no one else.",
"People around you should really have nothing to say. Me, I'm just proud of the fact that you've done it your way.",
"I try to tell them don't judge me because you heard stuff.",
"I really hate to say I told you so. So I bite my tongue but you know, you know.",
"You know it's real when you are who you think you are." ,
"Now you're trying to find somebody to replace what I gave to you. It's a shame you didn't keep it.",
"I'm trying to let go of the past. Should we make this one a double? You ain't even gotta ask.",
"I be hearin' the s*** that you say through the grapevine, but jealousy is just love and hate at the same time.",
"I forgot about you last night, for the first time in a long time I did.",
"All in all I learned a lesson from it though, you never see it coming you just get to see it go.",
"I like a woman with a future and a past. A little attitude problem all good, it'll make the s*** last.",
"Somewhere between I want it and I got it." ,
"Just lie to my ears. Tell me it feel the same, that's all I've been dying to hear.",
"You say that you're over me, you always end up under me. You know how it goes, don't be crazy, don't play dumb with me.",
"Heard once that in dire times when you need a sign, that's when they appear." ,
"Live for today, plan for tomorrow, party tonight.",
"Guess you lose some and win some. As long as the outcome is income, you know I want it all and then some.",
"She's the reason it happened, but she's overreacting, and it's all because she don't want things to change.",
"I should let you know ahead, I'm coming back on my worst behavior." ,
"I love myself cause I swear their life is just not as fun.",
"I don't really give a f***, and my excuse is that I'm young.",
"I'm still in love, cause when it's that real, it's when it doesn't fade.",
"My only wish is I die real. Cause that truth hurts, and those lies heal.",
"Looking back on it, at least my pride is in tact. Cause we said no strings attached, and I still got tied up in that.",
"Take all my memories with you and just erase it for me, you can never reminisce when you forget it all.",
"Overly focused, it's far from the time to rest now." ,
"Jealousy in the air tonight, I could tell. I will never understand that, but oh well." ,
"She used to say, 'you can be whoever you want, even yourself.' I show up knowin' exactly who I was and never leave as myself." ,
"We had the type of nights where morning comes too soon, and nothing was the same." ,
"I just been plottin' on the low. Schemin' on the low. The furthest thing from perfect, like everyone I know." ,
"Actin like oh well, this is life, I guess. Nah, f*** that s***. Listen man, you can still do what you wanna do, you gotta trust that s***.",
"Same city, same friends if you're looking for me." ,
"Tables turn, bridges burn, you live and learn." ,
"All my young boys ‘round me sayin', 'get money and f*** these hoes.' Where we learn these values? I do not know what to tell you.",
"Talkin' that s*** with your back to me, just know it always get back to me." ,
"Wish you would learn to love people and use things, and not the other way around." ,
"I felt like we had it all planned out, I guess I f***ed up the vision. Learning the true consequences of my selfish decisions." ,
"the album is comin' tomorrow just wait on it.",
"You ain't the only one that's tryna be the only one." ,
"It's been one of those days you try and forget about, take a shot and let it out." ,
"This the s*** that I live for, with the people I die for." ,
"See the difference with me, I don't forget the past. And you gave me nothing, so I'ma give you nothing right back.",
"Should I listen to everybody or myself? Cause myself just told myself, 'you the motherf***in' man, you don't need no help.'",
"The girl that I want to save is like a danger to my health. Try being with somebody that want to be somebody else.",
"Bullets do not choose a victim. It is the shooter that picks ‘em.",
"You know it's real when your latest nights are your greatest nights. The sun is up when you get home, that's just a way of life.",
"Sinatra lifestyle, I'm just bein' frank with ya.",
"Lookin' for the right way to do the wrong things.",
"I spend money because spending time is hopeless, and know I pop bottles cause I bottle my emotions. At least I put it all in the open.",
"I'ma worry about me, give a f*** about you.",
"Everything's adding up, you've been through hell and back, that's why you're bad as f***, and you know you are.",
"Careful what you wish for, you just might get it all.",
"I could tell a lie if you askin' me my whereabouts, but I might talk that real if you ask me what I care about.",
"A lot of people sayin' f*** me, problem is they be tellin' everybody but me.",
"She said kiss me like you miss me, f*** me like you hate me. And when you're f***in' someone else, just f*** her like she ain't me.",
"You got the resolutions, we just got reservations.",
"Live fast, die young, never take it slow.",
"You think you got to me, I can just read your mind. You think I'm so caught up in where I am right now, but believe I remember it all.",
"I still got it for ya, and even though we let it go, it's better that you know.",
"I'm good, I don't need no help. Cause I'm better off by myself then to start over with somebody else.",
"Started not to give a f*** and stopped fearin' the consequence.",
"You mad cause nobody ever did it like me.",
"The good ones go, if you wait too long.",
"Never forgetting from where I came, and no matter where I'm headed, I promise to stay the same.",
"I want things to go my way, but as of late a lot of s*** been goin' sideways.",
"Talking to myself but I never listen. Cause man it's been awhile, and I swear that this one's different.",
"It's funny when you coming in first but you hope that you're last, you just hope that it last.",
"Only thing you got is some years on me. Man, f*** you and your time difference.",
"I'd really love to be the one you took a step with, so trust me when I tell you it'd be worth the effort.",
"To have known me would mean that there's a new me. And if you think I changed in the slightest, could've fooled me.",
"Say you'll be mine, say we'll be fine, say we'll be together.",
"You probably think it's too far to even have to care, well take a look at where you live, what if it happened there?",
"I learned working with the negatives could make for better pictures.",
"I'm in the world where things are taken, never given. How long they choose to love you will never be your decision.",
"Guess you lose some and win some. Long as the outcome is income, you know I want it all and then some.",
"I'ma sip until I feel it, I'ma smoke it till it's done. I don't really give a f***, and my excuse is that I'm young.",
"I'm tryna do better than good enough.",
"When all is said and done, more is always said than done.",
"I hate getting messages from you, especially when you say you should've stayed. F***s up my whole day, thought all these feelings went away.",
"If it's comin' from a n***a I don't know, then I don't trust it. If you comin' for my head, then motherf***er get to bustin'.",
"They say if you get her, you could understand her better. And she known to be a cheater, but that's only if you let her.",
"Damn, what happened to us? Life can always change, you have to adjust.",
"Mind in one place, heart in another.",
"I can tell certain people don't like me no more, new s*** don't excite me no more, guess they don't really make ‘em like me no more.",
"Just as I predicted, here we go again. They always say the hottest love has the coldest end.",
"Don't ask permission, just ask forgiveness.",
"You know life is what we make it, and a chance is like a picture, it'd be nice if you just take it.",
"These haters pretendin' that it's all good. Say that s*** in person man, I wish y'all would.",
"Is this even still a discussion? Don't you ever wake up disgusted? Every million I gain an enemy or a cousin.",
"Call me crazy, s*** at least you callin'. Feels better when you let it out, don't it?",
"I was curious and I'll never forget it baby, what an experience. You could've been the one, but it wasn't that serious.",
"I can tell that you've been crying all night, drinking all summer. Praying for your happiness, hope that you recover.",
"This life is something I would die for. Octobers Own, but it's lookin' like July 4th.",
"Damn, tell me what I'm gon' do, since everything I'm trying to forget is all true.",
"We just kill the summer every single time it come around. Never chase hoes, but we do be laughin' at the n***as tryna run 'em down.",
"I'm more than just a number, I doubt you'll find another. So every single summer, I'll be the one that you remember.",
"I know you waiting, waiting on a good thing. When the time's right, you should let me get to know you.",
"I think I have a chance at love but knowing me I'll miss it, cause me dedicating my time just isn't realistic.",
"Shawty wanna tell me secrets bout a rap n***a. I tell that bitch it's more attractive when you hold it down.",
"I never really had no one like you man, this all new s***. Made the world I know bigger, changed the way that I viewed it.",
"I'm about bein' single, seein' double, makin' triple.",
"Hennessy and enemies is one hell of a mixture.",
"Worryin' bout your followers, you need to get your dollars up.",
"Me falling and landing in love, I don't think should happen. Because everything's easier to get into than get out of.",
"You don't do it for the men, men never notice. You just do it for yourself, you the f***in' coldest.",
"My dreams are who I'm racin' with, but you can see I'm pacin' it, so that I'm always chasin' it.",
"If amazing was a young bitch, I'd be going to jail cause I be f***in' amazing.",
"You lookin' bad, girl for goodness sakes. You with all those curves, and me without no brakes.",
"Tell me how the f*** we supposed to stay friends, when you got a bunch of feelings that you don't show.",
"Daydream at nighttime, I think too much. Then I hit the nightclub till it's daytime and I drink too much.",
"Tell on me, I don't mind. Cause if they ask me about it, I won't lie. I'll admit that I'm yours, I don't really care who knows it.",
"They told me s*** would change, but I don't really see no change in us.",
"Women need attention therefore women will complain. Develop hatred for men and say that you're the one to blame.",
"It's crazy all the emotions forgot in a year.",
"I'm urgin all daughters to kiss they mothers,with those lips that all that lipstick covers. You're never too grown up to miss and hug her.",
"Make the most out of tonight and worry 'bout it all tomorrow."
]
module.exports = (robot) ->
  # "drizzy" and "drake" are aliases for the same command; one alternation
  # regex replaces the two previously duplicated, identical handlers.
  robot.respond /(?:drizzy|drake) quote me\b/i, (msg) ->
    msg.send msg.random quotes
  # Report the installed version of this package.
  robot.respond /drizzy version\b/i, (msg) ->
    msg.send require('../package').version
# Description
# A Drake Quote generator for Hubots.
#
# Configuration:
# None
#
# Commands:
#    hubot drizzy quote me - displays a random quote from drake
#    hubot drake quote me - displays a random quote from drake
#    hubot drizzy version - displays the installed package version
#
# Pool of quotes served at random by the "drizzy/drake quote me" responder.
quotes = [
"When you call too much, then I call less. You would always complain about my small texts, but what you call conversation, I call stress.",
"You don't love me, you just say that s*** to get to me.",
"We all have our nights though, don't be so ashamed. I've had mine, you've had yours, we both know.",
"Never thoughts of suicide, I'm too alive. But I still treat it like it's do or die.",
"I gotta feel alive, even if it kills me. Promise to always give you me, the real me.",
"F*** with me, I'll buy the shots. Live a little, cause n***as die a lot.",
"I hate getting messages from you, it's like you know the perfect s*** to say. F***s up my whole day, thought all these feelings went away.",
"Watch me, going out of the way, when I should've went home. Only time of the day I get to spend on my own.",
"Makin' major changes to the life I'm livin'. I had no choice, I had to prove I made the right decision.",
"Can't deny that I want you, but I'll lie if I have to.",
"No new n***as, n***a we don't feel that. F*** a fake friend, where ya real friends at?",
"I say f*** ya, less I'm with ya. If I take you out of the picture, I know real n***as won't miss ya, no lie.",
"I finally get a moment to myself, I would realize you were everything I'm missing, and you'll tell me you're in love with someone else.",
"I just need some closure. Ain't no turnin' back for me, I'm in it till it's over.",
"I was stayin' home when they was havin' fun, so please don't be surprised when they announce that I won.",
"Feelin' good, livin' better. I think maybe I was numb to it last year, but you know I feel it now more than ever.",
"You let me go, little did you know, I could've made it all on my own.",
"I got my eyes on you, you're everything that I see. I want your hot love and emotion endlessly.",
"Accept yourself. You don't have to prove s*** to no one except yourself.",
"Don't get impatient when it takes too long, and drink it all even when it tastes too strong.",
"All so convinced that you're following your heart, cause your mind don't control what it does sometimes.",
"They say real girls ain't never perfect, perfect girls ain't real.",
"Ain't no wishing over on this side. Y'all don't f*** with us then we don't f*** with y'all, it's no different over on this side.",
"We walk the same path, but got on different shoes. Live in the same building, but we got different views.",
"I'd rather be with you, but you are not around. So I'ma call somebody up and see if they'd be down, cause I hate sleeping alone.",
"You were the one. You've been the one from the moment I seen ya.",
"My family gets all of my loyalty, all of my patience. My life for your life, man I wouldn't trade it.",
"Don't ever forget the moment you began to doubt, transitioning from fitting in to standing out.",
"I guess that's how it goes. They hear about your cons, but focus on your pros, and love you for who you are from the bottom of their soul.",
"Make sure you don't fall in love, cause I don't have the time for that.",
"Too many times I've been wrong, I guess being right takes too long.",
"Isn't it amazing how you talk all this s*** and we still lack communication.",
"May your neighbors respect you, trouble neglect you. Angels protect you, and heaven accept you.",
"How I put it on you, crazy. That's when you start to laugh, saying I just want what I can't have.",
"I'm still fly, I'm sky high, and I dare anybody to try and cut my wings.",
"You love me and I love you, and your heart hurts, mine does too. And it's just words and they cut deep but it's our world, it's just us two.",
"My life is moving fast, the time will often past. And something will remind me, I can't put this behind me.",
"I'm just doin' me and you could never understand it.",
"We sure make it feel like we're together, cause we're scared to see each other with somebody else.",
"I was a cold dude, I'm gettin' back to my ways.",
"I hate the s*** you do, but I still text you back, say I miss you too.",
"The weekend's here, started it right, even if you only get part of it right.",
"If you end up needin some extra help then I can help.You know, back on ya feet and s***.Tryna get my karma up, f*** the guilty &; greedy s***",
"Holla if you got me and f*** you if you had me.",
"Young, dumb, lookin' in the mirror tryin' to figure out who I'm gonna be this year.",
"All them boys in my will, all them boys is my Wills. Anything happen to pop and I got you like Will Smith.",
"You got your resolutions, we just got reservations.",
"Feelin' good, livin' better. But it's evident that nothing can prepare us for certain moments that you just gotta cherish.",
"Got everything I'm asking for but you.",
"I'm slowly running out of all the time that I invested, making all the same mistakes and I'm just trying to correct it.",
"It's such a small place, not much to do but talk and listen. The men are jealous and the women all in competition.",
"Next time I stand tall, I wanna be standing for you. And next time I spend I want it all to be for you.",
"This here is something personal, highly doubt this feeling is reversible. Knowledge is pain and that's why it hurts to know.",
"I'm lucky that you picked up, lucky that you stayed on. I need someone to put this weight on.",
"I've been avoiding commitment, that's why I'm in this position. I'm scared to let somebody in on this.",
"I'm thinkin' about my goals, and how we used to say a goal is just a dream with a deadline.",
"I've asked about you and they told me things. But my mind didn't change, I still feel the same.",
"Sweat pants, hair tied, chillin' with no makeup on. That's when you're the prettiest, I hope that you don't take it wrong.",
"I'm honest, I make mistakes, I'd be the second to admit it.",
"I'm really tryna make it more than what it is, cause everybody dies but not everybody lives.",
"The good ones go, if you wait too long. So you should go, before you stay too long.",
"That's why I pick and choose. I don't get s*** confused. I got a small circle, I'm not with different crews.",
"Make it sooner than later, we won't be here forever. And I realize I waited too long, but please don't move on. You dont need no one else.",
"People around you should really have nothing to say. Me, I'm just proud of the fact that you've done it your way.",
"I try to tell them don't judge me because you heard stuff.",
"I really hate to say I told you so. So I bite my tongue but you know, you know.",
"You know it's real when you are who you think you are.",
"Now you're trying to find somebody to replace what I gave to you. It's a shame you didn't keep it.",
"I'm trying to let go of the past. Should we make this one a double? You ain't even gotta ask.",
"I be hearin' the s*** that you say through the grapevine, but jealousy is just love and hate at the same time.",
"I forgot about you last night, for the first time in a long time I did.",
"All in all I learned a lesson from it though, you never see it coming you just get to see it go.",
"I like a woman with a future and a past. A little attitude problem all good, it'll make the s*** last.",
"Somewhere between I want it and I got it.",
"Just lie to my ears. Tell me it feel the same, that's all I've been dying to hear.",
"You say that you're over me, you always end up under me. You know how it goes, don't be crazy, don't play dumb with me.",
"Heard once that in dire times when you need a sign, that's when they appear.",
"Live for today, plan for tomorrow, party tonight.",
"Guess you lose some and win some. As long as the outcome is income, you know I want it all and then some.",
"She's the reason it happened, but she's overreacting, and it's all because she don't want things to change.",
"I should let you know ahead, I'm coming back on my worst behavior.",
"I love myself cause I swear their life is just not as fun.",
"I don't really give a f***, and my excuse is that I'm young.",
"I'm still in love, cause when it's that real, it's when it doesn't fade.",
"My only wish is I die real. Cause that truth hurts, and those lies heal.",
"Looking back on it, at least my pride is in tact. Cause we said no strings attached, and I still got tied up in that.",
"Take all my memories with you and just erase it for me, you can never reminisce when you forget it all.",
"Overly focused, it's far from the time to rest now.",
"Jealousy in the air tonight, I could tell. I will never understand that, but oh well.",
"She used to say, 'you can be whoever you want, even yourself.' I show up knowin' exactly who I was and never leave as myself.",
"We had the type of nights where morning comes too soon, and nothing was the same.",
"I just been plottin' on the low. Schemin' on the low. The furthest thing from perfect, like everyone I know.",
"Actin like oh well, this is life, I guess. Nah, f*** that s***. Listen man, you can still do what you wanna do, you gotta trust that s***.",
"Same city, same friends if you're looking for me.",
"Tables turn, bridges burn, you live and learn.",
"All my young boys ‘round me sayin', 'get money and f*** these hoes.' Where we learn these values? I do not know what to tell you.",
"Talkin' that s*** with your back to me, just know it always get back to me.",
"Wish you would learn to love people and use things, and not the other way around.",
"I felt like we had it all planned out, I guess I f***ed up the vision. Learning the true consequences of my selfish decisions.",
"the album is comin' tomorrow just wait on it.",
"You ain't the only one that's tryna be the only one.",
"It's been one of those days you try and forget about, take a shot and let it out.",
"This the s*** that I live for, with the people I die for.",
"See the difference with me, I don't forget the past. And you gave me nothing, so I'ma give you nothing right back.",
"Should I listen to everybody or myself? Cause myself just told myself, 'you the motherf***in' man, you don't need no help.'",
"The girl that I want to save is like a danger to my health. Try being with somebody that want to be somebody else.",
"Bullets do not choose a victim. It is the shooter that picks ‘em.",
"You know it's real when your latest nights are your greatest nights. The sun is up when you get home, that's just a way of life.",
"Sinatra lifestyle, I'm just bein' frank with ya.",
"Lookin' for the right way to do the wrong things.",
"I spend money because spending time is hopeless, and know I pop bottles cause I bottle my emotions. At least I put it all in the open.",
"I'ma worry about me, give a f*** about you.",
"Everything's adding up, you've been through hell and back, that's why you're bad as f***, and you know you are.",
"Careful what you wish for, you just might get it all.",
"I could tell a lie if you askin' me my whereabouts, but I might talk that real if you ask me what I care about.",
"A lot of people sayin' f*** me, problem is they be tellin' everybody but me.",
"She said kiss me like you miss me, f*** me like you hate me. And when you're f***in' someone else, just f*** her like she ain't me.",
"You got the resolutions, we just got reservations.",
"Live fast, die young, never take it slow.",
"You think you got to me, I can just read your mind. You think I'm so caught up in where I am right now, but believe I remember it all.",
"I still got it for ya, and even though we let it go, it's better that you know.",
"I'm good, I don't need no help. Cause I'm better off by myself then to start over with somebody else.",
"Started not to give a f*** and stopped fearin' the consequence.",
"You mad cause nobody ever did it like me.",
"The good ones go, if you wait too long.",
"Never forgetting from where I came, and no matter where I'm headed, I promise to stay the same.",
"I want things to go my way, but as of late a lot of s*** been goin' sideways.",
"Talking to myself but I never listen. Cause man it's been awhile, and I swear that this one's different.",
"It's funny when you coming in first but you hope that you're last, you just hope that it last.",
"Only thing you got is some years on me. Man, f*** you and your time difference.",
"I'd really love to be the one you took a step with, so trust me when I tell you it'd be worth the effort.",
"To have known me would mean that there's a new me. And if you think I changed in the slightest, could've fooled me.",
"Say you'll be mine, say we'll be fine, say we'll be together.",
"You probably think it's too far to even have to care, well take a look at where you live, what if it happened there?",
"I learned working with the negatives could make for better pictures.",
"I'm in the world where things are taken, never given. How long they choose to love you will never be your decision.",
"Guess you lose some and win some. Long as the outcome is income, you know I want it all and then some.",
"I'ma sip until I feel it, I'ma smoke it till it's done. I don't really give a f***, and my excuse is that I'm young.",
"I'm tryna do better than good enough.",
"When all is said and done, more is always said than done.",
"I hate getting messages from you, especially when you say you should've stayed. F***s up my whole day, thought all these feelings went away.",
"If it's comin' from a n***a I don't know, then I don't trust it. If you comin' for my head, then motherf***er get to bustin'.",
"They say if you get her, you could understand her better. And she known to be a cheater, but that's only if you let her.",
"Damn, what happened to us? Life can always change, you have to adjust.",
"Mind in one place, heart in another.",
"I can tell certain people don't like me no more, new s*** don't excite me no more, guess they don't really make ‘em like me no more.",
"Just as I predicted, here we go again. They always say the hottest love has the coldest end.",
"Don't ask permission, just ask forgiveness.",
"You know life is what we make it, and a chance is like a picture, it'd be nice if you just take it.",
"These haters pretendin' that it's all good. Say that s*** in person man, I wish y'all would.",
"Is this even still a discussion? Don't you ever wake up disgusted? Every million I gain an enemy or a cousin.",
"Call me crazy, s*** at least you callin'. Feels better when you let it out, don't it?",
"I was curious and I'll never forget it baby, what an experience. You could've been the one, but it wasn't that serious.",
"I can tell that you've been crying all night, drinking all summer. Praying for your happiness, hope that you recover.",
"This life is something I would die for. Octobers Own, but it's lookin' like July 4th.",
"Damn, tell me what I'm gon' do, since everything I'm trying to forget is all true.",
"We just kill the summer every single time it come around. Never chase hoes, but we do be laughin' at the n***as tryna run 'em down.",
"I'm more than just a number, I doubt you'll find another. So every single summer, I'll be the one that you remember.",
"I know you waiting, waiting on a good thing. When the time's right, you should let me get to know you.",
"I think I have a chance at love but knowing me I'll miss it, cause me dedicating my time just isn't realistic.",
"Shawty wanna tell me secrets bout a rap n***a. I tell that bitch it's more attractive when you hold it down.",
"I never really had no one like you man, this all new s***. Made the world I know bigger, changed the way that I viewed it.",
"I'm about bein' single, seein' double, makin' triple.",
"Hennessy and enemies is one hell of a mixture.",
"Worryin' bout your followers, you need to get your dollars up.",
"Me falling and landing in love, I don't think should happen. Because everything's easier to get into than get out of.",
"You don't do it for the men, men never notice. You just do it for yourself, you the f***in' coldest.",
"My dreams are who I'm racin' with, but you can see I'm pacin' it, so that I'm always chasin' it.",
"If amazing was a young bitch, I'd be going to jail cause I be f***in' amazing.",
"You lookin' bad, girl for goodness sakes. You with all those curves, and me without no brakes.",
"Tell me how the f*** we supposed to stay friends, when you got a bunch of feelings that you don't show.",
"Daydream at nighttime, I think too much. Then I hit the nightclub till it's daytime and I drink too much.",
"Tell on me, I don't mind. Cause if they ask me about it, I won't lie. I'll admit that I'm yours, I don't really care who knows it.",
"They told me s*** would change, but I don't really see no change in us.",
"Women need attention therefore women will complain. Develop hatred for men and say that you're the one to blame.",
"It's crazy all the emotions forgot in a year.",
"I'm urgin all daughters to kiss they mothers,with those lips that all that lipstick covers. You're never too grown up to miss and hug her.",
"Make the most out of tonight and worry 'bout it all tomorrow."
]
module.exports = (robot) ->
  # Reply with a randomly selected Drake quote; bound to both trigger aliases.
  sendQuote = (msg) ->
    msg.send msg.random quotes
  robot.respond /drizzy quote me\b/i, sendQuote
  robot.respond /drake quote me\b/i, sendQuote
  # Reply with the installed version of this package.
  robot.respond /drizzy version\b/i, (msg) ->
    msg.send require('../package').version
# Description
# A Drake Quote generator for Hubots.
#
# Configuration:
# None
#
# Commands:
#    hubot drizzy quote me - displays a random quote from drake
#    hubot drake quote me - displays a random quote from drake
#    hubot drizzy version - displays the installed package version
#
quotes = [
"When you call too much, then I call less. You would always complain about my small texts, but what you call conversation, I call stress.",
"You don't love me, you just say that s*** to get to me.",
"We all have our nights though, don't be so ashamed. I've had mine, you've had yours, we both know.",
"Never thoughts of suicide, I'm too alive. But I still treat it like it's do or die.",
"I gotta feel alive, even if it kills me. Promise to always give you me, the real me.",
"F*** with me, I'll buy the shots. Live a little, cause n***as die a lot.",
"I hate getting messages from you, it's like you know the perfect s*** to say. F***s up my whole day, thought all these feelings went away.",
"Watch me, going out of the way, when I should've went home. Only time of the day I get to spend on my own.",
"Makin' major changes to the life I'm livin'. I had no choice, I had to prove I made the right decision.",
"Can't deny that I want you, but I'll lie if I have to.",
"No new n***as, n***a we don't feel that. F*** a fake friend, where ya real friends at?",
"I say f*** ya, less I'm with ya. If I take you out of the picture, I know real n***as won't miss ya, no lie.",
"I finally get a moment to myself, I would realize you were everything I'm missing, and you'll tell me you're in love with someone else.",
"I just need some closure. Ain't no turnin' back for me, I'm in it till it's over.",
"I was stayin' home when they was havin' fun, so please don't be surprised when they announce that I won.",
"Feelin' good, livin' better. I think maybe I was numb to it last year, but you know I feel it now more than ever.",
"You let me go, little did you know, I could've made it all on my own.",
"I got my eyes on you, you're everything that I see. I want your hot love and emotion endlessly.",
"Accept yourself. You don't have to prove s*** to no one except yourself.",
"Don't get impatient when it takes too long, and drink it all even when it tastes too strong.",
"All so convinced that you're following your heart, cause your mind don't control what it does sometimes.",
"They say real girls ain't never perfect, perfect girls ain't real.",
"Ain't no wishing over on this side. Y'all don't f*** with us then we don't f*** with y'all, it's no different over on this side.",
"We walk the same path, but got on different shoes. Live in the same building, but we got different views.",
"I'd rather be with you, but you are not around. So I'ma call somebody up and see if they'd be down, cause I hate sleeping alone.",
"You were the one. You've been the one from the moment I seen ya.",
"My family gets all of my loyalty, all of my patience. My life for your life, man I wouldn't trade it.",
"Don't ever forget the moment you began to doubt, transitioning from fitting in to standing out.",
"I guess that's how it goes. They hear about your cons, but focus on your pros, and love you for who you are from the bottom of their soul.",
"Make sure you don't fall in love, cause I don't have the time for that.",
"Too many times I've been wrong, I guess being right takes too long.",
"Isn't it amazing how you talk all this s*** and we still lack communication.",
"May your neighbors respect you, trouble neglect you. Angels protect you, and heaven accept you.",
"How I put it on you, crazy. That's when you start to laugh, saying I just want what I can't have.",
"I'm still fly, I'm sky high, and I dare anybody to try and cut my wings.",
"You love me and I love you, and your heart hurts, mine does too. And it's just words and they cut deep but it's our world, it's just us two.",
"My life is moving fast, the time will often past. And something will remind me, I can't put this behind me.",
"I'm just doin' me and you could never understand it.",
"We sure make it feel like we're together, cause we're scared to see each other with somebody else.",
"I was a cold dude, I'm gettin' back to my ways.",
"I hate the s*** you do, but I still text you back, say I miss you too." ,
"The weekend's here, started it right, even if you only get part of it right.",
"If you end up needin some extra help then I can help.You know, back on ya feet and s***.Tryna get my karma up, f*** the guilty &; greedy s***",
"Holla if you got me and f*** you if you had me.",
"Young, dumb, lookin' in the mirror tryin' to figure out who I'm gonna be this year.",
"All them boys in my will, all them boys is my Wills. Anything happen to pop and I got you like PI:NAME:<NAME>END_PI.",
"You got your resolutions, we just got reservations.",
"Feelin' good, livin' better. But it's evident that nothing can prepare us for certain moments that you just gotta cherish.",
"Got everything I'm asking for but you.",
"I'm slowly running out of all the time that I invested, making all the same mistakes and I'm just trying to correct it.",
"It's such a small place, not much to do but talk and listen. The men are jealous and the women all in competition.",
"Next time I stand tall, I wanna be standing for you. And next time I spend I want it all to be for you.",
"This here is something personal, highly doubt this feeling is reversible. Knowledge is pain and that's why it hurts to know.",
"I'm lucky that you picked up, lucky that you stayed on. I need someone to put this weight on.",
"I've been avoiding commitment, that's why I'm in this position. I'm scared to let somebody in on this.",
"I'm thinkin' about my goals, and how we used to say a goal is just a dream with a deadline.",
"I've asked about you and they told me things. But my mind didn't change, I still feel the same.",
"Sweat pants, hair tied, chillin' with no makeup on. That's when you're the prettiest, I hope that you don't take it wrong.",
"I'm honest, I make mistakes, I'd be the second to admit it." ,
"I'm really tryna make it more than what it is, cause everybody dies but not everybody lives.",
"The good ones go, if you wait too long. So you should go, before you stay too long.",
"That's why I pick and choose. I don't get s*** confused. I got a small circle, I'm not with different crews.",
"Make it sooner than later, we won't be here forever. And I realize I waited too long, but please don't move on. You dont need no one else.",
"People around you should really have nothing to say. Me, I'm just proud of the fact that you've done it your way.",
"I try to tell them don't judge me because you heard stuff.",
"I really hate to say I told you so. So I bite my tongue but you know, you know.",
"You know it's real when you are who you think you are." ,
"Now you're trying to find somebody to replace what I gave to you. It's a shame you didn't keep it.",
"I'm trying to let go of the past. Should we make this one a double? You ain't even gotta ask.",
"I be hearin' the s*** that you say through the grapevine, but jealousy is just love and hate at the same time.",
"I forgot about you last night, for the first time in a long time I did.",
"All in all I learned a lesson from it though, you never see it coming you just get to see it go.",
"I like a woman with a future and a past. A little attitude problem all good, it'll make the s*** last.",
"Somewhere between I want it and I got it." ,
"Just lie to my ears. Tell me it feel the same, that's all I've been dying to hear.",
"You say that you're over me, you always end up under me. You know how it goes, don't be crazy, don't play dumb with me.",
"Heard once that in dire times when you need a sign, that's when they appear." ,
"Live for today, plan for tomorrow, party tonight.",
"Guess you lose some and win some. As long as the outcome is income, you know I want it all and then some.",
"She's the reason it happened, but she's overreacting, and it's all because she don't want things to change.",
"I should let you know ahead, I'm coming back on my worst behavior." ,
"I love myself cause I swear their life is just not as fun.",
"I don't really give a f***, and my excuse is that I'm young.",
"I'm still in love, cause when it's that real, it's when it doesn't fade.",
"My only wish is I die real. Cause that truth hurts, and those lies heal.",
"Looking back on it, at least my pride is in tact. Cause we said no strings attached, and I still got tied up in that.",
"Take all my memories with you and just erase it for me, you can never reminisce when you forget it all.",
"Overly focused, it's far from the time to rest now." ,
"Jealousy in the air tonight, I could tell. I will never understand that, but oh well." ,
"She used to say, 'you can be whoever you want, even yourself.' I show up knowin' exactly who I was and never leave as myself." ,
"We had the type of nights where morning comes too soon, and nothing was the same." ,
"I just been plottin' on the low. Schemin' on the low. The furthest thing from perfect, like everyone I know." ,
"Actin like oh well, this is life, I guess. Nah, f*** that s***. Listen man, you can still do what you wanna do, you gotta trust that s***.",
"Same city, same friends if you're looking for me." ,
"Tables turn, bridges burn, you live and learn." ,
"All my young boys ‘round me sayin', 'get money and f*** these hoes.' Where we learn these values? I do not know what to tell you.",
"Talkin' that s*** with your back to me, just know it always get back to me." ,
"Wish you would learn to love people and use things, and not the other way around." ,
"I felt like we had it all planned out, I guess I f***ed up the vision. Learning the true consequences of my selfish decisions." ,
"the album is comin' tomorrow just wait on it.",
"You ain't the only one that's tryna be the only one." ,
"It's been one of those days you try and forget about, take a shot and let it out." ,
"This the s*** that I live for, with the people I die for." ,
"See the difference with me, I don't forget the past. And you gave me nothing, so I'ma give you nothing right back.",
"Should I listen to everybody or myself? Cause myself just told myself, 'you the motherf***in' man, you don't need no help.'",
"The girl that I want to save is like a danger to my health. Try being with somebody that want to be somebody else.",
"Bullets do not choose a victim. It is the shooter that picks ‘em.",
"You know it's real when your latest nights are your greatest nights. The sun is up when you get home, that's just a way of life.",
"Sinatra lifestyle, I'm just bein' frank with ya.",
"Lookin' for the right way to do the wrong things.",
"I spend money because spending time is hopeless, and know I pop bottles cause I bottle my emotions. At least I put it all in the open.",
"I'ma worry about me, give a f*** about you.",
"Everything's adding up, you've been through hell and back, that's why you're bad as f***, and you know you are.",
"Careful what you wish for, you just might get it all.",
"I could tell a lie if you askin' me my whereabouts, but I might talk that real if you ask me what I care about.",
"A lot of people sayin' f*** me, problem is they be tellin' everybody but me.",
"She said kiss me like you miss me, f*** me like you hate me. And when you're f***in' someone else, just f*** her like she ain't me.",
"You got the resolutions, we just got reservations.",
"Live fast, die young, never take it slow.",
"You think you got to me, I can just read your mind. You think I'm so caught up in where I am right now, but believe I remember it all.",
"I still got it for ya, and even though we let it go, it's better that you know.",
"I'm good, I don't need no help. Cause I'm better off by myself then to start over with somebody else.",
"Started not to give a f*** and stopped fearin' the consequence.",
"You mad cause nobody ever did it like me.",
"The good ones go, if you wait too long.",
"Never forgetting from where I came, and no matter where I'm headed, I promise to stay the same.",
"I want things to go my way, but as of late a lot of s*** been goin' sideways.",
"Talking to myself but I never listen. Cause man it's been awhile, and I swear that this one's different.",
"It's funny when you coming in first but you hope that you're last, you just hope that it last.",
"Only thing you got is some years on me. Man, f*** you and your time difference.",
"I'd really love to be the one you took a step with, so trust me when I tell you it'd be worth the effort.",
"To have known me would mean that there's a new me. And if you think I changed in the slightest, could've fooled me.",
"Say you'll be mine, say we'll be fine, say we'll be together.",
"You probably think it's too far to even have to care, well take a look at where you live, what if it happened there?",
"I learned working with the negatives could make for better pictures.",
"I'm in the world where things are taken, never given. How long they choose to love you will never be your decision.",
"Guess you lose some and win some. Long as the outcome is income, you know I want it all and then some.",
"I'ma sip until I feel it, I'ma smoke it till it's done. I don't really give a f***, and my excuse is that I'm young.",
"I'm tryna do better than good enough.",
"When all is said and done, more is always said than done.",
"I hate getting messages from you, especially when you say you should've stayed. F***s up my whole day, thought all these feelings went away.",
"If it's comin' from a n***a I don't know, then I don't trust it. If you comin' for my head, then motherf***er get to bustin'.",
"They say if you get her, you could understand her better. And she known to be a cheater, but that's only if you let her.",
"Damn, what happened to us? Life can always change, you have to adjust.",
"Mind in one place, heart in another.",
"I can tell certain people don't like me no more, new s*** don't excite me no more, guess they don't really make ‘em like me no more.",
"Just as I predicted, here we go again. They always say the hottest love has the coldest end.",
"Don't ask permission, just ask forgiveness.",
"You know life is what we make it, and a chance is like a picture, it'd be nice if you just take it.",
"These haters pretendin' that it's all good. Say that s*** in person man, I wish y'all would.",
"Is this even still a discussion? Don't you ever wake up disgusted? Every million I gain an enemy or a cousin.",
"Call me crazy, s*** at least you callin'. Feels better when you let it out, don't it?",
"I was curious and I'll never forget it baby, what an experience. You could've been the one, but it wasn't that serious.",
"I can tell that you've been crying all night, drinking all summer. Praying for your happiness, hope that you recover.",
"This life is something I would die for. Octobers Own, but it's lookin' like July 4th.",
"Damn, tell me what I'm gon' do, since everything I'm trying to forget is all true.",
"We just kill the summer every single time it come around. Never chase hoes, but we do be laughin' at the n***as tryna run 'em down.",
"I'm more than just a number, I doubt you'll find another. So every single summer, I'll be the one that you remember.",
"I know you waiting, waiting on a good thing. When the time's right, you should let me get to know you.",
"I think I have a chance at love but knowing me I'll miss it, cause me dedicating my time just isn't realistic.",
"Shawty wanna tell me secrets bout a rap n***a. I tell that bitch it's more attractive when you hold it down.",
"I never really had no one like you man, this all new s***. Made the world I know bigger, changed the way that I viewed it.",
"I'm about bein' single, seein' double, makin' triple.",
"Hennessy and enemies is one hell of a mixture.",
"Worryin' bout your followers, you need to get your dollars up.",
"Me falling and landing in love, I don't think should happen. Because everything's easier to get into than get out of.",
"You don't do it for the men, men never notice. You just do it for yourself, you the f***in' coldest.",
"My dreams are who I'm racin' with, but you can see I'm pacin' it, so that I'm always chasin' it.",
"If amazing was a young bitch, I'd be going to jail cause I be f***in' amazing.",
"You lookin' bad, girl for goodness sakes. You with all those curves, and me without no brakes.",
"Tell me how the f*** we supposed to stay friends, when you got a bunch of feelings that you don't show.",
"Daydream at nighttime, I think too much. Then I hit the nightclub till it's daytime and I drink too much.",
"Tell on me, I don't mind. Cause if they ask me about it, I won't lie. I'll admit that I'm yours, I don't really care who knows it.",
"They told me s*** would change, but I don't really see no change in us.",
"Women need attention therefore women will complain. Develop hatred for men and say that you're the one to blame.",
"It's crazy all the emotions forgot in a year.",
"I'm urgin all daughters to kiss they mothers,with those lips that all that lipstick covers. You're never too grown up to miss and hug her.",
"Make the most out of tonight and worry 'bout it all tomorrow."
]
module.exports = (robot) ->
robot.respond /drizzy quote me\b/i, (msg) ->
msg.send msg.random quotes
robot.respond /drake quote me\b/i, (msg) ->
msg.send msg.random quotes
robot.respond /drizzy version\b/i, (msg) ->
msg.send require('../package').version
|
[
{
"context": "SOFTWARE.\n\nangular-google-maps\nhttps://github.com/nlaplante/angular-google-maps\n\n@authors\nNicolas Laplante - ",
"end": 1154,
"score": 0.9830113053321838,
"start": 1145,
"tag": "USERNAME",
"value": "nlaplante"
},
{
"context": "ithub.com/nlaplante/angular-google-map... | www/lib/js/angular-google-maps/src/coffee/directives/polygon.coffee | neilff/liqbo-cordova | 1 | ###
!
The MIT License
Copyright (c) 2010-2013 Google, Inc. http://angularjs.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
angular-google-maps
https://github.com/nlaplante/angular-google-maps
@authors
Nicolas Laplante - https://plus.google.com/108189012221374960701
Nicholas McCready - https://twitter.com/nmccready
###
angular.module("google-maps").directive "polygon", ["$log", "$timeout", ($log, $timeout) ->
validatePathPoints = (path) ->
i = 0
while i < path.length
return false if angular.isUndefined(path[i].latitude) or angular.isUndefined(path[i].longitude)
i++
true
convertPathPoints = (path) ->
result = new google.maps.MVCArray()
i = 0
while i < path.length
result.push new google.maps.LatLng(path[i].latitude, path[i].longitude)
i++
result
extendMapBounds = (map, points) ->
bounds = new google.maps.LatLngBounds()
i = 0
while i < points.length
bounds.extend points.getAt(i)
i++
map.fitBounds bounds
#
# * Utility functions
#
###
Check if a value is true
###
isTrue = (val) ->
angular.isDefined(val) and val isnt null and val is true or val is "1" or val is "y" or val is "true"
"use strict"
DEFAULTS = {}
restrict: "ECA"
require: "^googleMap"
replace: true
scope:
path: "=path"
stroke: "=stroke"
clickable: "="
draggable: "="
editable: "="
geodesic: "="
icons: "=icons"
visible: "="
link: (scope, element, attrs, mapCtrl) ->
# Validate required properties
if angular.isUndefined(scope.path) or scope.path is null or scope.path.length < 2 or not validatePathPoints(scope.path)
$log.error "polyline: no valid path attribute found"
return
# Wrap polyline initialization inside a $timeout() call to make sure the map is created already
$timeout ->
map = mapCtrl.getMap()
pathPoints = convertPathPoints(scope.path)
opts = angular.extend({}, DEFAULTS,
map: map
path: pathPoints
strokeColor: scope.stroke and scope.stroke.color
strokeOpacity: scope.stroke and scope.stroke.opacity
strokeWeight: scope.stroke and scope.stroke.weight
)
angular.forEach
clickable: true
draggable: false
editable: false
geodesic: false
visible: true
, (defaultValue, key) ->
if angular.isUndefined(scope[key]) or scope[key] is null
opts[key] = defaultValue
else
opts[key] = scope[key]
polyline = new google.maps.Polyline(opts)
extendMapBounds map, pathPoints if isTrue(attrs.fit)
if angular.isDefined(scope.editable)
scope.$watch "editable", (newValue, oldValue) ->
polyline.setEditable newValue
if angular.isDefined(scope.draggable)
scope.$watch "draggable", (newValue, oldValue) ->
polyline.setDraggable newValue
if angular.isDefined(scope.visible)
scope.$watch "visible", (newValue, oldValue) ->
polyline.setVisible newValue
pathSetAtListener = undefined
pathInsertAtListener = undefined
pathRemoveAtListener = undefined
polyPath = polyline.getPath()
pathSetAtListener = google.maps.event.addListener(polyPath, "set_at", (index) ->
value = polyPath.getAt(index)
return unless value
return if not value.lng or not value.lat
scope.path[index].latitude = value.lat()
scope.path[index].longitude = value.lng()
scope.$apply()
)
pathInsertAtListener = google.maps.event.addListener(polyPath, "insert_at", (index) ->
value = polyPath.getAt(index)
return unless value
return if not value.lng or not value.lat
scope.path.splice index, 0,
latitude: value.lat()
longitude: value.lng()
scope.$apply()
)
pathRemoveAtListener = google.maps.event.addListener(polyPath, "remove_at", (index) ->
scope.path.splice index, 1
scope.$apply()
)
scope.$watch "path", ((newArray) ->
oldArray = polyline.getPath()
if newArray isnt oldArray
if newArray
polyline.setMap map
i = 0
oldLength = oldArray.getLength()
newLength = newArray.length
l = Math.min(oldLength, newLength)
while i < l
oldValue = oldArray.getAt(i)
newValue = newArray[i]
oldArray.setAt i, new google.maps.LatLng(newValue.latitude, newValue.longitude) if (oldValue.lat() isnt newValue.latitude) or (oldValue.lng() isnt newValue.longitude)
i++
while i < newLength
newValue = newArray[i]
oldArray.push new google.maps.LatLng(newValue.latitude, newValue.longitude)
i++
while i < oldLength
oldArray.pop()
i++
extendMapBounds map, oldArray if isTrue(attrs.fit)
else
# Remove polyline
polyline.setMap null
), true
# Remove polyline on scope $destroy
scope.$on "$destroy", ->
polyline.setMap null
pathSetAtListener()
pathSetAtListener = null
pathInsertAtListener()
pathInsertAtListener = null
pathRemoveAtListener()
pathRemoveAtListener = null
] | 128782 | ###
!
The MIT License
Copyright (c) 2010-2013 Google, Inc. http://angularjs.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
angular-google-maps
https://github.com/nlaplante/angular-google-maps
@authors
<NAME> - https://plus.google.com/108189012221374960701
<NAME> - https://twitter.com/nmccready
###
angular.module("google-maps").directive "polygon", ["$log", "$timeout", ($log, $timeout) ->
validatePathPoints = (path) ->
i = 0
while i < path.length
return false if angular.isUndefined(path[i].latitude) or angular.isUndefined(path[i].longitude)
i++
true
convertPathPoints = (path) ->
result = new google.maps.MVCArray()
i = 0
while i < path.length
result.push new google.maps.LatLng(path[i].latitude, path[i].longitude)
i++
result
extendMapBounds = (map, points) ->
bounds = new google.maps.LatLngBounds()
i = 0
while i < points.length
bounds.extend points.getAt(i)
i++
map.fitBounds bounds
#
# * Utility functions
#
###
Check if a value is true
###
isTrue = (val) ->
angular.isDefined(val) and val isnt null and val is true or val is "1" or val is "y" or val is "true"
"use strict"
DEFAULTS = {}
restrict: "ECA"
require: "^googleMap"
replace: true
scope:
path: "=path"
stroke: "=stroke"
clickable: "="
draggable: "="
editable: "="
geodesic: "="
icons: "=icons"
visible: "="
link: (scope, element, attrs, mapCtrl) ->
# Validate required properties
if angular.isUndefined(scope.path) or scope.path is null or scope.path.length < 2 or not validatePathPoints(scope.path)
$log.error "polyline: no valid path attribute found"
return
# Wrap polyline initialization inside a $timeout() call to make sure the map is created already
$timeout ->
map = mapCtrl.getMap()
pathPoints = convertPathPoints(scope.path)
opts = angular.extend({}, DEFAULTS,
map: map
path: pathPoints
strokeColor: scope.stroke and scope.stroke.color
strokeOpacity: scope.stroke and scope.stroke.opacity
strokeWeight: scope.stroke and scope.stroke.weight
)
angular.forEach
clickable: true
draggable: false
editable: false
geodesic: false
visible: true
, (defaultValue, key) ->
if angular.isUndefined(scope[key]) or scope[key] is null
opts[key] = defaultValue
else
opts[key] = scope[key]
polyline = new google.maps.Polyline(opts)
extendMapBounds map, pathPoints if isTrue(attrs.fit)
if angular.isDefined(scope.editable)
scope.$watch "editable", (newValue, oldValue) ->
polyline.setEditable newValue
if angular.isDefined(scope.draggable)
scope.$watch "draggable", (newValue, oldValue) ->
polyline.setDraggable newValue
if angular.isDefined(scope.visible)
scope.$watch "visible", (newValue, oldValue) ->
polyline.setVisible newValue
pathSetAtListener = undefined
pathInsertAtListener = undefined
pathRemoveAtListener = undefined
polyPath = polyline.getPath()
pathSetAtListener = google.maps.event.addListener(polyPath, "set_at", (index) ->
value = polyPath.getAt(index)
return unless value
return if not value.lng or not value.lat
scope.path[index].latitude = value.lat()
scope.path[index].longitude = value.lng()
scope.$apply()
)
pathInsertAtListener = google.maps.event.addListener(polyPath, "insert_at", (index) ->
value = polyPath.getAt(index)
return unless value
return if not value.lng or not value.lat
scope.path.splice index, 0,
latitude: value.lat()
longitude: value.lng()
scope.$apply()
)
pathRemoveAtListener = google.maps.event.addListener(polyPath, "remove_at", (index) ->
scope.path.splice index, 1
scope.$apply()
)
scope.$watch "path", ((newArray) ->
oldArray = polyline.getPath()
if newArray isnt oldArray
if newArray
polyline.setMap map
i = 0
oldLength = oldArray.getLength()
newLength = newArray.length
l = Math.min(oldLength, newLength)
while i < l
oldValue = oldArray.getAt(i)
newValue = newArray[i]
oldArray.setAt i, new google.maps.LatLng(newValue.latitude, newValue.longitude) if (oldValue.lat() isnt newValue.latitude) or (oldValue.lng() isnt newValue.longitude)
i++
while i < newLength
newValue = newArray[i]
oldArray.push new google.maps.LatLng(newValue.latitude, newValue.longitude)
i++
while i < oldLength
oldArray.pop()
i++
extendMapBounds map, oldArray if isTrue(attrs.fit)
else
# Remove polyline
polyline.setMap null
), true
# Remove polyline on scope $destroy
scope.$on "$destroy", ->
polyline.setMap null
pathSetAtListener()
pathSetAtListener = null
pathInsertAtListener()
pathInsertAtListener = null
pathRemoveAtListener()
pathRemoveAtListener = null
] | true | ###
!
The MIT License
Copyright (c) 2010-2013 Google, Inc. http://angularjs.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
angular-google-maps
https://github.com/nlaplante/angular-google-maps
@authors
PI:NAME:<NAME>END_PI - https://plus.google.com/108189012221374960701
PI:NAME:<NAME>END_PI - https://twitter.com/nmccready
###
angular.module("google-maps").directive "polygon", ["$log", "$timeout", ($log, $timeout) ->
validatePathPoints = (path) ->
i = 0
while i < path.length
return false if angular.isUndefined(path[i].latitude) or angular.isUndefined(path[i].longitude)
i++
true
convertPathPoints = (path) ->
result = new google.maps.MVCArray()
i = 0
while i < path.length
result.push new google.maps.LatLng(path[i].latitude, path[i].longitude)
i++
result
extendMapBounds = (map, points) ->
bounds = new google.maps.LatLngBounds()
i = 0
while i < points.length
bounds.extend points.getAt(i)
i++
map.fitBounds bounds
#
# * Utility functions
#
###
Check if a value is true
###
isTrue = (val) ->
angular.isDefined(val) and val isnt null and val is true or val is "1" or val is "y" or val is "true"
"use strict"
DEFAULTS = {}
restrict: "ECA"
require: "^googleMap"
replace: true
scope:
path: "=path"
stroke: "=stroke"
clickable: "="
draggable: "="
editable: "="
geodesic: "="
icons: "=icons"
visible: "="
link: (scope, element, attrs, mapCtrl) ->
# Validate required properties
if angular.isUndefined(scope.path) or scope.path is null or scope.path.length < 2 or not validatePathPoints(scope.path)
$log.error "polyline: no valid path attribute found"
return
# Wrap polyline initialization inside a $timeout() call to make sure the map is created already
$timeout ->
map = mapCtrl.getMap()
pathPoints = convertPathPoints(scope.path)
opts = angular.extend({}, DEFAULTS,
map: map
path: pathPoints
strokeColor: scope.stroke and scope.stroke.color
strokeOpacity: scope.stroke and scope.stroke.opacity
strokeWeight: scope.stroke and scope.stroke.weight
)
angular.forEach
clickable: true
draggable: false
editable: false
geodesic: false
visible: true
, (defaultValue, key) ->
if angular.isUndefined(scope[key]) or scope[key] is null
opts[key] = defaultValue
else
opts[key] = scope[key]
polyline = new google.maps.Polyline(opts)
extendMapBounds map, pathPoints if isTrue(attrs.fit)
if angular.isDefined(scope.editable)
scope.$watch "editable", (newValue, oldValue) ->
polyline.setEditable newValue
if angular.isDefined(scope.draggable)
scope.$watch "draggable", (newValue, oldValue) ->
polyline.setDraggable newValue
if angular.isDefined(scope.visible)
scope.$watch "visible", (newValue, oldValue) ->
polyline.setVisible newValue
pathSetAtListener = undefined
pathInsertAtListener = undefined
pathRemoveAtListener = undefined
polyPath = polyline.getPath()
pathSetAtListener = google.maps.event.addListener(polyPath, "set_at", (index) ->
value = polyPath.getAt(index)
return unless value
return if not value.lng or not value.lat
scope.path[index].latitude = value.lat()
scope.path[index].longitude = value.lng()
scope.$apply()
)
pathInsertAtListener = google.maps.event.addListener(polyPath, "insert_at", (index) ->
value = polyPath.getAt(index)
return unless value
return if not value.lng or not value.lat
scope.path.splice index, 0,
latitude: value.lat()
longitude: value.lng()
scope.$apply()
)
pathRemoveAtListener = google.maps.event.addListener(polyPath, "remove_at", (index) ->
scope.path.splice index, 1
scope.$apply()
)
scope.$watch "path", ((newArray) ->
oldArray = polyline.getPath()
if newArray isnt oldArray
if newArray
polyline.setMap map
i = 0
oldLength = oldArray.getLength()
newLength = newArray.length
l = Math.min(oldLength, newLength)
while i < l
oldValue = oldArray.getAt(i)
newValue = newArray[i]
oldArray.setAt i, new google.maps.LatLng(newValue.latitude, newValue.longitude) if (oldValue.lat() isnt newValue.latitude) or (oldValue.lng() isnt newValue.longitude)
i++
while i < newLength
newValue = newArray[i]
oldArray.push new google.maps.LatLng(newValue.latitude, newValue.longitude)
i++
while i < oldLength
oldArray.pop()
i++
extendMapBounds map, oldArray if isTrue(attrs.fit)
else
# Remove polyline
polyline.setMap null
), true
# Remove polyline on scope $destroy
scope.$on "$destroy", ->
polyline.setMap null
pathSetAtListener()
pathSetAtListener = null
pathInsertAtListener()
pathInsertAtListener = null
pathRemoveAtListener()
pathRemoveAtListener = null
] |
[
{
"context": "ail')\n sender:\n address: 'team@codecombat.com'\n name: 'CodeCombat Team'\n ",
"end": 2200,
"score": 0.9999184012413025,
"start": 2181,
"tag": "EMAIL",
"value": "team@codecombat.com"
},
{
"context": "dress: 'team@codecombat... | scripts/node/2016-10-07-fix-anonymous-users.coffee | johanvl/codecombat | 2 | # Usage:
# > coffee -c scripts/node/2016-10-07-fix-anonymous-users.coffee; node scripts/node/2016-10-07-fix-anonymous-users.js run
require('coffee-script');
require('coffee-script/register');
_ = require 'lodash'
sendwithus = require '../../server/sendwithus'
log = require 'winston'
str = require 'underscore.string'
co = require 'co'
changedUsernameTemplate = _.template("
<p>
Hi, CodeCombat user!
</p>
<p>
Just letting you know we've made a change to your account settings which may change how you log in. Here are your old settings:
</p>
<ul>
<li>Old username: <%= oldUsername %></li>
</ul>
</p>
Your old username conflicts with another user's. This should have been prevented on signup, our apologies!
Here are your new settings:
</p>
<ul>
<li>New username: <%= newUsername %></li>
</ul>
<p>
Please <a href='https://codecombat.com/account/settings'>visit the site</a> if you would like to update your settings.
And let us know if you have any questions!
</p>
<p>
- CodeCombat Team
</p>
")
exports.run = ->
co ->
mongoose = require 'mongoose'
User = require '../../server/models/User'
users = yield User.find({
$and: [
{ emailLower: {$exists: true}},
{ anonymous: true },
],
slug: {$exists: false}
}).limit(1000).sort({_id:-1})
console.log 'found', users.length, 'users'
successes = 0
for user in users
try
console.log 'save', user.id, user.get('name'), user.get('email'), user.get('anonymous'), user.get('slug'), user.get('emailLower')
yield user.save()
successes += 1
catch e
if e.response.message is 'is already in use' and e.response.property is 'name'
oldUsername = user.get('name')
newUsername = yield User.unconflictNameAsync(user.get('name'))
content = changedUsernameTemplate({
oldUsername
newUsername
})
console.log "\tChange name '#{oldUsername}' => '#{newUsername}'"
context =
template: sendwithus.templates.plain_text_email
recipient:
address: user.get('email')
sender:
address: 'team@codecombat.com'
name: 'CodeCombat Team'
template_data:
subject: 'Your Username Has Changed'
contentHTML: content
user.set('name': newUsername)
yield user.save()
yield sendwithus.api.sendAsync(context)
# return
console.log("Fixed #{successes} / #{users.length} users")
return 'Done'
if _.last(process.argv) is 'run'
database = require '../../server/commons/database'
mongoose = require 'mongoose'
### SET UP ###
do (setupLodash = this) ->
GLOBAL._ = require 'lodash'
_.str = require 'underscore.string'
_.mixin _.str.exports()
GLOBAL.tv4 = require('tv4').tv4
database.connect()
co ->
yield exports.run()
process.exit()
| 163186 | # Usage:
# > coffee -c scripts/node/2016-10-07-fix-anonymous-users.coffee; node scripts/node/2016-10-07-fix-anonymous-users.js run
require('coffee-script');
require('coffee-script/register');
_ = require 'lodash'
sendwithus = require '../../server/sendwithus'
log = require 'winston'
str = require 'underscore.string'
co = require 'co'
changedUsernameTemplate = _.template("
<p>
Hi, CodeCombat user!
</p>
<p>
Just letting you know we've made a change to your account settings which may change how you log in. Here are your old settings:
</p>
<ul>
<li>Old username: <%= oldUsername %></li>
</ul>
</p>
Your old username conflicts with another user's. This should have been prevented on signup, our apologies!
Here are your new settings:
</p>
<ul>
<li>New username: <%= newUsername %></li>
</ul>
<p>
Please <a href='https://codecombat.com/account/settings'>visit the site</a> if you would like to update your settings.
And let us know if you have any questions!
</p>
<p>
- CodeCombat Team
</p>
")
exports.run = ->
co ->
mongoose = require 'mongoose'
User = require '../../server/models/User'
users = yield User.find({
$and: [
{ emailLower: {$exists: true}},
{ anonymous: true },
],
slug: {$exists: false}
}).limit(1000).sort({_id:-1})
console.log 'found', users.length, 'users'
successes = 0
for user in users
try
console.log 'save', user.id, user.get('name'), user.get('email'), user.get('anonymous'), user.get('slug'), user.get('emailLower')
yield user.save()
successes += 1
catch e
if e.response.message is 'is already in use' and e.response.property is 'name'
oldUsername = user.get('name')
newUsername = yield User.unconflictNameAsync(user.get('name'))
content = changedUsernameTemplate({
oldUsername
newUsername
})
console.log "\tChange name '#{oldUsername}' => '#{newUsername}'"
context =
template: sendwithus.templates.plain_text_email
recipient:
address: user.get('email')
sender:
address: '<EMAIL>'
name: '<NAME>Combat Team'
template_data:
subject: 'Your Username Has Changed'
contentHTML: content
user.set('name': newUsername)
yield user.save()
yield sendwithus.api.sendAsync(context)
# return
console.log("Fixed #{successes} / #{users.length} users")
return 'Done'
if _.last(process.argv) is 'run'
database = require '../../server/commons/database'
mongoose = require 'mongoose'
### SET UP ###
do (setupLodash = this) ->
GLOBAL._ = require 'lodash'
_.str = require 'underscore.string'
_.mixin _.str.exports()
GLOBAL.tv4 = require('tv4').tv4
database.connect()
co ->
yield exports.run()
process.exit()
| true | # Usage:
# > coffee -c scripts/node/2016-10-07-fix-anonymous-users.coffee; node scripts/node/2016-10-07-fix-anonymous-users.js run
require('coffee-script');
require('coffee-script/register');
_ = require 'lodash'
sendwithus = require '../../server/sendwithus'
log = require 'winston'
str = require 'underscore.string'
co = require 'co'
changedUsernameTemplate = _.template("
<p>
Hi, CodeCombat user!
</p>
<p>
Just letting you know we've made a change to your account settings which may change how you log in. Here are your old settings:
</p>
<ul>
<li>Old username: <%= oldUsername %></li>
</ul>
</p>
Your old username conflicts with another user's. This should have been prevented on signup, our apologies!
Here are your new settings:
</p>
<ul>
<li>New username: <%= newUsername %></li>
</ul>
<p>
Please <a href='https://codecombat.com/account/settings'>visit the site</a> if you would like to update your settings.
And let us know if you have any questions!
</p>
<p>
- CodeCombat Team
</p>
")
exports.run = ->
co ->
mongoose = require 'mongoose'
User = require '../../server/models/User'
users = yield User.find({
$and: [
{ emailLower: {$exists: true}},
{ anonymous: true },
],
slug: {$exists: false}
}).limit(1000).sort({_id:-1})
console.log 'found', users.length, 'users'
successes = 0
for user in users
try
console.log 'save', user.id, user.get('name'), user.get('email'), user.get('anonymous'), user.get('slug'), user.get('emailLower')
yield user.save()
successes += 1
catch e
if e.response.message is 'is already in use' and e.response.property is 'name'
oldUsername = user.get('name')
newUsername = yield User.unconflictNameAsync(user.get('name'))
content = changedUsernameTemplate({
oldUsername
newUsername
})
console.log "\tChange name '#{oldUsername}' => '#{newUsername}'"
context =
template: sendwithus.templates.plain_text_email
recipient:
address: user.get('email')
sender:
address: 'PI:EMAIL:<EMAIL>END_PI'
name: 'PI:NAME:<NAME>END_PICombat Team'
template_data:
subject: 'Your Username Has Changed'
contentHTML: content
user.set('name': newUsername)
yield user.save()
yield sendwithus.api.sendAsync(context)
# return
console.log("Fixed #{successes} / #{users.length} users")
return 'Done'
if _.last(process.argv) is 'run'
database = require '../../server/commons/database'
mongoose = require 'mongoose'
### SET UP ###
do (setupLodash = this) ->
GLOBAL._ = require 'lodash'
_.str = require 'underscore.string'
_.mixin _.str.exports()
GLOBAL.tv4 = require('tv4').tv4
database.connect()
co ->
yield exports.run()
process.exit()
|
[
{
"context": "ampaign.create(\n # {\n # name : \"register\",\n # isActive : true,\n # type ",
"end": 407,
"score": 0.5219244956970215,
"start": 399,
"tag": "NAME",
"value": "register"
}
] | workers/social/lib/social/models/rewards/rewardcampaign.coffee | ezgikaysi/koding | 1 | { Model } = require 'bongo'
jraphical = require 'jraphical'
KodingError = require '../../error'
module.exports = class JRewardCampaign extends jraphical.Module
# Examples for working with campaigns
#
# Warning: if campaign initial amount is 0
# then this is an infinite campaign
#
# # Create
#
# KD.remote.api.JRewardCampaign.create(
# {
# name : "register",
# isActive : true,
# type : "disk",
# unit : "MB",
# initialAmount : 1000,
# maxAmount : 1000000,
# perEventAmount : 500,
# startDate : new Date(),
# endDate : new Date("01/18/2015")
# }, function(err, campaign) {
# console.log(err, campaign);
# }
# );
#
# # Find & Modify
#
# var campaign = null;
#
# KD.remote.api.JRewardCampaign.one(
# {
# name: "register"
# }, function(err, _campaign){
# if (err) {
# console.warn(err);
# }
# else if (_campaign) {
# campaign = _campaign;
# console.log("Campaign assigned to local variable `campaign`");
# }
# else {
# console.error("No campaign found.");
# }
# }
# );
#
# if (campaign) {
# campaign.update({isActive: false}, log);
# }
#
{ signature, secure } = require 'bongo'
@trait __dirname, '../../traits/protected'
{ permit } = require '../group/permissionset'
@share()
@set
permissions :
'manage campaign' : []
sharedEvents :
static : []
instance : []
indexes :
name : 'unique'
schema :
name :
type : String
required : yes
slug : String
isActive : Boolean
type :
type : String
default : 'disk'
unit :
type : String
default : 'MB'
initialAmount :
type : Number
maxAmount :
type : Number
perEventAmount :
type : Number
givenAmount :
type : Number
default : 0
startDate :
type : Date
default : -> new Date
endDate :
type : Date
default : -> new Date
createdAt :
type : Date
default : -> new Date
sharedMethods :
static :
create : [
(signature Object, Function)
(signature Object, Object, Function)
]
one :
(signature Object, Function)
some :
(signature Object, Object, Function)
isValid :
(signature String, Function)
instance :
update : [
(signature Object, Function)
(signature Object, Object, Function)
]
remove : [
(signature Function)
(signature Object, Function)
]
# Helpers
# -------
DEFAULT_CAMPAIGN = 'register'
deleteExistingCampaign = (data, callback) ->
{ name } = data
return callback new KodingError 'Name not provided' unless name
JRewardCampaign.one { name }, (err, campaign) ->
return callback err if err
return callback null unless campaign
campaign.remove callback
# Private Methods
# ---------------
# Static Methods
@fetchCampaign = (campaignName, callback) ->
unless callback
[campaignName, callback] = [DEFAULT_CAMPAIGN, campaignName]
JRewardCampaign.one { name: campaignName }, (err, campaign) ->
return callback err if err
callback null, campaign
# Instance Methods
increaseGivenAmount: (size, callback) ->
unless callback
[size, callback] = [@perEventAmount, size]
@update { $inc: { givenAmount: size } } , callback
# Shared Methods
# --------------
# Static Methods
@some$ = permit 'manage campaign',
success: (client, selector, options, callback) ->
@some selector, options, callback
@one$ = permit 'manage campaign',
success: (client, selector, callback) ->
@one selector, callback
@create = permit 'manage campaign',
success: (client, data, callback) ->
campaign = new JRewardCampaign data
campaign.save (err) ->
if err then callback err
else callback null, campaign
# Since we may need to use this method in pages where
# users not registered yet, we are using secure instead
# permit, from permission grid.
@isValid = secure (client, campaignName, callback) ->
unless callback
[campaignName, callback] = [DEFAULT_CAMPAIGN, campaignName]
JRewardCampaign.fetchCampaign campaignName, (err, campaign) ->
return callback err if err
return callback null, { isValid: no } unless campaign
{ maxAmount,
givenAmount,
initialAmount,
perEventAmount,
endDate, startDate } = campaign
if Date.now() < startDate.getTime()
return callback null, { isValid: no }
# if date is valid
if Date.now() > endDate.getTime()
return callback null, { isValid: no }
# if campaign initial amount is 0
# then this is an infinite campaign
if initialAmount is 0
return callback null, { isValid: yes, campaign }
# if campaign hit the limits
if givenAmount + perEventAmount > maxAmount
return callback null, { isValid: no }
return callback null, { isValid: yes, campaign }
# Instance Methods
update$: permit 'manage campaign',
success: (client, data, callback) ->
@update { $set: data }, callback
remove$: permit 'manage campaign',
success: (client, data, callback) ->
@remove callback
| 147607 | { Model } = require 'bongo'
jraphical = require 'jraphical'
KodingError = require '../../error'
module.exports = class JRewardCampaign extends jraphical.Module
# Examples for working with campaigns
#
# Warning: if campaign initial amount is 0
# then this is an infinite campaign
#
# # Create
#
# KD.remote.api.JRewardCampaign.create(
# {
# name : "<NAME>",
# isActive : true,
# type : "disk",
# unit : "MB",
# initialAmount : 1000,
# maxAmount : 1000000,
# perEventAmount : 500,
# startDate : new Date(),
# endDate : new Date("01/18/2015")
# }, function(err, campaign) {
# console.log(err, campaign);
# }
# );
#
# # Find & Modify
#
# var campaign = null;
#
# KD.remote.api.JRewardCampaign.one(
# {
# name: "register"
# }, function(err, _campaign){
# if (err) {
# console.warn(err);
# }
# else if (_campaign) {
# campaign = _campaign;
# console.log("Campaign assigned to local variable `campaign`");
# }
# else {
# console.error("No campaign found.");
# }
# }
# );
#
# if (campaign) {
# campaign.update({isActive: false}, log);
# }
#
{ signature, secure } = require 'bongo'
@trait __dirname, '../../traits/protected'
{ permit } = require '../group/permissionset'
@share()
@set
permissions :
'manage campaign' : []
sharedEvents :
static : []
instance : []
indexes :
name : 'unique'
schema :
name :
type : String
required : yes
slug : String
isActive : Boolean
type :
type : String
default : 'disk'
unit :
type : String
default : 'MB'
initialAmount :
type : Number
maxAmount :
type : Number
perEventAmount :
type : Number
givenAmount :
type : Number
default : 0
startDate :
type : Date
default : -> new Date
endDate :
type : Date
default : -> new Date
createdAt :
type : Date
default : -> new Date
sharedMethods :
static :
create : [
(signature Object, Function)
(signature Object, Object, Function)
]
one :
(signature Object, Function)
some :
(signature Object, Object, Function)
isValid :
(signature String, Function)
instance :
update : [
(signature Object, Function)
(signature Object, Object, Function)
]
remove : [
(signature Function)
(signature Object, Function)
]
# Helpers
# -------
DEFAULT_CAMPAIGN = 'register'
deleteExistingCampaign = (data, callback) ->
{ name } = data
return callback new KodingError 'Name not provided' unless name
JRewardCampaign.one { name }, (err, campaign) ->
return callback err if err
return callback null unless campaign
campaign.remove callback
# Private Methods
# ---------------
# Static Methods
@fetchCampaign = (campaignName, callback) ->
unless callback
[campaignName, callback] = [DEFAULT_CAMPAIGN, campaignName]
JRewardCampaign.one { name: campaignName }, (err, campaign) ->
return callback err if err
callback null, campaign
# Instance Methods
increaseGivenAmount: (size, callback) ->
unless callback
[size, callback] = [@perEventAmount, size]
@update { $inc: { givenAmount: size } } , callback
# Shared Methods
# --------------
# Static Methods
@some$ = permit 'manage campaign',
success: (client, selector, options, callback) ->
@some selector, options, callback
@one$ = permit 'manage campaign',
success: (client, selector, callback) ->
@one selector, callback
@create = permit 'manage campaign',
success: (client, data, callback) ->
campaign = new JRewardCampaign data
campaign.save (err) ->
if err then callback err
else callback null, campaign
# Since we may need to use this method in pages where
# users not registered yet, we are using secure instead
# permit, from permission grid.
@isValid = secure (client, campaignName, callback) ->
unless callback
[campaignName, callback] = [DEFAULT_CAMPAIGN, campaignName]
JRewardCampaign.fetchCampaign campaignName, (err, campaign) ->
return callback err if err
return callback null, { isValid: no } unless campaign
{ maxAmount,
givenAmount,
initialAmount,
perEventAmount,
endDate, startDate } = campaign
if Date.now() < startDate.getTime()
return callback null, { isValid: no }
# if date is valid
if Date.now() > endDate.getTime()
return callback null, { isValid: no }
# if campaign initial amount is 0
# then this is an infinite campaign
if initialAmount is 0
return callback null, { isValid: yes, campaign }
# if campaign hit the limits
if givenAmount + perEventAmount > maxAmount
return callback null, { isValid: no }
return callback null, { isValid: yes, campaign }
# Instance Methods
update$: permit 'manage campaign',
success: (client, data, callback) ->
@update { $set: data }, callback
remove$: permit 'manage campaign',
success: (client, data, callback) ->
@remove callback
| true | { Model } = require 'bongo'
jraphical = require 'jraphical'
KodingError = require '../../error'
module.exports = class JRewardCampaign extends jraphical.Module
# Examples for working with campaigns
#
# Warning: if campaign initial amount is 0
# then this is an infinite campaign
#
# # Create
#
# KD.remote.api.JRewardCampaign.create(
# {
# name : "PI:NAME:<NAME>END_PI",
# isActive : true,
# type : "disk",
# unit : "MB",
# initialAmount : 1000,
# maxAmount : 1000000,
# perEventAmount : 500,
# startDate : new Date(),
# endDate : new Date("01/18/2015")
# }, function(err, campaign) {
# console.log(err, campaign);
# }
# );
#
# # Find & Modify
#
# var campaign = null;
#
# KD.remote.api.JRewardCampaign.one(
# {
# name: "register"
# }, function(err, _campaign){
# if (err) {
# console.warn(err);
# }
# else if (_campaign) {
# campaign = _campaign;
# console.log("Campaign assigned to local variable `campaign`");
# }
# else {
# console.error("No campaign found.");
# }
# }
# );
#
# if (campaign) {
# campaign.update({isActive: false}, log);
# }
#
{ signature, secure } = require 'bongo'
@trait __dirname, '../../traits/protected'
{ permit } = require '../group/permissionset'
@share()
@set
permissions :
'manage campaign' : []
sharedEvents :
static : []
instance : []
indexes :
name : 'unique'
schema :
name :
type : String
required : yes
slug : String
isActive : Boolean
type :
type : String
default : 'disk'
unit :
type : String
default : 'MB'
initialAmount :
type : Number
maxAmount :
type : Number
perEventAmount :
type : Number
givenAmount :
type : Number
default : 0
startDate :
type : Date
default : -> new Date
endDate :
type : Date
default : -> new Date
createdAt :
type : Date
default : -> new Date
sharedMethods :
static :
create : [
(signature Object, Function)
(signature Object, Object, Function)
]
one :
(signature Object, Function)
some :
(signature Object, Object, Function)
isValid :
(signature String, Function)
instance :
update : [
(signature Object, Function)
(signature Object, Object, Function)
]
remove : [
(signature Function)
(signature Object, Function)
]
# Helpers
# -------
DEFAULT_CAMPAIGN = 'register'
deleteExistingCampaign = (data, callback) ->
{ name } = data
return callback new KodingError 'Name not provided' unless name
JRewardCampaign.one { name }, (err, campaign) ->
return callback err if err
return callback null unless campaign
campaign.remove callback
# Private Methods
# ---------------
# Static Methods
@fetchCampaign = (campaignName, callback) ->
unless callback
[campaignName, callback] = [DEFAULT_CAMPAIGN, campaignName]
JRewardCampaign.one { name: campaignName }, (err, campaign) ->
return callback err if err
callback null, campaign
# Instance Methods
increaseGivenAmount: (size, callback) ->
unless callback
[size, callback] = [@perEventAmount, size]
@update { $inc: { givenAmount: size } } , callback
# Shared Methods
# --------------
# Static Methods
@some$ = permit 'manage campaign',
success: (client, selector, options, callback) ->
@some selector, options, callback
@one$ = permit 'manage campaign',
success: (client, selector, callback) ->
@one selector, callback
@create = permit 'manage campaign',
success: (client, data, callback) ->
campaign = new JRewardCampaign data
campaign.save (err) ->
if err then callback err
else callback null, campaign
# Since we may need to use this method in pages where
# users not registered yet, we are using secure instead
# permit, from permission grid.
@isValid = secure (client, campaignName, callback) ->
unless callback
[campaignName, callback] = [DEFAULT_CAMPAIGN, campaignName]
JRewardCampaign.fetchCampaign campaignName, (err, campaign) ->
return callback err if err
return callback null, { isValid: no } unless campaign
{ maxAmount,
givenAmount,
initialAmount,
perEventAmount,
endDate, startDate } = campaign
if Date.now() < startDate.getTime()
return callback null, { isValid: no }
# if date is valid
if Date.now() > endDate.getTime()
return callback null, { isValid: no }
# if campaign initial amount is 0
# then this is an infinite campaign
if initialAmount is 0
return callback null, { isValid: yes, campaign }
# if campaign hit the limits
if givenAmount + perEventAmount > maxAmount
return callback null, { isValid: no }
return callback null, { isValid: yes, campaign }
# Instance Methods
update$: permit 'manage campaign',
success: (client, data, callback) ->
@update { $set: data }, callback
remove$: permit 'manage campaign',
success: (client, data, callback) ->
@remove callback
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9988880157470703,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-http-many-ended-pipelines.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
# no warnings should happen!
trace = console.trace
console.trace = ->
trace.apply console, arguments
throw new Error("no tracing should happen here")return
http = require("http")
net = require("net")
numRequests = 20
done = 0
server = http.createServer((req, res) ->
res.end "ok"
# Oh no! The connection died!
req.socket.destroy()
server.close() if ++done is numRequests
return
)
server.listen common.PORT
client = net.connect(
port: common.PORT
allowHalfOpen: true
)
i = 0
while i < numRequests
client.write "GET / HTTP/1.1\r\n" + "Host: some.host.name\r\n" + "\r\n\r\n"
i++
client.end()
client.pipe process.stdout
| 111805 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
# no warnings should happen!
trace = console.trace
console.trace = ->
trace.apply console, arguments
throw new Error("no tracing should happen here")return
http = require("http")
net = require("net")
numRequests = 20
done = 0
server = http.createServer((req, res) ->
res.end "ok"
# Oh no! The connection died!
req.socket.destroy()
server.close() if ++done is numRequests
return
)
server.listen common.PORT
client = net.connect(
port: common.PORT
allowHalfOpen: true
)
i = 0
while i < numRequests
client.write "GET / HTTP/1.1\r\n" + "Host: some.host.name\r\n" + "\r\n\r\n"
i++
client.end()
client.pipe process.stdout
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
# no warnings should happen!
trace = console.trace
console.trace = ->
trace.apply console, arguments
throw new Error("no tracing should happen here")return
http = require("http")
net = require("net")
numRequests = 20
done = 0
server = http.createServer((req, res) ->
res.end "ok"
# Oh no! The connection died!
req.socket.destroy()
server.close() if ++done is numRequests
return
)
server.listen common.PORT
client = net.connect(
port: common.PORT
allowHalfOpen: true
)
i = 0
while i < numRequests
client.write "GET / HTTP/1.1\r\n" + "Host: some.host.name\r\n" + "\r\n\r\n"
i++
client.end()
client.pipe process.stdout
|
[
{
"context": " already specified\n # see https://github.com/AveVlad/gulp-connect/issues/172\n if typeof (@https) ",
"end": 1360,
"score": 0.9997270107269287,
"start": 1353,
"tag": "USERNAME",
"value": "AveVlad"
},
{
"context": " '/certs/server.crt'\n @https.passphras... | node_modules/gulp-connect/src/index.coffee | mandosir/reactQA | 0 | path = require("path")
es = require("event-stream")
util = require("gulp-util")
http = require("http")
https = require("https")
fs = require("fs")
connect = require("connect")
liveReload = require("connect-livereload")
tiny_lr = require("tiny-lr")
apps = []
http2 = undefined
try
http2 = require('http2')
class ConnectApp
constructor: (options) ->
@name = options.name || "Server"
@port = options.port || "8080"
@root = options.root || path.dirname(module.parent.id)
@host = options.host || "localhost"
@debug = options.debug || false
@silent = options.silent || false
@https = options.https || false
@livereload = options.livereload || false
@middleware = options.middleware || undefined
@serverInit = options.serverInit || undefined
@fallback = options.fallback || undefined
@oldMethod("open") if options.open
@sockets = []
@app = undefined
@lr = undefined
@run()
run: ->
@app = connect()
@handlers().forEach (middleware) =>
if typeof (middleware) is "object"
@app.use middleware[0], middleware[1]
else
@app.use middleware
@app.use connect.directory(if typeof @root == "object" then @root[0] else @root)
if @https
# use some defaults when not set. do not touch when a key is already specified
# see https://github.com/AveVlad/gulp-connect/issues/172
if typeof (@https) is 'boolean' || !@https.key
# change it into an object if it is not already one
if !(typeof (@https) is "object")
@https = {}
@https.key = fs.readFileSync __dirname + '/certs/server.key'
@https.cert = fs.readFileSync __dirname + '/certs/server.crt'
@https.ca = fs.readFileSync __dirname + '/certs/server.crt'
@https.passphrase = 'gulp'
@server = (http2 || https).createServer(@https, @app)
else
@server = http.createServer @app
if @serverInit
@serverInit @server
@server.listen @port, (err) =>
if err
@log "Error on starting server: #{err}"
else
@log "#{@name} started http#{if @https then 's' else ''}://#{@host}:#{@port}"
stoped = false
sockets = []
@server.on "close", =>
if (!stoped)
stoped = true
@log "#{@name} stopped"
# Log connections and request in debug
@server.on "connection", (socket) =>
@logDebug "Received incoming connection from #{socket.address().address}"
@sockets.push socket
socket.on "close", =>
@sockets.splice @sockets.indexOf(socket), 1
@server.on "request", (request, response) =>
@logDebug "Received request #{request.method} #{request.url}"
@server.on "error", (err) =>
@log err.toString()
stopServer = =>
if (!stoped)
@sockets.forEach (socket) =>
socket.destroy()
@server.close()
process.nextTick( ->
process.exit(0)
)
process.on("SIGINT", stopServer)
process.on("exit", stopServer)
if @livereload
tiny_lr.Server::error = ->
if @https
@lr = tiny_lr
key: @https.key || fs.readFileSync __dirname + '/certs/server.key'
cert: @https.cert || fs.readFileSync __dirname + '/certs/server.crt'
else
@lr = tiny_lr()
@lr.listen @livereload.port
@log "LiveReload started on port #{@livereload.port}"
handlers: ->
steps = if @middleware then @middleware.call(this, connect, @) else []
if @livereload
@livereload = {} if typeof @livereload is "boolean"
@livereload.port = 35729 unless @livereload.port
steps.unshift liveReload(@livereload)
if typeof @root == "object"
@root.forEach (path) ->
steps.push connect.static(path)
else
steps.push connect.static(@root)
if @fallback
steps.push (req, res) =>
fallbackPath = @fallback
if typeof @fallback is "function"
fallbackPath = @fallback(req, res)
require('fs').createReadStream(fallbackPath).pipe(res)
return steps
log: (text) ->
if !@silent
util.log util.colors.green(text)
logWarning: (text) ->
if !@silent
util.log util.colors.yellow(text)
logDebug: (text) ->
if @debug
util.log util.colors.blue(text)
oldMethod: (type) ->
text = 'does not work in gulp-connect v 2.*. Please read "readme" https://github.com/AveVlad/gulp-connect'
switch type
when "open" then @logWarning("Option open #{text}")
module.exports =
server: (options = {}) ->
app = new ConnectApp(options)
apps.push(app)
app
reload: ->
es.map (file, callback) ->
apps.forEach (app) =>
if app.livereload and typeof app.lr == "object"
app.lr.changed body:
files: file.path
callback null, file
serverClose: ->
apps.forEach((app) -> do app.server.close)
apps = []
| 107170 | path = require("path")
es = require("event-stream")
util = require("gulp-util")
http = require("http")
https = require("https")
fs = require("fs")
connect = require("connect")
liveReload = require("connect-livereload")
tiny_lr = require("tiny-lr")
apps = []
http2 = undefined
try
http2 = require('http2')
class ConnectApp
constructor: (options) ->
@name = options.name || "Server"
@port = options.port || "8080"
@root = options.root || path.dirname(module.parent.id)
@host = options.host || "localhost"
@debug = options.debug || false
@silent = options.silent || false
@https = options.https || false
@livereload = options.livereload || false
@middleware = options.middleware || undefined
@serverInit = options.serverInit || undefined
@fallback = options.fallback || undefined
@oldMethod("open") if options.open
@sockets = []
@app = undefined
@lr = undefined
@run()
run: ->
@app = connect()
@handlers().forEach (middleware) =>
if typeof (middleware) is "object"
@app.use middleware[0], middleware[1]
else
@app.use middleware
@app.use connect.directory(if typeof @root == "object" then @root[0] else @root)
if @https
# use some defaults when not set. do not touch when a key is already specified
# see https://github.com/AveVlad/gulp-connect/issues/172
if typeof (@https) is 'boolean' || !@https.key
# change it into an object if it is not already one
if !(typeof (@https) is "object")
@https = {}
@https.key = fs.readFileSync __dirname + '/certs/server.key'
@https.cert = fs.readFileSync __dirname + '/certs/server.crt'
@https.ca = fs.readFileSync __dirname + '/certs/server.crt'
@https.passphrase = '<PASSWORD>'
@server = (http2 || https).createServer(@https, @app)
else
@server = http.createServer @app
if @serverInit
@serverInit @server
@server.listen @port, (err) =>
if err
@log "Error on starting server: #{err}"
else
@log "#{@name} started http#{if @https then 's' else ''}://#{@host}:#{@port}"
stoped = false
sockets = []
@server.on "close", =>
if (!stoped)
stoped = true
@log "#{@name} stopped"
# Log connections and request in debug
@server.on "connection", (socket) =>
@logDebug "Received incoming connection from #{socket.address().address}"
@sockets.push socket
socket.on "close", =>
@sockets.splice @sockets.indexOf(socket), 1
@server.on "request", (request, response) =>
@logDebug "Received request #{request.method} #{request.url}"
@server.on "error", (err) =>
@log err.toString()
stopServer = =>
if (!stoped)
@sockets.forEach (socket) =>
socket.destroy()
@server.close()
process.nextTick( ->
process.exit(0)
)
process.on("SIGINT", stopServer)
process.on("exit", stopServer)
if @livereload
tiny_lr.Server::error = ->
if @https
@lr = tiny_lr
key: @https.key || fs.readFileSync __dirname + '/certs/server.key'
cert: @https.cert || fs.readFileSync __dirname + '/certs/server.crt'
else
@lr = tiny_lr()
@lr.listen @livereload.port
@log "LiveReload started on port #{@livereload.port}"
handlers: ->
steps = if @middleware then @middleware.call(this, connect, @) else []
if @livereload
@livereload = {} if typeof @livereload is "boolean"
@livereload.port = 35729 unless @livereload.port
steps.unshift liveReload(@livereload)
if typeof @root == "object"
@root.forEach (path) ->
steps.push connect.static(path)
else
steps.push connect.static(@root)
if @fallback
steps.push (req, res) =>
fallbackPath = @fallback
if typeof @fallback is "function"
fallbackPath = @fallback(req, res)
require('fs').createReadStream(fallbackPath).pipe(res)
return steps
log: (text) ->
if !@silent
util.log util.colors.green(text)
logWarning: (text) ->
if !@silent
util.log util.colors.yellow(text)
logDebug: (text) ->
if @debug
util.log util.colors.blue(text)
oldMethod: (type) ->
text = 'does not work in gulp-connect v 2.*. Please read "readme" https://github.com/AveVlad/gulp-connect'
switch type
when "open" then @logWarning("Option open #{text}")
module.exports =
server: (options = {}) ->
app = new ConnectApp(options)
apps.push(app)
app
reload: ->
es.map (file, callback) ->
apps.forEach (app) =>
if app.livereload and typeof app.lr == "object"
app.lr.changed body:
files: file.path
callback null, file
serverClose: ->
apps.forEach((app) -> do app.server.close)
apps = []
| true | path = require("path")
es = require("event-stream")
util = require("gulp-util")
http = require("http")
https = require("https")
fs = require("fs")
connect = require("connect")
liveReload = require("connect-livereload")
tiny_lr = require("tiny-lr")
apps = []
http2 = undefined
try
http2 = require('http2')
class ConnectApp
constructor: (options) ->
@name = options.name || "Server"
@port = options.port || "8080"
@root = options.root || path.dirname(module.parent.id)
@host = options.host || "localhost"
@debug = options.debug || false
@silent = options.silent || false
@https = options.https || false
@livereload = options.livereload || false
@middleware = options.middleware || undefined
@serverInit = options.serverInit || undefined
@fallback = options.fallback || undefined
@oldMethod("open") if options.open
@sockets = []
@app = undefined
@lr = undefined
@run()
run: ->
@app = connect()
@handlers().forEach (middleware) =>
if typeof (middleware) is "object"
@app.use middleware[0], middleware[1]
else
@app.use middleware
@app.use connect.directory(if typeof @root == "object" then @root[0] else @root)
if @https
# use some defaults when not set. do not touch when a key is already specified
# see https://github.com/AveVlad/gulp-connect/issues/172
if typeof (@https) is 'boolean' || !@https.key
# change it into an object if it is not already one
if !(typeof (@https) is "object")
@https = {}
@https.key = fs.readFileSync __dirname + '/certs/server.key'
@https.cert = fs.readFileSync __dirname + '/certs/server.crt'
@https.ca = fs.readFileSync __dirname + '/certs/server.crt'
@https.passphrase = 'PI:PASSWORD:<PASSWORD>END_PI'
@server = (http2 || https).createServer(@https, @app)
else
@server = http.createServer @app
if @serverInit
@serverInit @server
@server.listen @port, (err) =>
if err
@log "Error on starting server: #{err}"
else
@log "#{@name} started http#{if @https then 's' else ''}://#{@host}:#{@port}"
stoped = false
sockets = []
@server.on "close", =>
if (!stoped)
stoped = true
@log "#{@name} stopped"
# Log connections and request in debug
@server.on "connection", (socket) =>
@logDebug "Received incoming connection from #{socket.address().address}"
@sockets.push socket
socket.on "close", =>
@sockets.splice @sockets.indexOf(socket), 1
@server.on "request", (request, response) =>
@logDebug "Received request #{request.method} #{request.url}"
@server.on "error", (err) =>
@log err.toString()
stopServer = =>
if (!stoped)
@sockets.forEach (socket) =>
socket.destroy()
@server.close()
process.nextTick( ->
process.exit(0)
)
process.on("SIGINT", stopServer)
process.on("exit", stopServer)
if @livereload
tiny_lr.Server::error = ->
if @https
@lr = tiny_lr
key: @https.key || fs.readFileSync __dirname + '/certs/server.key'
cert: @https.cert || fs.readFileSync __dirname + '/certs/server.crt'
else
@lr = tiny_lr()
@lr.listen @livereload.port
@log "LiveReload started on port #{@livereload.port}"
handlers: ->
steps = if @middleware then @middleware.call(this, connect, @) else []
if @livereload
@livereload = {} if typeof @livereload is "boolean"
@livereload.port = 35729 unless @livereload.port
steps.unshift liveReload(@livereload)
if typeof @root == "object"
@root.forEach (path) ->
steps.push connect.static(path)
else
steps.push connect.static(@root)
if @fallback
steps.push (req, res) =>
fallbackPath = @fallback
if typeof @fallback is "function"
fallbackPath = @fallback(req, res)
require('fs').createReadStream(fallbackPath).pipe(res)
return steps
log: (text) ->
if !@silent
util.log util.colors.green(text)
logWarning: (text) ->
if !@silent
util.log util.colors.yellow(text)
logDebug: (text) ->
if @debug
util.log util.colors.blue(text)
oldMethod: (type) ->
text = 'does not work in gulp-connect v 2.*. Please read "readme" https://github.com/AveVlad/gulp-connect'
switch type
when "open" then @logWarning("Option open #{text}")
module.exports =
server: (options = {}) ->
app = new ConnectApp(options)
apps.push(app)
app
reload: ->
es.map (file, callback) ->
apps.forEach (app) =>
if app.livereload and typeof app.lr == "object"
app.lr.changed body:
files: file.path
callback null, file
serverClose: ->
apps.forEach((app) -> do app.server.close)
apps = []
|
[
{
"context": "avdl-compiler v#{pkg.version} (https://github.com/keybase/node-avdl-compiler)\n // Input file: my_t",
"end": 496,
"score": 0.9267602562904358,
"start": 489,
"tag": "USERNAME",
"value": "keybase"
},
{
"context": "avdl-compiler v#{pkg.version} (https://github.com... | src/go_emit.test.iced | AngelKey/Angelkey.avdlcompiler | 0 | {GoEmitter} = require("./go_emit");
pkg = require '../package.json'
describe "GoEmitter", () ->
emitter = null
beforeEach () ->
emitter = new GoEmitter
describe "emit_preface", () ->
it "Should emit a preface", () ->
emitter.emit_preface ["./my_test_file.avdl"], {namespace: "chat1"}
code = emitter._code.join "\n"
expect(code).toBe("""
// Auto-generated to Go types and interfaces using avdl-compiler v#{pkg.version} (https://github.com/keybase/node-avdl-compiler)
// Input file: my_test_file.avdl
package chat1\n
""")
return
it "Should note that it only generated types if types_only is enabled", () ->
emitter.emit_preface ["./my_test_file.avdl"], {namespace: "chat1"}, {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe("""
// Auto-generated to Go types using avdl-compiler v#{pkg.version} (https://github.com/keybase/node-avdl-compiler)
// Input file: my_test_file.avdl
package chat1\n
""")
return
return
describe "emit_imports", () ->
it "should handle both GOPATH based paths and relative paths", () ->
imports = [
{
path: "../gregor1",
type: "idl",
import_as: "gregor1"
},
{
path: "github.com/keybase/client/go/protocol/keybase1",
type: "idl",
import_as: "keybase1"
},
]
emitter.emit_imports {imports, messages: {}, types: []}, 'location/of/my/output.go', {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe('''
import (
\tgregor1 "github.com/keybase/node-avdl-compiler/location/of/gregor1"
\tkeybase1 "github.com/keybase/client/go/protocol/keybase1"
)\n\n
''')
return
it "should ignore packages that aren't imported with a package name", () ->
imports = [
{
path: "common.avdl",
type: "idl"
}
]
emitter.emit_imports {imports, messages: {}, types: []}, 'location/of/my/output.go', {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
)\n\n
""")
return
it "should only import the rpc package if types_only is false", () ->
emitter.emit_imports {imports: [], messages: {}, types: []}, 'location/of/my/output.go', {types_only: false}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
\t"github.com/keybase/go-framed-msgpack-rpc/rpc"
)\n\n
""")
return
it "should only import the content and time packages if types_only is false and the file contains messages", () ->
emitter.emit_imports {imports: [], messages: {fake_message: 'blah'}, types: []}, 'location/of/my/output.go', {types_only: false}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
\t"github.com/keybase/go-framed-msgpack-rpc/rpc"
\tcontext "golang.org/x/net/context"
\t"time"
)\n\n
""")
return
it "should output the errors package if there are variants", () ->
emitter.emit_imports {imports: [], messages: {}, types: [{
type: "variant",
name: "TextPaymentResult",
}]}, 'location/of/my/output.go', {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
\t"errors"
)\n\n
""")
return
return
describe "emit_typedef", () ->
it "Should emit a string typedef", () ->
type = {
type: "record"
name: "BuildPaymentID"
fields: []
typedef: "string"
}
emitter.emit_typedef type
code = emitter._code.join "\n"
expect(code).toBe("""
type BuildPaymentID string
func (o BuildPaymentID) DeepCopy() BuildPaymentID {
\treturn o
}\n
""")
return
return
describe "emit_record", () ->
it "Should emit a struct with primative value keys", () ->
record = {
type: "record"
name: "TestRecord"
fields: [
{
type: "string",
name: "statusDescription"
},
{
type: "boolean"
name: "isValidThing"
},
{
type: "long",
name: "longInt"
},
{
type: "double",
name: "doubleOrNothin"
}
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tStatusDescription\tstring\t`codec:"statusDescription" json:"statusDescription"`
\tIsValidThing\tbool\t`codec:"isValidThing" json:"isValidThing"`
\tLongInt\tint64\t`codec:"longInt" json:"longInt"`
\tDoubleOrNothin\tfloat64\t`codec:"doubleOrNothin" json:"doubleOrNothin"`
}
func (o TestRecord) DeepCopy() TestRecord {
\treturn TestRecord{
\t\tStatusDescription: o.StatusDescription,
\t\tIsValidThing: o.IsValidThing,
\t\tLongInt: o.LongInt,
\t\tDoubleOrNothin: o.DoubleOrNothin,
\t}
}\n
""")
return
it "Should not emit a DeepCopy function in the option is given", () ->
record = {
type: "record"
name: "TestRecord"
fields: [
{
type: "string",
name: "statusDescription"
},
]
}
emitter.emit_record record, { no_deep_copy: true }
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tStatusDescription\tstring\t`codec:"statusDescription" json:"statusDescription"`
}\n
""")
return
it "Should support custom types as fields", () ->
record = {
type: "record"
name: "TestRecord"
fields: [
{
type: "MySuperCoolCustomType",
name: "superCool"
},
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tSuperCool\tMySuperCoolCustomType\t`codec:"superCool" json:"superCool"`
}
func (o TestRecord) DeepCopy() TestRecord {
\treturn TestRecord{
\t\tSuperCool: o.SuperCool.DeepCopy(),
\t}
}\n
""")
return
it "Should emit a struct with an optional type", () ->
record = {
type: "record"
name: "TestRecord"
fields: [
{
type: [null, "string"],
name: "maybeStatusDescription"
}
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tMaybeStatusDescription\t*string\t`codec:"maybeStatusDescription,omitempty" json:"maybeStatusDescription,omitempty"`
}
func (o TestRecord) DeepCopy() TestRecord {
\treturn TestRecord{
\t\tMaybeStatusDescription: (func (x *string) *string {
\t\t\tif x == nil {
\t\t\t\treturn nil
\t\t\t}
\t\t\ttmp := (*x)
\t\t\treturn &tmp
\t\t})(o.MaybeStatusDescription),
\t}
}\n
""")
return
it "Should emit a struct with an array value", () ->
record = {
type: "record",
name: "PaymentsPageLocal",
fields: [
{
type: {
type: "array",
items: "PaymentOrErrorLocal"
},
name: "payments"
},
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type PaymentsPageLocal struct {
\tPayments []PaymentOrErrorLocal `codec:"payments" json:"payments"`
}
func (o PaymentsPageLocal) DeepCopy() PaymentsPageLocal {
\treturn PaymentsPageLocal{
\t\tPayments: (func (x []PaymentOrErrorLocal) []PaymentOrErrorLocal {
\t\t\tif x == nil {
\t\t\t\treturn nil
\t\t\t}
\t\t\tret := make([]PaymentOrErrorLocal, len(x))
\t\t\tfor i, v := range x {
\t\t\t\tvCopy := v.DeepCopy()
\t\t\t\tret[i] = vCopy
\t\t\t}
\t\t\treturn ret
\t\t})(o.Payments),
\t}
}\n
""")
return
it "Should emit a struct with a map type", () ->
record = {
type: "record"
name: "StellarServerDefinitions"
fields: [
{
type: "int",
name: "revision"
},
{
type: {
type: "map"
values: "OutsideCurrencyDefinition"
keys: "OutsideCurrencyCode"
}
name: "currencies"
}
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type StellarServerDefinitions struct {
\tRevision\tint\t`codec:"revision" json:"revision"`
\tCurrencies\tmap[OutsideCurrencyCode]OutsideCurrencyDefinition\t`codec:"currencies" json:"currencies"`
}
func (o StellarServerDefinitions) DeepCopy() StellarServerDefinitions {
\treturn StellarServerDefinitions{
\t\tRevision: o.Revision,
\t\tCurrencies: (func (x map[OutsideCurrencyCode]OutsideCurrencyDefinition) map[OutsideCurrencyCode]OutsideCurrencyDefinition {
\t\t\tif x == nil {
\t\t\t\treturn nil
\t\t\t}
\t\t\tret := make(map[OutsideCurrencyCode]OutsideCurrencyDefinition, len(x))
\t\t\tfor k, v := range x {
\t\t\t\tkCopy := k.DeepCopy()
\t\t\t\tvCopy := v.DeepCopy()
\t\t\t\tret[kCopy] = vCopy
\t\t\t}
\t\t\treturn ret
\t\t})(o.Currencies),
\t}
}\n
""")
return
return
describe "emit_fixed", () ->
it "Should emit a fixed length type", () ->
emitter.emit_fixed { name: "FunHash", size: 32 }
code = emitter._code.join "\n"
expect(code).toBe("""
type FunHash [32]byte
func (o FunHash) DeepCopy() FunHash {
\tvar ret FunHash
\tcopy(ret[:], o[:])
\treturn ret
}\n
""")
return
return
describe "emit_enum", () ->
it "Should emit an enum", () ->
test_enum = {
type: "enum",
name: "AuditVersion",
symbols: [
"V0_0",
"V1_1",
"V2_2",
"V3_3"
]
}
emitter.emit_enum test_enum
code = emitter._code.join "\n"
expect(code).toBe("""
type AuditVersion int
const (
\tAuditVersion_V0 AuditVersion = 0
\tAuditVersion_V1 AuditVersion = 1
\tAuditVersion_V2 AuditVersion = 2
\tAuditVersion_V3 AuditVersion = 3
)
func (o AuditVersion) DeepCopy() AuditVersion { return o }
var AuditVersionMap = map[string]AuditVersion{
\t"V0": 0,
\t"V1": 1,
\t"V2": 2,
\t"V3": 3,
}
var AuditVersionRevMap = map[AuditVersion]string{
\t0: "V0",
\t1: "V1",
\t2: "V2",
\t3: "V3",
}
func (e AuditVersion) String() string {
\tif v, ok := AuditVersionRevMap[e]; ok {
\t\treturn v
\t}
\treturn fmt.Sprintf("%v", int(e))
}\n
""")
return
it "Should not emit a string function if nostring is given", () ->
test_enum = {
type: "enum",
name: "AuditVersion",
symbols: [
"V0_0",
"V1_1",
"V2_2",
"V3_3"
]
}
emitter.emit_enum test_enum, true
code = emitter._code.join "\n"
expect(code).toBe("""
type AuditVersion int
const (
\tAuditVersion_V0 AuditVersion = 0
\tAuditVersion_V1 AuditVersion = 1
\tAuditVersion_V2 AuditVersion = 2
\tAuditVersion_V3 AuditVersion = 3
)
func (o AuditVersion) DeepCopy() AuditVersion { return o }
var AuditVersionMap = map[string]AuditVersion{
\t"V0": 0,
\t"V1": 1,
\t"V2": 2,
\t"V3": 3,
}
var AuditVersionRevMap = map[AuditVersion]string{
\t0: "V0",
\t1: "V1",
\t2: "V2",
\t3: "V3",
}\n
""")
return
return
return
| 12735 | {GoEmitter} = require("./go_emit");
pkg = require '../package.json'
describe "GoEmitter", () ->
emitter = null
beforeEach () ->
emitter = new GoEmitter
describe "emit_preface", () ->
it "Should emit a preface", () ->
emitter.emit_preface ["./my_test_file.avdl"], {namespace: "chat1"}
code = emitter._code.join "\n"
expect(code).toBe("""
// Auto-generated to Go types and interfaces using avdl-compiler v#{pkg.version} (https://github.com/keybase/node-avdl-compiler)
// Input file: my_test_file.avdl
package chat1\n
""")
return
it "Should note that it only generated types if types_only is enabled", () ->
emitter.emit_preface ["./my_test_file.avdl"], {namespace: "chat1"}, {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe("""
// Auto-generated to Go types using avdl-compiler v#{pkg.version} (https://github.com/keybase/node-avdl-compiler)
// Input file: my_test_file.avdl
package chat1\n
""")
return
return
describe "emit_imports", () ->
it "should handle both GOPATH based paths and relative paths", () ->
imports = [
{
path: "../gregor1",
type: "idl",
import_as: "gregor1"
},
{
path: "github.com/keybase/client/go/protocol/keybase1",
type: "idl",
import_as: "keybase1"
},
]
emitter.emit_imports {imports, messages: {}, types: []}, 'location/of/my/output.go', {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe('''
import (
\tgregor1 "github.com/keybase/node-avdl-compiler/location/of/gregor1"
\tkeybase1 "github.com/keybase/client/go/protocol/keybase1"
)\n\n
''')
return
it "should ignore packages that aren't imported with a package name", () ->
imports = [
{
path: "common.avdl",
type: "idl"
}
]
emitter.emit_imports {imports, messages: {}, types: []}, 'location/of/my/output.go', {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
)\n\n
""")
return
it "should only import the rpc package if types_only is false", () ->
emitter.emit_imports {imports: [], messages: {}, types: []}, 'location/of/my/output.go', {types_only: false}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
\t"github.com/keybase/go-framed-msgpack-rpc/rpc"
)\n\n
""")
return
it "should only import the content and time packages if types_only is false and the file contains messages", () ->
emitter.emit_imports {imports: [], messages: {fake_message: 'blah'}, types: []}, 'location/of/my/output.go', {types_only: false}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
\t"github.com/keybase/go-framed-msgpack-rpc/rpc"
\tcontext "golang.org/x/net/context"
\t"time"
)\n\n
""")
return
it "should output the errors package if there are variants", () ->
emitter.emit_imports {imports: [], messages: {}, types: [{
type: "variant",
name: "TextPaymentResult",
}]}, 'location/of/my/output.go', {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
\t"errors"
)\n\n
""")
return
return
describe "emit_typedef", () ->
it "Should emit a string typedef", () ->
type = {
type: "record"
name: "BuildPaymentID"
fields: []
typedef: "string"
}
emitter.emit_typedef type
code = emitter._code.join "\n"
expect(code).toBe("""
type BuildPaymentID string
func (o BuildPaymentID) DeepCopy() BuildPaymentID {
\treturn o
}\n
""")
return
return
describe "emit_record", () ->
it "Should emit a struct with primative value keys", () ->
record = {
type: "record"
name: "<NAME>"
fields: [
{
type: "string",
name: "statusDescription"
},
{
type: "boolean"
name: "isValidThing"
},
{
type: "long",
name: "longInt"
},
{
type: "double",
name: "doubleOrNothin"
}
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tStatusDescription\tstring\t`codec:"statusDescription" json:"statusDescription"`
\tIsValidThing\tbool\t`codec:"isValidThing" json:"isValidThing"`
\tLongInt\tint64\t`codec:"longInt" json:"longInt"`
\tDoubleOrNothin\tfloat64\t`codec:"doubleOrNothin" json:"doubleOrNothin"`
}
func (o TestRecord) DeepCopy() TestRecord {
\treturn TestRecord{
\t\tStatusDescription: o.StatusDescription,
\t\tIsValidThing: o.IsValidThing,
\t\tLongInt: o.LongInt,
\t\tDoubleOrNothin: o.DoubleOrNothin,
\t}
}\n
""")
return
it "Should not emit a DeepCopy function in the option is given", () ->
record = {
type: "record"
name: "<NAME>"
fields: [
{
type: "string",
name: "statusDescription"
},
]
}
emitter.emit_record record, { no_deep_copy: true }
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tStatusDescription\tstring\t`codec:"statusDescription" json:"statusDescription"`
}\n
""")
return
it "Should support custom types as fields", () ->
record = {
type: "record"
name: "<NAME>"
fields: [
{
type: "MySuperCoolCustomType",
name: "<NAME>Cool"
},
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tSuperCool\tMySuperCoolCustomType\t`codec:"superCool" json:"superCool"`
}
func (o TestRecord) DeepCopy() TestRecord {
\treturn TestRecord{
\t\tSuperCool: o.SuperCool.DeepCopy(),
\t}
}\n
""")
return
it "Should emit a struct with an optional type", () ->
record = {
type: "record"
name: "<NAME>"
fields: [
{
type: [null, "string"],
name: "maybeStatusDescription"
}
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tMaybeStatusDescription\t*string\t`codec:"maybeStatusDescription,omitempty" json:"maybeStatusDescription,omitempty"`
}
func (o TestRecord) DeepCopy() TestRecord {
\treturn TestRecord{
\t\tMaybeStatusDescription: (func (x *string) *string {
\t\t\tif x == nil {
\t\t\t\treturn nil
\t\t\t}
\t\t\ttmp := (*x)
\t\t\treturn &tmp
\t\t})(o.MaybeStatusDescription),
\t}
}\n
""")
return
it "Should emit a struct with an array value", () ->
record = {
type: "record",
name: "PaymentsPageLocal",
fields: [
{
type: {
type: "array",
items: "PaymentOrErrorLocal"
},
name: "payments"
},
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type PaymentsPageLocal struct {
\tPayments []PaymentOrErrorLocal `codec:"payments" json:"payments"`
}
func (o PaymentsPageLocal) DeepCopy() PaymentsPageLocal {
\treturn PaymentsPageLocal{
\t\tPayments: (func (x []PaymentOrErrorLocal) []PaymentOrErrorLocal {
\t\t\tif x == nil {
\t\t\t\treturn nil
\t\t\t}
\t\t\tret := make([]PaymentOrErrorLocal, len(x))
\t\t\tfor i, v := range x {
\t\t\t\tvCopy := v.DeepCopy()
\t\t\t\tret[i] = vCopy
\t\t\t}
\t\t\treturn ret
\t\t})(o.Payments),
\t}
}\n
""")
return
it "Should emit a struct with a map type", () ->
record = {
type: "record"
name: "StellarServerDefinitions"
fields: [
{
type: "int",
name: "revision"
},
{
type: {
type: "map"
values: "OutsideCurrencyDefinition"
keys: "OutsideCurrencyCode"
}
name: "currencies"
}
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type StellarServerDefinitions struct {
\tRevision\tint\t`codec:"revision" json:"revision"`
\tCurrencies\tmap[OutsideCurrencyCode]OutsideCurrencyDefinition\t`codec:"currencies" json:"currencies"`
}
func (o StellarServerDefinitions) DeepCopy() StellarServerDefinitions {
\treturn StellarServerDefinitions{
\t\tRevision: o.Revision,
\t\tCurrencies: (func (x map[OutsideCurrencyCode]OutsideCurrencyDefinition) map[OutsideCurrencyCode]OutsideCurrencyDefinition {
\t\t\tif x == nil {
\t\t\t\treturn nil
\t\t\t}
\t\t\tret := make(map[OutsideCurrencyCode]OutsideCurrencyDefinition, len(x))
\t\t\tfor k, v := range x {
\t\t\t\tkCopy := k.DeepCopy()
\t\t\t\tvCopy := v.DeepCopy()
\t\t\t\tret[kCopy] = vCopy
\t\t\t}
\t\t\treturn ret
\t\t})(o.Currencies),
\t}
}\n
""")
return
return
describe "emit_fixed", () ->
it "Should emit a fixed length type", () ->
emitter.emit_fixed { name: "FunHash", size: 32 }
code = emitter._code.join "\n"
expect(code).toBe("""
type FunHash [32]byte
func (o FunHash) DeepCopy() FunHash {
\tvar ret FunHash
\tcopy(ret[:], o[:])
\treturn ret
}\n
""")
return
return
describe "emit_enum", () ->
it "Should emit an enum", () ->
test_enum = {
type: "enum",
name: "AuditVersion",
symbols: [
"V0_0",
"V1_1",
"V2_2",
"V3_3"
]
}
emitter.emit_enum test_enum
code = emitter._code.join "\n"
expect(code).toBe("""
type AuditVersion int
const (
\tAuditVersion_V0 AuditVersion = 0
\tAuditVersion_V1 AuditVersion = 1
\tAuditVersion_V2 AuditVersion = 2
\tAuditVersion_V3 AuditVersion = 3
)
func (o AuditVersion) DeepCopy() AuditVersion { return o }
var AuditVersionMap = map[string]AuditVersion{
\t"V0": 0,
\t"V1": 1,
\t"V2": 2,
\t"V3": 3,
}
var AuditVersionRevMap = map[AuditVersion]string{
\t0: "V0",
\t1: "V1",
\t2: "V2",
\t3: "V3",
}
func (e AuditVersion) String() string {
\tif v, ok := AuditVersionRevMap[e]; ok {
\t\treturn v
\t}
\treturn fmt.Sprintf("%v", int(e))
}\n
""")
return
it "Should not emit a string function if nostring is given", () ->
test_enum = {
type: "enum",
name: "AuditVersion",
symbols: [
"V0_0",
"V1_1",
"V2_2",
"V3_3"
]
}
emitter.emit_enum test_enum, true
code = emitter._code.join "\n"
expect(code).toBe("""
type AuditVersion int
const (
\tAuditVersion_V0 AuditVersion = 0
\tAuditVersion_V1 AuditVersion = 1
\tAuditVersion_V2 AuditVersion = 2
\tAuditVersion_V3 AuditVersion = 3
)
func (o AuditVersion) DeepCopy() AuditVersion { return o }
var AuditVersionMap = map[string]AuditVersion{
\t"V0": 0,
\t"V1": 1,
\t"V2": 2,
\t"V3": 3,
}
var AuditVersionRevMap = map[AuditVersion]string{
\t0: "V0",
\t1: "V1",
\t2: "V2",
\t3: "V3",
}\n
""")
return
return
return
| true | {GoEmitter} = require("./go_emit");
pkg = require '../package.json'
describe "GoEmitter", () ->
emitter = null
beforeEach () ->
emitter = new GoEmitter
describe "emit_preface", () ->
it "Should emit a preface", () ->
emitter.emit_preface ["./my_test_file.avdl"], {namespace: "chat1"}
code = emitter._code.join "\n"
expect(code).toBe("""
// Auto-generated to Go types and interfaces using avdl-compiler v#{pkg.version} (https://github.com/keybase/node-avdl-compiler)
// Input file: my_test_file.avdl
package chat1\n
""")
return
it "Should note that it only generated types if types_only is enabled", () ->
emitter.emit_preface ["./my_test_file.avdl"], {namespace: "chat1"}, {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe("""
// Auto-generated to Go types using avdl-compiler v#{pkg.version} (https://github.com/keybase/node-avdl-compiler)
// Input file: my_test_file.avdl
package chat1\n
""")
return
return
describe "emit_imports", () ->
it "should handle both GOPATH based paths and relative paths", () ->
imports = [
{
path: "../gregor1",
type: "idl",
import_as: "gregor1"
},
{
path: "github.com/keybase/client/go/protocol/keybase1",
type: "idl",
import_as: "keybase1"
},
]
emitter.emit_imports {imports, messages: {}, types: []}, 'location/of/my/output.go', {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe('''
import (
\tgregor1 "github.com/keybase/node-avdl-compiler/location/of/gregor1"
\tkeybase1 "github.com/keybase/client/go/protocol/keybase1"
)\n\n
''')
return
it "should ignore packages that aren't imported with a package name", () ->
imports = [
{
path: "common.avdl",
type: "idl"
}
]
emitter.emit_imports {imports, messages: {}, types: []}, 'location/of/my/output.go', {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
)\n\n
""")
return
it "should only import the rpc package if types_only is false", () ->
emitter.emit_imports {imports: [], messages: {}, types: []}, 'location/of/my/output.go', {types_only: false}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
\t"github.com/keybase/go-framed-msgpack-rpc/rpc"
)\n\n
""")
return
it "should only import the content and time packages if types_only is false and the file contains messages", () ->
emitter.emit_imports {imports: [], messages: {fake_message: 'blah'}, types: []}, 'location/of/my/output.go', {types_only: false}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
\t"github.com/keybase/go-framed-msgpack-rpc/rpc"
\tcontext "golang.org/x/net/context"
\t"time"
)\n\n
""")
return
it "should output the errors package if there are variants", () ->
emitter.emit_imports {imports: [], messages: {}, types: [{
type: "variant",
name: "TextPaymentResult",
}]}, 'location/of/my/output.go', {types_only: true}
code = emitter._code.join "\n"
expect(code).toBe("""
import (
\t"errors"
)\n\n
""")
return
return
describe "emit_typedef", () ->
it "Should emit a string typedef", () ->
type = {
type: "record"
name: "BuildPaymentID"
fields: []
typedef: "string"
}
emitter.emit_typedef type
code = emitter._code.join "\n"
expect(code).toBe("""
type BuildPaymentID string
func (o BuildPaymentID) DeepCopy() BuildPaymentID {
\treturn o
}\n
""")
return
return
describe "emit_record", () ->
it "Should emit a struct with primative value keys", () ->
record = {
type: "record"
name: "PI:NAME:<NAME>END_PI"
fields: [
{
type: "string",
name: "statusDescription"
},
{
type: "boolean"
name: "isValidThing"
},
{
type: "long",
name: "longInt"
},
{
type: "double",
name: "doubleOrNothin"
}
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tStatusDescription\tstring\t`codec:"statusDescription" json:"statusDescription"`
\tIsValidThing\tbool\t`codec:"isValidThing" json:"isValidThing"`
\tLongInt\tint64\t`codec:"longInt" json:"longInt"`
\tDoubleOrNothin\tfloat64\t`codec:"doubleOrNothin" json:"doubleOrNothin"`
}
func (o TestRecord) DeepCopy() TestRecord {
\treturn TestRecord{
\t\tStatusDescription: o.StatusDescription,
\t\tIsValidThing: o.IsValidThing,
\t\tLongInt: o.LongInt,
\t\tDoubleOrNothin: o.DoubleOrNothin,
\t}
}\n
""")
return
it "Should not emit a DeepCopy function in the option is given", () ->
record = {
type: "record"
name: "PI:NAME:<NAME>END_PI"
fields: [
{
type: "string",
name: "statusDescription"
},
]
}
emitter.emit_record record, { no_deep_copy: true }
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tStatusDescription\tstring\t`codec:"statusDescription" json:"statusDescription"`
}\n
""")
return
it "Should support custom types as fields", () ->
record = {
type: "record"
name: "PI:NAME:<NAME>END_PI"
fields: [
{
type: "MySuperCoolCustomType",
name: "PI:NAME:<NAME>END_PICool"
},
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tSuperCool\tMySuperCoolCustomType\t`codec:"superCool" json:"superCool"`
}
func (o TestRecord) DeepCopy() TestRecord {
\treturn TestRecord{
\t\tSuperCool: o.SuperCool.DeepCopy(),
\t}
}\n
""")
return
it "Should emit a struct with an optional type", () ->
record = {
type: "record"
name: "PI:NAME:<NAME>END_PI"
fields: [
{
type: [null, "string"],
name: "maybeStatusDescription"
}
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type TestRecord struct {
\tMaybeStatusDescription\t*string\t`codec:"maybeStatusDescription,omitempty" json:"maybeStatusDescription,omitempty"`
}
func (o TestRecord) DeepCopy() TestRecord {
\treturn TestRecord{
\t\tMaybeStatusDescription: (func (x *string) *string {
\t\t\tif x == nil {
\t\t\t\treturn nil
\t\t\t}
\t\t\ttmp := (*x)
\t\t\treturn &tmp
\t\t})(o.MaybeStatusDescription),
\t}
}\n
""")
return
it "Should emit a struct with an array value", () ->
record = {
type: "record",
name: "PaymentsPageLocal",
fields: [
{
type: {
type: "array",
items: "PaymentOrErrorLocal"
},
name: "payments"
},
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type PaymentsPageLocal struct {
\tPayments []PaymentOrErrorLocal `codec:"payments" json:"payments"`
}
func (o PaymentsPageLocal) DeepCopy() PaymentsPageLocal {
\treturn PaymentsPageLocal{
\t\tPayments: (func (x []PaymentOrErrorLocal) []PaymentOrErrorLocal {
\t\t\tif x == nil {
\t\t\t\treturn nil
\t\t\t}
\t\t\tret := make([]PaymentOrErrorLocal, len(x))
\t\t\tfor i, v := range x {
\t\t\t\tvCopy := v.DeepCopy()
\t\t\t\tret[i] = vCopy
\t\t\t}
\t\t\treturn ret
\t\t})(o.Payments),
\t}
}\n
""")
return
it "Should emit a struct with a map type", () ->
record = {
type: "record"
name: "StellarServerDefinitions"
fields: [
{
type: "int",
name: "revision"
},
{
type: {
type: "map"
values: "OutsideCurrencyDefinition"
keys: "OutsideCurrencyCode"
}
name: "currencies"
}
]
}
emitter.emit_record record
code = emitter._code.join "\n"
expect(code).toBe("""
type StellarServerDefinitions struct {
\tRevision\tint\t`codec:"revision" json:"revision"`
\tCurrencies\tmap[OutsideCurrencyCode]OutsideCurrencyDefinition\t`codec:"currencies" json:"currencies"`
}
func (o StellarServerDefinitions) DeepCopy() StellarServerDefinitions {
\treturn StellarServerDefinitions{
\t\tRevision: o.Revision,
\t\tCurrencies: (func (x map[OutsideCurrencyCode]OutsideCurrencyDefinition) map[OutsideCurrencyCode]OutsideCurrencyDefinition {
\t\t\tif x == nil {
\t\t\t\treturn nil
\t\t\t}
\t\t\tret := make(map[OutsideCurrencyCode]OutsideCurrencyDefinition, len(x))
\t\t\tfor k, v := range x {
\t\t\t\tkCopy := k.DeepCopy()
\t\t\t\tvCopy := v.DeepCopy()
\t\t\t\tret[kCopy] = vCopy
\t\t\t}
\t\t\treturn ret
\t\t})(o.Currencies),
\t}
}\n
""")
return
return
describe "emit_fixed", () ->
it "Should emit a fixed length type", () ->
emitter.emit_fixed { name: "FunHash", size: 32 }
code = emitter._code.join "\n"
expect(code).toBe("""
type FunHash [32]byte
func (o FunHash) DeepCopy() FunHash {
\tvar ret FunHash
\tcopy(ret[:], o[:])
\treturn ret
}\n
""")
return
return
describe "emit_enum", () ->
it "Should emit an enum", () ->
test_enum = {
type: "enum",
name: "AuditVersion",
symbols: [
"V0_0",
"V1_1",
"V2_2",
"V3_3"
]
}
emitter.emit_enum test_enum
code = emitter._code.join "\n"
expect(code).toBe("""
type AuditVersion int
const (
\tAuditVersion_V0 AuditVersion = 0
\tAuditVersion_V1 AuditVersion = 1
\tAuditVersion_V2 AuditVersion = 2
\tAuditVersion_V3 AuditVersion = 3
)
func (o AuditVersion) DeepCopy() AuditVersion { return o }
var AuditVersionMap = map[string]AuditVersion{
\t"V0": 0,
\t"V1": 1,
\t"V2": 2,
\t"V3": 3,
}
var AuditVersionRevMap = map[AuditVersion]string{
\t0: "V0",
\t1: "V1",
\t2: "V2",
\t3: "V3",
}
func (e AuditVersion) String() string {
\tif v, ok := AuditVersionRevMap[e]; ok {
\t\treturn v
\t}
\treturn fmt.Sprintf("%v", int(e))
}\n
""")
return
it "Should not emit a string function if nostring is given", () ->
test_enum = {
type: "enum",
name: "AuditVersion",
symbols: [
"V0_0",
"V1_1",
"V2_2",
"V3_3"
]
}
emitter.emit_enum test_enum, true
code = emitter._code.join "\n"
expect(code).toBe("""
type AuditVersion int
const (
\tAuditVersion_V0 AuditVersion = 0
\tAuditVersion_V1 AuditVersion = 1
\tAuditVersion_V2 AuditVersion = 2
\tAuditVersion_V3 AuditVersion = 3
)
func (o AuditVersion) DeepCopy() AuditVersion { return o }
var AuditVersionMap = map[string]AuditVersion{
\t"V0": 0,
\t"V1": 1,
\t"V2": 2,
\t"V3": 3,
}
var AuditVersionRevMap = map[AuditVersion]string{
\t0: "V0",
\t1: "V1",
\t2: "V2",
\t3: "V3",
}\n
""")
return
return
return
|
[
{
"context": "ng unless 'string' is typeof opts.name\n\n key = \"#{opts.name}-#{opts.version}\"\n\n storage = @appStorages[key] or= new AppStora",
"end": 520,
"score": 0.998031735420227,
"start": 490,
"tag": "KEY",
"value": "\"#{opts.name}-#{opts.version}\""
}
] | client/app/lib/appstoragecontroller.coffee | ezgikaysi/koding | 1 | kd = require 'kd'
AppStorage = require './appstorage'
module.exports =
class AppStorageController extends kd.Controller
constructor: ->
super
@appStorages = {}
storage: (name, version) ->
if 'object' is typeof name then opts = name
else
opts =
name : name
version : version or AppStorage.DEFAULT_VERSION
throwString = 'storage name must be provided'
throw throwString unless 'string' is typeof opts.name
key = "#{opts.name}-#{opts.version}"
storage = @appStorages[key] or= new AppStorage opts.name, opts.version
storage.fetchStorage() unless opts.fetch is false
return storage
| 212730 | kd = require 'kd'
AppStorage = require './appstorage'
module.exports =
class AppStorageController extends kd.Controller
constructor: ->
super
@appStorages = {}
storage: (name, version) ->
if 'object' is typeof name then opts = name
else
opts =
name : name
version : version or AppStorage.DEFAULT_VERSION
throwString = 'storage name must be provided'
throw throwString unless 'string' is typeof opts.name
key = <KEY>
storage = @appStorages[key] or= new AppStorage opts.name, opts.version
storage.fetchStorage() unless opts.fetch is false
return storage
| true | kd = require 'kd'
AppStorage = require './appstorage'
module.exports =
class AppStorageController extends kd.Controller
constructor: ->
super
@appStorages = {}
storage: (name, version) ->
if 'object' is typeof name then opts = name
else
opts =
name : name
version : version or AppStorage.DEFAULT_VERSION
throwString = 'storage name must be provided'
throw throwString unless 'string' is typeof opts.name
key = PI:KEY:<KEY>END_PI
storage = @appStorages[key] or= new AppStorage opts.name, opts.version
storage.fetchStorage() unless opts.fetch is false
return storage
|
[
{
"context": "Mocha-cakes Reference ========\nhttps://github.com/quangv/mocha-cakes\nhttps://github.com/visionmedia/should",
"end": 339,
"score": 0.9995476603507996,
"start": 333,
"tag": "USERNAME",
"value": "quangv"
},
{
"context": "/github.com/quangv/mocha-cakes\nhttps://github.com... | src/test/nesting_spec.coffee | NicMcPhee/whooping-crane-model | 0 | 'use strict'
require 'mocha-cakes'
ModelParameters = require '../lib/model_parameters'
Clock = require '../lib/clock'
Bird = require '../lib/bird'
Population = require '../lib/population'
Nest = require '../lib/nest'
Nesting = require '../lib/nesting'
###
======== A Handy Little Mocha-cakes Reference ========
https://github.com/quangv/mocha-cakes
https://github.com/visionmedia/should.js
https://github.com/visionmedia/mocha
Mocha-cakes:
Feature, Scenario: maps to describe
Given, When, Then: maps to it,
but if first message argument is ommited, it'll be a describe
And, But, I: maps to it,
but if first message argument is ommited, it'll be a describe
Mocha hooks:
before ()-> # before describe
after ()-> # after describe
beforeEach ()-> # before each it
afterEach ()-> # after each it
Should assertions:
should.exist('hello')
should.fail('expected an error!')
true.should.be.ok
true.should.be.true
false.should.be.false
(()-> arguments)(1,2,3).should.be.arguments
[1,2,3].should.eql([1,2,3])
should.strictEqual(undefined, value)
user.age.should.be.within(5, 50)
username.should.match(/^\w+$/)
user.should.be.a('object')
[].should.be.an.instanceOf(Array)
user.should.have.property('age', 15)
user.age.should.be.above(5)
user.age.should.be.below(100)
user.pets.should.have.length(5)
res.should.have.status(200) #res.statusCode should be 200
res.should.be.json
res.should.be.html
res.should.have.header('Content-Length', '123')
[].should.be.empty
[1,2,3].should.include(3)
'foo bar baz'.should.include('foo')
{ name: 'TJ', pet: tobi }.user.should.include({ pet: tobi, name: 'TJ' })
{ foo: 'bar', baz: 'raz' }.should.have.keys('foo', 'bar')
(()-> throw new Error('failed to baz')).should.throwError(/^fail.+/)
user.should.have.property('pets').with.lengthOf(4)
user.should.be.a('object').and.have.property('name', 'tj')
###
###
It's possible that I could build the simulation without
creating these nest objects (just act on pairs of individuals)
but it's easier for me to think about this way, so I think
I'm going to go with it for now.
###
makeNest = (nestingTime) ->
firstBird = new Bird()
secondBird = new Bird()
nest = new Nest([firstBird, secondBird])
nest._nestingTime = nestingTime
nest
sleep = (ms) ->
start = new Date().getTime()
continue while new Date().getTime() - start < ms
Feature "Nesting",
"In order to model crane populations",
"as a modeler",
"I need to model nesting and nest management", ->
Feature "Can build nests from list of breeding pairs",
"In order to model nesting",
"as a modeler",
"I want to be able to construct nests from breeding pairs", ->
Scenario "New nests from breeding pairs", ->
before -> Clock.reset()
population = null
numBirds = 100
nesting = null
expectedNests = null
Given "I construct a population of #{numBirds} birds", ->
population = new Population(100)
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create breeding pairs", ->
population.mateUnpairedBirds()
When "I construct nests from the breeding pairs", ->
matingPairs = population.matingPairs()
expectedNests =
ModelParameters.nestingProbability * matingPairs.length
nesting = new Nesting(matingPairs)
Then "I will usually have about #{expectedNests} nests", ->
nesting.activeNests().length.should.be.approximately(expectedNests,
0.33 * expectedNests)
#nesting.should.fail("WHY DOES THIS KEEP PRINTING NULL?")
Feature "Can model egg collection",
"In order to model nest management",
"as a modeler",
"I need to model human collection of eggs from early nests", ->
Scenario "Egg collection from proportion of early nests", ->
numEarlyNests = 17
numLateNests = 32
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
Math.floor(numEarlyNests * ModelParameters.collectionProbability)
numReleasedNests =
Math.min(numCollectedNests, ModelParameters.releaseCount)
numUncollectedNests =
numEarlyNests - numCollectedNests
numActiveNests = numUncollectedNests + numLateNests
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "there should be #{totalNests} nests", ->
nesting.activeNests().length.should.eql totalNests
When "Eggs are collected", ->
nesting.collectEggs()
Then "I should have #{numUncollectedNests + numLateNests} \
active nests", ->
nesting.activeNests().length.should.eql numActiveNests
And "I should have #{2*numCollectedNests} collected nests", ->
nesting.collectedNests().length.should.eql (2*numCollectedNests)
And "I should have #{numReleasedNests} released nests", ->
nesting.releasedNests().length.should.eql numReleasedNests
Feature "Can model nest abandonment",
"In order to model nesting",
"as as modeler",
"I need to be able to model nest abandonment", ->
Scenario "Abandoment of all early nests w/o collection", ->
numEarlyNests = 37
numLateNests = 18
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "Total number of nests should be #{totalNests}", ->
nesting.activeNests().length.should.eql totalNests
When "Birds abandon their nests", ->
nesting.abandonNests()
Then "I should have #{numEarlyNests} abandoned nests", ->
nesting.abandonedNests().length.should.eql numEarlyNests
And "I should have #{numLateNests} active nests", ->
nesting.activeNests().length.should.eql numLateNests
Scenario "Abandoment of all early nests after collection", ->
Scenario "More early nests than relase count", ->
numEarlyNests = 37
numLateNests = 5
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
Math.floor(numEarlyNests * ModelParameters.collectionProbability)
numReleasedNests =
Math.min(numCollectedNests, ModelParameters.releaseCount)
numUncollectedNests =
numEarlyNests - numCollectedNests
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "Total number of nests should be #{totalNests}", ->
nesting.activeNests().length.should.eql totalNests
When "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
Then "I should have #{numUncollectedNests + numLateNests} \
active nests", ->
nesting.activeNests().length.should.eql numLateNests
And "I should have #{2*numCollectedNests} collected nests", ->
nesting.collectedNests().length.should.eql (2*numCollectedNests)
And "I should have #{numReleasedNests} released nests", ->
nesting.releasedNests().length.should.eql numReleasedNests
And "I should have #{numEarlyNests} abandoned nests", ->
nesting.abandonedNests().length.should.eql numUncollectedNests
Scenario "Fewer early nests than relase count", ->
numEarlyNests = 3
numLateNests = 5
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
Math.floor(numEarlyNests * ModelParameters.collectionProbability)
numReleasedNests =
Math.min(numCollectedNests, ModelParameters.releaseCount)
numUncollectedNests =
numEarlyNests - numCollectedNests
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "Total number of nests should be #{totalNests}", ->
nesting.activeNests().length.should.eql totalNests
When "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
Then "I should have #{numUncollectedNests + numLateNests} \
active nests", ->
nesting.activeNests().length.should.eql numLateNests
And "I should have #{2*numCollectedNests} collected nests", ->
nesting.collectedNests().length.should.eql (2*numCollectedNests)
And "I should have #{2*numReleasedNests} released nests", ->
nesting.releasedNests().length.should.eql (2*numReleasedNests)
And "I should have #{numEarlyNests} abandoned nests", ->
nesting.abandonedNests().length.should.eql numUncollectedNests
Feature "Egg hatching",
"In order to model egg hatching",
"as a modeler",
"I need to model the conversion of eggs into birds", ->
Scenario "Correct number and type with only late nests", ->
numLateNests = 37
lateNests = null
nesting = null
numBirds =
Math.floor(numLateNests * ModelParameters.eggConversionRate)
newBirds = null
Given "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = lateNests
And "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
When "eggs hatch", ->
newBirds = nesting.hatchEggs()
Then "there should be about #{numBirds} new birds", ->
newBirds.length.should.be.approximately(numBirds, 0.33*numBirds)
And "all those birds should be wild reared", ->
bird.howReared().should.eql Bird.WILD_REARED for bird in newBirds
Scenario "Correct number and type with only early nests", ->
numEarlyNests = 37
earlyNests = null
nesting = null
numCollectedNests =
numEarlyNests * ModelParameters.collectionProbability
numBirds = Math.min(ModelParameters.releaseCount, numCollectedNests)
newBirds = null
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests
And "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
When "eggs hatch", ->
newBirds = nesting.hatchEggs()
Then "there should be #{numBirds} new birds", ->
newBirds.length.should.eql numBirds
And "all those birds should be captive reared", ->
bird.howReared().should.eql Bird.CAPTIVE_REARED for bird in newBirds
Scenario "Correct number and type with mixed nests", ->
numEarlyNests = 37
numLateNests = 45
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
numEarlyNests * ModelParameters.collectionProbability
numEarlyBirds =
Math.min(ModelParameters.releaseCount, numCollectedNests)
numLateBirds =
Math.floor(numLateNests * ModelParameters.eggConversionRate)
numBirds = numEarlyBirds + numLateBirds
newBirds = null
numCaptiveReared = null
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be the combination of \
both of those nest sets", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
And "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
When "eggs hatch", ->
newBirds = nesting.hatchEggs()
Then "there should be about #{numBirds} new birds", ->
newBirds.length.should.be.approximately(numBirds, 0.5 * numBirds)
And "#{numEarlyBirds} of those birds should be captive reared", ->
captiveReared = newBirds.filter((b) -> b.isCaptive())
numCaptiveReared = captiveReared.length
numCaptiveReared.should.eql numEarlyBirds
And "the rest of those birds should be wild reared", ->
wildReared = newBirds.filter((b) -> b.isWild())
numWildReared = wildReared.length
wildReared.length.should.eql (newBirds.length - numCaptiveReared)
Feature "Full reproduction cycle",
"In order to model the crane lifecycle",
"as a modeler",
"I need to be able to model a full year reproduction cycle", ->
Scenario "Initial population is all late nesters", ->
numInitialBirds = 200
population = new Population(0)
nesting = null
newBirds = null
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
expectedNumBirds = numNests * ModelParameters.eggConversionRate
Given "I construct a population of #{numInitialBirds} birds", ->
Clock.reset()
population.addBird(new Bird(Bird.LATE)) for [0...numInitialBirds]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.5 * expectedNumBirds)
And "almost all of them will be late nesters", ->
lateNesters = newBirds.filter((b) -> b.isLate())
expectedLate = expectedNumBirds * (1 - ModelParameters.mutationRate)
lateNesters.length.should.be.approximately(expectedLate,
expectedLate * 0.5)
Scenario false, "Large initial population is all early nesters", ->
numInitialBirds = 100
population = new Population(0)
nesting = null
newBirds = null
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
expectedNumBirds =
Math.min(numNests * ModelParameters.collectionProbability,
ModelParameters.releaseCount)
Given "I construct a population of #{numInitialBirds} birds", ->
Clock.reset()
population.addBird(new Bird(Bird.EARLY)) for [0...numInitialBirds]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.5 * expectedNumBirds)
And "almost all of them will be early nesters", ->
earlyNesters = newBirds.filter((b) -> b.isEarly())
expectedEarly =
expectedNumBirds * (1 - ModelParameters.mutationRate)
earlyNesters.length.should.be.approximately(expectedEarly,
expectedEarly * 0.5)
Scenario false, "Small initial population is all early nesters", ->
# Small enough that the expected number of birds is less than
# ModelParameters.releaseCount
numInitialBirds = 32
population = new Population(0)
nesting = null
newBirds = null
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
expectedNumBirds =
Math.min(numNests * ModelParameters.collectionProbability,
ModelParameters.releaseCount)
Given "I construct a population of #{numInitialBirds} birds", ->
Clock.reset()
population.addBird(new Bird(Bird.EARLY)) for [0...numInitialBirds]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.5 * expectedNumBirds)
And "almost all of them will be early nesters", ->
earlyNesters = newBirds.filter((b) -> b.isEarly())
expectedEarly =
expectedNumBirds * (1 - ModelParameters.mutationRate)
earlyNesters.length.should.be.approximately(expectedEarly,
expectedEarly * 0.5)
Scenario false, "Mixed initial population of early and late nesters", ->
numEarlyNesters = 200
numLateNesters = 200
numInitialBirds = numEarlyNesters + numLateNesters
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
numNests.should.eql 100
numAllEarlyNests = 0.25 * numNests
numAllLateNests = 0.25 * numNests
numMixedNests = 0.5 * numNests
numAllEarlyNests.should.eql 25
numAllLateNests.should.eql 25
numMixedNests.should.eql 50
# Assumes the early wins strategy
numEarlyNests = numAllEarlyNests + numMixedNests
numLateNests = numAllLateNests
numReNests = numEarlyNests * ModelParameters.renestingProbability
numEarlyNests.should.eql 75
numLateNests.should.eql 25
numReNests.should.eql 37.5
numWildNests = numReNests + numLateNests
numWildNests.should.eql 62.5
expectedCaptiveBirds =
Math.min(numNests * ModelParameters.collectionProbability,
ModelParameters.releaseCount)
expectedCaptiveBirds.should.eql 6
expectedWildBirds = numWildNests * ModelParameters.eggConversionRate
expectedWildBirds.should.eql 31.25
expectedNumBirds = expectedCaptiveBirds + expectedWildBirds
expectedNumBirds.should.eql 37.25
# The 2/3 comes from:
# 1/3 of these nests come from early-early (EE) pairs and
# 2/3 come from early-late (EL) pairs.
# All of the EE pairs generate early-nesting offspring, and
# 1/2 of the EL pairs generate early-nesting offspring, so we
# get 1/3 + (2/3)*(1/2) = 1/3 + 1/3 = 2/3 of these birds will
# be early nesters.
expectedEarlyNesters =
2/3 * (expectedCaptiveBirds +
numReNests * ModelParameters.eggConversionRate)
expectedEarlyNesters.should.eql 16.5
# The 1/3 comes from the math above.
expectedLateNesters =
numLateNests * ModelParameters.eggConversionRate +
(1/3) * (expectedCaptiveBirds +
numReNests * ModelParameters.eggConversionRate)
expectedLateNesters.should.eql 20.75
expectedNumBirds.should.eql (expectedEarlyNesters+expectedLateNesters)
population = new Population(0)
nesting = null
newBirds = null
Given "I construct a population of #{numEarlyNesters} \
early birds and #{numLateNesters} late birds", ->
Clock.reset()
population.addBird(new Bird(Bird.EARLY)) for [0...numEarlyNesters]
population.addBird(new Bird(Bird.LATE)) for [0...numLateNesters]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.33 * expectedNumBirds)
And "approximately #{expectedEarlyNesters} should be \
early nesters", ->
earlyNesters = newBirds.filter((b) -> b.isEarly())
earlyNesters.length.should.be.approximately(expectedEarlyNesters,
expectedEarlyNesters)
And "approximately #{expectedLateNesters} should be late nesters", ->
lateNesters = newBirds.filter((b) -> b.isLate())
lateNesters.length.should.be.approximately(expectedLateNesters,
expectedLateNesters * 0.5)
And "the original birds are all
#{ModelParameters.pairingAge} years old", ->
population.birds().every(
(b) -> b.age().should.eql ModelParameters.pairingAge)
And "the new birds are all 0 years old", ->
newBirds.every((b) -> b.age().should.eql 0)
| 168385 | 'use strict'
require 'mocha-cakes'
ModelParameters = require '../lib/model_parameters'
Clock = require '../lib/clock'
Bird = require '../lib/bird'
Population = require '../lib/population'
Nest = require '../lib/nest'
Nesting = require '../lib/nesting'
###
======== A Handy Little Mocha-cakes Reference ========
https://github.com/quangv/mocha-cakes
https://github.com/visionmedia/should.js
https://github.com/visionmedia/mocha
Mocha-cakes:
Feature, Scenario: maps to describe
Given, When, Then: maps to it,
but if first message argument is ommited, it'll be a describe
And, But, I: maps to it,
but if first message argument is ommited, it'll be a describe
Mocha hooks:
before ()-> # before describe
after ()-> # after describe
beforeEach ()-> # before each it
afterEach ()-> # after each it
Should assertions:
should.exist('hello')
should.fail('expected an error!')
true.should.be.ok
true.should.be.true
false.should.be.false
(()-> arguments)(1,2,3).should.be.arguments
[1,2,3].should.eql([1,2,3])
should.strictEqual(undefined, value)
user.age.should.be.within(5, 50)
username.should.match(/^\w+$/)
user.should.be.a('object')
[].should.be.an.instanceOf(Array)
user.should.have.property('age', 15)
user.age.should.be.above(5)
user.age.should.be.below(100)
user.pets.should.have.length(5)
res.should.have.status(200) #res.statusCode should be 200
res.should.be.json
res.should.be.html
res.should.have.header('Content-Length', '123')
[].should.be.empty
[1,2,3].should.include(3)
'foo bar baz'.should.include('foo')
{ name: '<NAME>', pet: tobi }.user.should.include({ pet: tobi, name: '<NAME>' })
{ foo: 'bar', baz: 'raz' }.should.have.keys('foo', 'bar')
(()-> throw new Error('failed to baz')).should.throwError(/^fail.+/)
user.should.have.property('pets').with.lengthOf(4)
user.should.be.a('object').and.have.property('name', '<NAME>')
###
###
It's possible that I could build the simulation without
creating these nest objects (just act on pairs of individuals)
but it's easier for me to think about this way, so I think
I'm going to go with it for now.
###
makeNest = (nestingTime) ->
firstBird = new Bird()
secondBird = new Bird()
nest = new Nest([firstBird, secondBird])
nest._nestingTime = nestingTime
nest
sleep = (ms) ->
start = new Date().getTime()
continue while new Date().getTime() - start < ms
Feature "Nesting",
"In order to model crane populations",
"as a modeler",
"I need to model nesting and nest management", ->
Feature "Can build nests from list of breeding pairs",
"In order to model nesting",
"as a modeler",
"I want to be able to construct nests from breeding pairs", ->
Scenario "New nests from breeding pairs", ->
before -> Clock.reset()
population = null
numBirds = 100
nesting = null
expectedNests = null
Given "I construct a population of #{numBirds} birds", ->
population = new Population(100)
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create breeding pairs", ->
population.mateUnpairedBirds()
When "I construct nests from the breeding pairs", ->
matingPairs = population.matingPairs()
expectedNests =
ModelParameters.nestingProbability * matingPairs.length
nesting = new Nesting(matingPairs)
Then "I will usually have about #{expectedNests} nests", ->
nesting.activeNests().length.should.be.approximately(expectedNests,
0.33 * expectedNests)
#nesting.should.fail("WHY DOES THIS KEEP PRINTING NULL?")
Feature "Can model egg collection",
"In order to model nest management",
"as a modeler",
"I need to model human collection of eggs from early nests", ->
Scenario "Egg collection from proportion of early nests", ->
numEarlyNests = 17
numLateNests = 32
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
Math.floor(numEarlyNests * ModelParameters.collectionProbability)
numReleasedNests =
Math.min(numCollectedNests, ModelParameters.releaseCount)
numUncollectedNests =
numEarlyNests - numCollectedNests
numActiveNests = numUncollectedNests + numLateNests
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "there should be #{totalNests} nests", ->
nesting.activeNests().length.should.eql totalNests
When "Eggs are collected", ->
nesting.collectEggs()
Then "I should have #{numUncollectedNests + numLateNests} \
active nests", ->
nesting.activeNests().length.should.eql numActiveNests
And "I should have #{2*numCollectedNests} collected nests", ->
nesting.collectedNests().length.should.eql (2*numCollectedNests)
And "I should have #{numReleasedNests} released nests", ->
nesting.releasedNests().length.should.eql numReleasedNests
Feature "Can model nest abandonment",
"In order to model nesting",
"as as modeler",
"I need to be able to model nest abandonment", ->
Scenario "Abandoment of all early nests w/o collection", ->
numEarlyNests = 37
numLateNests = 18
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "Total number of nests should be #{totalNests}", ->
nesting.activeNests().length.should.eql totalNests
When "Birds abandon their nests", ->
nesting.abandonNests()
Then "I should have #{numEarlyNests} abandoned nests", ->
nesting.abandonedNests().length.should.eql numEarlyNests
And "I should have #{numLateNests} active nests", ->
nesting.activeNests().length.should.eql numLateNests
Scenario "Abandoment of all early nests after collection", ->
Scenario "More early nests than relase count", ->
numEarlyNests = 37
numLateNests = 5
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
Math.floor(numEarlyNests * ModelParameters.collectionProbability)
numReleasedNests =
Math.min(numCollectedNests, ModelParameters.releaseCount)
numUncollectedNests =
numEarlyNests - numCollectedNests
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "Total number of nests should be #{totalNests}", ->
nesting.activeNests().length.should.eql totalNests
When "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
Then "I should have #{numUncollectedNests + numLateNests} \
active nests", ->
nesting.activeNests().length.should.eql numLateNests
And "I should have #{2*numCollectedNests} collected nests", ->
nesting.collectedNests().length.should.eql (2*numCollectedNests)
And "I should have #{numReleasedNests} released nests", ->
nesting.releasedNests().length.should.eql numReleasedNests
And "I should have #{numEarlyNests} abandoned nests", ->
nesting.abandonedNests().length.should.eql numUncollectedNests
Scenario "Fewer early nests than relase count", ->
numEarlyNests = 3
numLateNests = 5
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
Math.floor(numEarlyNests * ModelParameters.collectionProbability)
numReleasedNests =
Math.min(numCollectedNests, ModelParameters.releaseCount)
numUncollectedNests =
numEarlyNests - numCollectedNests
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "Total number of nests should be #{totalNests}", ->
nesting.activeNests().length.should.eql totalNests
When "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
Then "I should have #{numUncollectedNests + numLateNests} \
active nests", ->
nesting.activeNests().length.should.eql numLateNests
And "I should have #{2*numCollectedNests} collected nests", ->
nesting.collectedNests().length.should.eql (2*numCollectedNests)
And "I should have #{2*numReleasedNests} released nests", ->
nesting.releasedNests().length.should.eql (2*numReleasedNests)
And "I should have #{numEarlyNests} abandoned nests", ->
nesting.abandonedNests().length.should.eql numUncollectedNests
Feature "Egg hatching",
"In order to model egg hatching",
"as a modeler",
"I need to model the conversion of eggs into birds", ->
Scenario "Correct number and type with only late nests", ->
numLateNests = 37
lateNests = null
nesting = null
numBirds =
Math.floor(numLateNests * ModelParameters.eggConversionRate)
newBirds = null
Given "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = lateNests
And "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
When "eggs hatch", ->
newBirds = nesting.hatchEggs()
Then "there should be about #{numBirds} new birds", ->
newBirds.length.should.be.approximately(numBirds, 0.33*numBirds)
And "all those birds should be wild reared", ->
bird.howReared().should.eql Bird.WILD_REARED for bird in newBirds
Scenario "Correct number and type with only early nests", ->
numEarlyNests = 37
earlyNests = null
nesting = null
numCollectedNests =
numEarlyNests * ModelParameters.collectionProbability
numBirds = Math.min(ModelParameters.releaseCount, numCollectedNests)
newBirds = null
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests
And "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
When "eggs hatch", ->
newBirds = nesting.hatchEggs()
Then "there should be #{numBirds} new birds", ->
newBirds.length.should.eql numBirds
And "all those birds should be captive reared", ->
bird.howReared().should.eql Bird.CAPTIVE_REARED for bird in newBirds
Scenario "Correct number and type with mixed nests", ->
numEarlyNests = 37
numLateNests = 45
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
numEarlyNests * ModelParameters.collectionProbability
numEarlyBirds =
Math.min(ModelParameters.releaseCount, numCollectedNests)
numLateBirds =
Math.floor(numLateNests * ModelParameters.eggConversionRate)
numBirds = numEarlyBirds + numLateBirds
newBirds = null
numCaptiveReared = null
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be the combination of \
both of those nest sets", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
And "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
When "eggs hatch", ->
newBirds = nesting.hatchEggs()
Then "there should be about #{numBirds} new birds", ->
newBirds.length.should.be.approximately(numBirds, 0.5 * numBirds)
And "#{numEarlyBirds} of those birds should be captive reared", ->
captiveReared = newBirds.filter((b) -> b.isCaptive())
numCaptiveReared = captiveReared.length
numCaptiveReared.should.eql numEarlyBirds
And "the rest of those birds should be wild reared", ->
wildReared = newBirds.filter((b) -> b.isWild())
numWildReared = wildReared.length
wildReared.length.should.eql (newBirds.length - numCaptiveReared)
Feature "Full reproduction cycle",
"In order to model the crane lifecycle",
"as a modeler",
"I need to be able to model a full year reproduction cycle", ->
Scenario "Initial population is all late nesters", ->
numInitialBirds = 200
population = new Population(0)
nesting = null
newBirds = null
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
expectedNumBirds = numNests * ModelParameters.eggConversionRate
Given "I construct a population of #{numInitialBirds} birds", ->
Clock.reset()
population.addBird(new Bird(Bird.LATE)) for [0...numInitialBirds]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.5 * expectedNumBirds)
And "almost all of them will be late nesters", ->
lateNesters = newBirds.filter((b) -> b.isLate())
expectedLate = expectedNumBirds * (1 - ModelParameters.mutationRate)
lateNesters.length.should.be.approximately(expectedLate,
expectedLate * 0.5)
Scenario false, "Large initial population is all early nesters", ->
numInitialBirds = 100
population = new Population(0)
nesting = null
newBirds = null
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
expectedNumBirds =
Math.min(numNests * ModelParameters.collectionProbability,
ModelParameters.releaseCount)
Given "I construct a population of #{numInitialBirds} birds", ->
Clock.reset()
population.addBird(new Bird(Bird.EARLY)) for [0...numInitialBirds]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.5 * expectedNumBirds)
And "almost all of them will be early nesters", ->
earlyNesters = newBirds.filter((b) -> b.isEarly())
expectedEarly =
expectedNumBirds * (1 - ModelParameters.mutationRate)
earlyNesters.length.should.be.approximately(expectedEarly,
expectedEarly * 0.5)
Scenario false, "Small initial population is all early nesters", ->
# Small enough that the expected number of birds is less than
# ModelParameters.releaseCount
numInitialBirds = 32
population = new Population(0)
nesting = null
newBirds = null
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
expectedNumBirds =
Math.min(numNests * ModelParameters.collectionProbability,
ModelParameters.releaseCount)
Given "I construct a population of #{numInitialBirds} birds", ->
Clock.reset()
population.addBird(new Bird(Bird.EARLY)) for [0...numInitialBirds]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.5 * expectedNumBirds)
And "almost all of them will be early nesters", ->
earlyNesters = newBirds.filter((b) -> b.isEarly())
expectedEarly =
expectedNumBirds * (1 - ModelParameters.mutationRate)
earlyNesters.length.should.be.approximately(expectedEarly,
expectedEarly * 0.5)
Scenario false, "Mixed initial population of early and late nesters", ->
numEarlyNesters = 200
numLateNesters = 200
numInitialBirds = numEarlyNesters + numLateNesters
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
numNests.should.eql 100
numAllEarlyNests = 0.25 * numNests
numAllLateNests = 0.25 * numNests
numMixedNests = 0.5 * numNests
numAllEarlyNests.should.eql 25
numAllLateNests.should.eql 25
numMixedNests.should.eql 50
# Assumes the early wins strategy
numEarlyNests = numAllEarlyNests + numMixedNests
numLateNests = numAllLateNests
numReNests = numEarlyNests * ModelParameters.renestingProbability
numEarlyNests.should.eql 75
numLateNests.should.eql 25
numReNests.should.eql 37.5
numWildNests = numReNests + numLateNests
numWildNests.should.eql 62.5
expectedCaptiveBirds =
Math.min(numNests * ModelParameters.collectionProbability,
ModelParameters.releaseCount)
expectedCaptiveBirds.should.eql 6
expectedWildBirds = numWildNests * ModelParameters.eggConversionRate
expectedWildBirds.should.eql 31.25
expectedNumBirds = expectedCaptiveBirds + expectedWildBirds
expectedNumBirds.should.eql 37.25
# The 2/3 comes from:
# 1/3 of these nests come from early-early (EE) pairs and
# 2/3 come from early-late (EL) pairs.
# All of the EE pairs generate early-nesting offspring, and
# 1/2 of the EL pairs generate early-nesting offspring, so we
# get 1/3 + (2/3)*(1/2) = 1/3 + 1/3 = 2/3 of these birds will
# be early nesters.
expectedEarlyNesters =
2/3 * (expectedCaptiveBirds +
numReNests * ModelParameters.eggConversionRate)
expectedEarlyNesters.should.eql 16.5
# The 1/3 comes from the math above.
expectedLateNesters =
numLateNests * ModelParameters.eggConversionRate +
(1/3) * (expectedCaptiveBirds +
numReNests * ModelParameters.eggConversionRate)
expectedLateNesters.should.eql 20.75
expectedNumBirds.should.eql (expectedEarlyNesters+expectedLateNesters)
population = new Population(0)
nesting = null
newBirds = null
Given "I construct a population of #{numEarlyNesters} \
early birds and #{numLateNesters} late birds", ->
Clock.reset()
population.addBird(new Bird(Bird.EARLY)) for [0...numEarlyNesters]
population.addBird(new Bird(Bird.LATE)) for [0...numLateNesters]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.33 * expectedNumBirds)
And "approximately #{expectedEarlyNesters} should be \
early nesters", ->
earlyNesters = newBirds.filter((b) -> b.isEarly())
earlyNesters.length.should.be.approximately(expectedEarlyNesters,
expectedEarlyNesters)
And "approximately #{expectedLateNesters} should be late nesters", ->
lateNesters = newBirds.filter((b) -> b.isLate())
lateNesters.length.should.be.approximately(expectedLateNesters,
expectedLateNesters * 0.5)
And "the original birds are all
#{ModelParameters.pairingAge} years old", ->
population.birds().every(
(b) -> b.age().should.eql ModelParameters.pairingAge)
And "the new birds are all 0 years old", ->
newBirds.every((b) -> b.age().should.eql 0)
| true | 'use strict'
require 'mocha-cakes'
ModelParameters = require '../lib/model_parameters'
Clock = require '../lib/clock'
Bird = require '../lib/bird'
Population = require '../lib/population'
Nest = require '../lib/nest'
Nesting = require '../lib/nesting'
###
======== A Handy Little Mocha-cakes Reference ========
https://github.com/quangv/mocha-cakes
https://github.com/visionmedia/should.js
https://github.com/visionmedia/mocha
Mocha-cakes:
Feature, Scenario: maps to describe
Given, When, Then: maps to it,
but if first message argument is ommited, it'll be a describe
And, But, I: maps to it,
but if first message argument is ommited, it'll be a describe
Mocha hooks:
before ()-> # before describe
after ()-> # after describe
beforeEach ()-> # before each it
afterEach ()-> # after each it
Should assertions:
should.exist('hello')
should.fail('expected an error!')
true.should.be.ok
true.should.be.true
false.should.be.false
(()-> arguments)(1,2,3).should.be.arguments
[1,2,3].should.eql([1,2,3])
should.strictEqual(undefined, value)
user.age.should.be.within(5, 50)
username.should.match(/^\w+$/)
user.should.be.a('object')
[].should.be.an.instanceOf(Array)
user.should.have.property('age', 15)
user.age.should.be.above(5)
user.age.should.be.below(100)
user.pets.should.have.length(5)
res.should.have.status(200) #res.statusCode should be 200
res.should.be.json
res.should.be.html
res.should.have.header('Content-Length', '123')
[].should.be.empty
[1,2,3].should.include(3)
'foo bar baz'.should.include('foo')
{ name: 'PI:NAME:<NAME>END_PI', pet: tobi }.user.should.include({ pet: tobi, name: 'PI:NAME:<NAME>END_PI' })
{ foo: 'bar', baz: 'raz' }.should.have.keys('foo', 'bar')
(()-> throw new Error('failed to baz')).should.throwError(/^fail.+/)
user.should.have.property('pets').with.lengthOf(4)
user.should.be.a('object').and.have.property('name', 'PI:NAME:<NAME>END_PI')
###
###
It's possible that I could build the simulation without
creating these nest objects (just act on pairs of individuals)
but it's easier for me to think about this way, so I think
I'm going to go with it for now.
###
makeNest = (nestingTime) ->
firstBird = new Bird()
secondBird = new Bird()
nest = new Nest([firstBird, secondBird])
nest._nestingTime = nestingTime
nest
sleep = (ms) ->
start = new Date().getTime()
continue while new Date().getTime() - start < ms
Feature "Nesting",
"In order to model crane populations",
"as a modeler",
"I need to model nesting and nest management", ->
Feature "Can build nests from list of breeding pairs",
"In order to model nesting",
"as a modeler",
"I want to be able to construct nests from breeding pairs", ->
Scenario "New nests from breeding pairs", ->
before -> Clock.reset()
population = null
numBirds = 100
nesting = null
expectedNests = null
Given "I construct a population of #{numBirds} birds", ->
population = new Population(100)
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create breeding pairs", ->
population.mateUnpairedBirds()
When "I construct nests from the breeding pairs", ->
matingPairs = population.matingPairs()
expectedNests =
ModelParameters.nestingProbability * matingPairs.length
nesting = new Nesting(matingPairs)
Then "I will usually have about #{expectedNests} nests", ->
nesting.activeNests().length.should.be.approximately(expectedNests,
0.33 * expectedNests)
#nesting.should.fail("WHY DOES THIS KEEP PRINTING NULL?")
Feature "Can model egg collection",
"In order to model nest management",
"as a modeler",
"I need to model human collection of eggs from early nests", ->
Scenario "Egg collection from proportion of early nests", ->
numEarlyNests = 17
numLateNests = 32
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
Math.floor(numEarlyNests * ModelParameters.collectionProbability)
numReleasedNests =
Math.min(numCollectedNests, ModelParameters.releaseCount)
numUncollectedNests =
numEarlyNests - numCollectedNests
numActiveNests = numUncollectedNests + numLateNests
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "there should be #{totalNests} nests", ->
nesting.activeNests().length.should.eql totalNests
When "Eggs are collected", ->
nesting.collectEggs()
Then "I should have #{numUncollectedNests + numLateNests} \
active nests", ->
nesting.activeNests().length.should.eql numActiveNests
And "I should have #{2*numCollectedNests} collected nests", ->
nesting.collectedNests().length.should.eql (2*numCollectedNests)
And "I should have #{numReleasedNests} released nests", ->
nesting.releasedNests().length.should.eql numReleasedNests
Feature "Can model nest abandonment",
"In order to model nesting",
"as as modeler",
"I need to be able to model nest abandonment", ->
Scenario "Abandoment of all early nests w/o collection", ->
numEarlyNests = 37
numLateNests = 18
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "Total number of nests should be #{totalNests}", ->
nesting.activeNests().length.should.eql totalNests
When "Birds abandon their nests", ->
nesting.abandonNests()
Then "I should have #{numEarlyNests} abandoned nests", ->
nesting.abandonedNests().length.should.eql numEarlyNests
And "I should have #{numLateNests} active nests", ->
nesting.activeNests().length.should.eql numLateNests
Scenario "Abandoment of all early nests after collection", ->
Scenario "More early nests than relase count", ->
numEarlyNests = 37
numLateNests = 5
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
Math.floor(numEarlyNests * ModelParameters.collectionProbability)
numReleasedNests =
Math.min(numCollectedNests, ModelParameters.releaseCount)
numUncollectedNests =
numEarlyNests - numCollectedNests
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "Total number of nests should be #{totalNests}", ->
nesting.activeNests().length.should.eql totalNests
When "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
Then "I should have #{numUncollectedNests + numLateNests} \
active nests", ->
nesting.activeNests().length.should.eql numLateNests
And "I should have #{2*numCollectedNests} collected nests", ->
nesting.collectedNests().length.should.eql (2*numCollectedNests)
And "I should have #{numReleasedNests} released nests", ->
nesting.releasedNests().length.should.eql numReleasedNests
And "I should have #{numEarlyNests} abandoned nests", ->
nesting.abandonedNests().length.should.eql numUncollectedNests
Scenario "Fewer early nests than relase count", ->
numEarlyNests = 3
numLateNests = 5
totalNests = numEarlyNests + numLateNests
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
Math.floor(numEarlyNests * ModelParameters.collectionProbability)
numReleasedNests =
Math.min(numCollectedNests, ModelParameters.releaseCount)
numUncollectedNests =
numEarlyNests - numCollectedNests
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
Then "Total number of nests should be #{totalNests}", ->
nesting.activeNests().length.should.eql totalNests
When "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
Then "I should have #{numUncollectedNests + numLateNests} \
active nests", ->
nesting.activeNests().length.should.eql numLateNests
And "I should have #{2*numCollectedNests} collected nests", ->
nesting.collectedNests().length.should.eql (2*numCollectedNests)
And "I should have #{2*numReleasedNests} released nests", ->
nesting.releasedNests().length.should.eql (2*numReleasedNests)
And "I should have #{numEarlyNests} abandoned nests", ->
nesting.abandonedNests().length.should.eql numUncollectedNests
Feature "Egg hatching",
"In order to model egg hatching",
"as a modeler",
"I need to model the conversion of eggs into birds", ->
Scenario "Correct number and type with only late nests", ->
numLateNests = 37
lateNests = null
nesting = null
numBirds =
Math.floor(numLateNests * ModelParameters.eggConversionRate)
newBirds = null
Given "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = lateNests
And "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
When "eggs hatch", ->
newBirds = nesting.hatchEggs()
Then "there should be about #{numBirds} new birds", ->
newBirds.length.should.be.approximately(numBirds, 0.33*numBirds)
And "all those birds should be wild reared", ->
bird.howReared().should.eql Bird.WILD_REARED for bird in newBirds
Scenario "Correct number and type with only early nests", ->
numEarlyNests = 37
earlyNests = null
nesting = null
numCollectedNests =
numEarlyNests * ModelParameters.collectionProbability
numBirds = Math.min(ModelParameters.releaseCount, numCollectedNests)
newBirds = null
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I set the nesting to be those nests", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests
And "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
When "eggs hatch", ->
newBirds = nesting.hatchEggs()
Then "there should be #{numBirds} new birds", ->
newBirds.length.should.eql numBirds
And "all those birds should be captive reared", ->
bird.howReared().should.eql Bird.CAPTIVE_REARED for bird in newBirds
Scenario "Correct number and type with mixed nests", ->
numEarlyNests = 37
numLateNests = 45
earlyNests = null
lateNests = null
nesting = null
numCollectedNests =
numEarlyNests * ModelParameters.collectionProbability
numEarlyBirds =
Math.min(ModelParameters.releaseCount, numCollectedNests)
numLateBirds =
Math.floor(numLateNests * ModelParameters.eggConversionRate)
numBirds = numEarlyBirds + numLateBirds
newBirds = null
numCaptiveReared = null
Given "I construct #{numEarlyNests} early nests", ->
earlyNests = (makeNest(Bird.EARLY) for [0...numEarlyNests])
And "I construct #{numLateNests} late nests", ->
lateNests = (makeNest(Bird.LATE) for [0...numLateNests])
And "I set the nesting to be the combination of \
both of those nest sets", ->
nesting = new Nesting([])
nesting._activeNests = earlyNests.concat(lateNests)
And "Eggs are collected", ->
nesting.collectEggs()
And "Birds abandon their nests", ->
nesting.abandonNests()
When "eggs hatch", ->
newBirds = nesting.hatchEggs()
Then "there should be about #{numBirds} new birds", ->
newBirds.length.should.be.approximately(numBirds, 0.5 * numBirds)
And "#{numEarlyBirds} of those birds should be captive reared", ->
captiveReared = newBirds.filter((b) -> b.isCaptive())
numCaptiveReared = captiveReared.length
numCaptiveReared.should.eql numEarlyBirds
And "the rest of those birds should be wild reared", ->
wildReared = newBirds.filter((b) -> b.isWild())
numWildReared = wildReared.length
wildReared.length.should.eql (newBirds.length - numCaptiveReared)
Feature "Full reproduction cycle",
"In order to model the crane lifecycle",
"as a modeler",
"I need to be able to model a full year reproduction cycle", ->
Scenario "Initial population is all late nesters", ->
numInitialBirds = 200
population = new Population(0)
nesting = null
newBirds = null
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
expectedNumBirds = numNests * ModelParameters.eggConversionRate
Given "I construct a population of #{numInitialBirds} birds", ->
Clock.reset()
population.addBird(new Bird(Bird.LATE)) for [0...numInitialBirds]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.5 * expectedNumBirds)
And "almost all of them will be late nesters", ->
lateNesters = newBirds.filter((b) -> b.isLate())
expectedLate = expectedNumBirds * (1 - ModelParameters.mutationRate)
lateNesters.length.should.be.approximately(expectedLate,
expectedLate * 0.5)
Scenario false, "Large initial population is all early nesters", ->
numInitialBirds = 100
population = new Population(0)
nesting = null
newBirds = null
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
expectedNumBirds =
Math.min(numNests * ModelParameters.collectionProbability,
ModelParameters.releaseCount)
Given "I construct a population of #{numInitialBirds} birds", ->
Clock.reset()
population.addBird(new Bird(Bird.EARLY)) for [0...numInitialBirds]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.5 * expectedNumBirds)
And "almost all of them will be early nesters", ->
earlyNesters = newBirds.filter((b) -> b.isEarly())
expectedEarly =
expectedNumBirds * (1 - ModelParameters.mutationRate)
earlyNesters.length.should.be.approximately(expectedEarly,
expectedEarly * 0.5)
Scenario false, "Small initial population is all early nesters", ->
# Small enough that the expected number of birds is less than
# ModelParameters.releaseCount
numInitialBirds = 32
population = new Population(0)
nesting = null
newBirds = null
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
expectedNumBirds =
Math.min(numNests * ModelParameters.collectionProbability,
ModelParameters.releaseCount)
Given "I construct a population of #{numInitialBirds} birds", ->
Clock.reset()
population.addBird(new Bird(Bird.EARLY)) for [0...numInitialBirds]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.5 * expectedNumBirds)
And "almost all of them will be early nesters", ->
earlyNesters = newBirds.filter((b) -> b.isEarly())
expectedEarly =
expectedNumBirds * (1 - ModelParameters.mutationRate)
earlyNesters.length.should.be.approximately(expectedEarly,
expectedEarly * 0.5)
Scenario false, "Mixed initial population of early and late nesters", ->
numEarlyNesters = 200
numLateNesters = 200
numInitialBirds = numEarlyNesters + numLateNesters
numPairs = numInitialBirds // 2
numNests = numPairs * ModelParameters.nestingProbability
numNests.should.eql 100
numAllEarlyNests = 0.25 * numNests
numAllLateNests = 0.25 * numNests
numMixedNests = 0.5 * numNests
numAllEarlyNests.should.eql 25
numAllLateNests.should.eql 25
numMixedNests.should.eql 50
# Assumes the early wins strategy
numEarlyNests = numAllEarlyNests + numMixedNests
numLateNests = numAllLateNests
numReNests = numEarlyNests * ModelParameters.renestingProbability
numEarlyNests.should.eql 75
numLateNests.should.eql 25
numReNests.should.eql 37.5
numWildNests = numReNests + numLateNests
numWildNests.should.eql 62.5
expectedCaptiveBirds =
Math.min(numNests * ModelParameters.collectionProbability,
ModelParameters.releaseCount)
expectedCaptiveBirds.should.eql 6
expectedWildBirds = numWildNests * ModelParameters.eggConversionRate
expectedWildBirds.should.eql 31.25
expectedNumBirds = expectedCaptiveBirds + expectedWildBirds
expectedNumBirds.should.eql 37.25
# The 2/3 comes from:
# 1/3 of these nests come from early-early (EE) pairs and
# 2/3 come from early-late (EL) pairs.
# All of the EE pairs generate early-nesting offspring, and
# 1/2 of the EL pairs generate early-nesting offspring, so we
# get 1/3 + (2/3)*(1/2) = 1/3 + 1/3 = 2/3 of these birds will
# be early nesters.
expectedEarlyNesters =
2/3 * (expectedCaptiveBirds +
numReNests * ModelParameters.eggConversionRate)
expectedEarlyNesters.should.eql 16.5
# The 1/3 comes from the math above.
expectedLateNesters =
numLateNests * ModelParameters.eggConversionRate +
(1/3) * (expectedCaptiveBirds +
numReNests * ModelParameters.eggConversionRate)
expectedLateNesters.should.eql 20.75
expectedNumBirds.should.eql (expectedEarlyNesters+expectedLateNesters)
population = new Population(0)
nesting = null
newBirds = null
Given "I construct a population of #{numEarlyNesters} \
early birds and #{numLateNesters} late birds", ->
Clock.reset()
population.addBird(new Bird(Bird.EARLY)) for [0...numEarlyNesters]
population.addBird(new Bird(Bird.LATE)) for [0...numLateNesters]
And "I set the clock ahead #{ModelParameters.pairingAge} years", ->
Clock.setYear(ModelParameters.pairingAge)
And "I create mating pairs", ->
population.mateUnpairedBirds()
And "I create a nesting environment", ->
nesting = new Nesting(population.matingPairs())
When "I run the reproduction cycle", ->
newBirds = nesting.reproductionCycle()
Then "I will usually have about #{expectedNumBirds} new birds", ->
newBirds.length.should.be.approximately(expectedNumBirds,
0.33 * expectedNumBirds)
And "approximately #{expectedEarlyNesters} should be \
early nesters", ->
earlyNesters = newBirds.filter((b) -> b.isEarly())
earlyNesters.length.should.be.approximately(expectedEarlyNesters,
expectedEarlyNesters)
And "approximately #{expectedLateNesters} should be late nesters", ->
lateNesters = newBirds.filter((b) -> b.isLate())
lateNesters.length.should.be.approximately(expectedLateNesters,
expectedLateNesters * 0.5)
And "the original birds are all
#{ModelParameters.pairingAge} years old", ->
population.birds().every(
(b) -> b.age().should.eql ModelParameters.pairingAge)
And "the new birds are all 0 years old", ->
newBirds.every((b) -> b.age().should.eql 0)
|
[
{
"context": " .get('/v1/rooms/history?format=json&auth_token=testtoken&room_id=testchan&date=recent')\n .reply 200",
"end": 483,
"score": 0.6677356958389282,
"start": 474,
"tag": "KEY",
"value": "testtoken"
},
{
"context": " \"from\": {\n \"name... | test/server.coffee | boxuk/albot | 0 | Require = require('covershot').require.bind(null, require)
should = require('chai').should()
Nock = require 'nock'
Configuration = Require '../lib/configuration'
Server = Require '../lib/server'
describe 'Server', () ->
describe '#action()', () ->
it 'should detect only new commands', (done) ->
count = 0
Nock('http://api.hipchat.com')
.persist()
.filteringRequestBody(/.*/, '*')
.get('/v1/rooms/history?format=json&auth_token=testtoken&room_id=testchan&date=recent')
.reply 200, (uri, requestBody) ->
count += 1
if (count is 1)
{
"messages": [
{
"date": "2010-11-19T15:48:19-0800",
"from": {
"name": "Garret Heaton",
"user_id": 10
},
"message": "testbot pulls"
}
]
}
else
{
"messages": [
{
"date": "2010-11-19T15:48:19-0800",
"from": {
"name": "Garret Heaton",
"user_id": 10
},
"message": "testbot pulls"
},
{
"date": "2010-11-19T15:48:19-0800",
"from": {
"name": "Garret Heaton",
"user_id": 10
},
"message": "testbot help"
}
]
}
Server.action '10', (id, cmd) ->
clearInterval(id)
cmd.name.should.equal 'Help'
done()
describe '#dispach()', () ->
it 'should find the right command based on a message line', () ->
cmd = Server.dispatch("testbot pulls")
cmd.should.have.property('name').equal("Pull Requests")
it 'should not dispatch for anything', () ->
cmd = Server.dispatch("anything")
should.not.exist cmd
it 'should not answer to himself', () ->
cmd = Server.dispatch("albot pulls", "#{Configuration.Nickname}")
should.not.exist cmd
it 'should match one argument', () ->
cmd = Server.dispatch("testbot help repository")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("repository")
it 'should match arguments with upper cases', () ->
cmd = Server.dispatch("testbot help Repository")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("Repository")
cmd.should.not.have.deep.property('args[1]')
it 'should match arguments with some special characters', () ->
cmd = Server.dispatch("testbot help -Do+not.merge_:")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("-Do+not.merge_:")
it 'should match url', () ->
cmd = Server.dispatch("testbot help http://github.com/testing")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("http://github.com/testing")
it 'should match arguments with numbers', () ->
cmd = Server.dispatch("testbot help 24")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("24")
it 'should match two arguments', () ->
cmd = Server.dispatch("testbot help repository two")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("repository")
cmd.should.have.deep.property('args[1]').equal("two")
it 'should match up to five arguments', () ->
cmd = Server.dispatch("testbot help repository two up to five")
cmd.should.have.deep.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("repository")
cmd.should.have.deep.property('args[1]').equal("two")
cmd.should.have.deep.property('args[2]').equal("up")
cmd.should.have.deep.property('args[3]').equal("to")
cmd.should.have.deep.property('args[4]').equal("five")
it 'should match PR url anywhere in a message', () ->
cmd = Server.dispatch("PR: https://github.com/me/albot/pull/25 https://github.com/you/albot/pull/42")
cmd.should.have.property('name').equal("Pull Requests")
cmd.should.have.deep.property('args[0]').equal("PR: https://github.com/me/albot/pull/25 https://github.com/you/albot/pull/42")
it 'should match an Issue url anywhere in a message', () ->
cmd = Server.dispatch("Issue: https://thieriotandco.atlassian.net/browse/ALB-94")
cmd.should.have.property('name').equal("Issues")
cmd.should.have.deep.property('args[0]').equal("Issue: https://thieriotandco.atlassian.net/browse/ALB-94")
| 163519 | Require = require('covershot').require.bind(null, require)
should = require('chai').should()
Nock = require 'nock'
Configuration = Require '../lib/configuration'
Server = Require '../lib/server'
describe 'Server', () ->
describe '#action()', () ->
it 'should detect only new commands', (done) ->
count = 0
Nock('http://api.hipchat.com')
.persist()
.filteringRequestBody(/.*/, '*')
.get('/v1/rooms/history?format=json&auth_token=<KEY>&room_id=testchan&date=recent')
.reply 200, (uri, requestBody) ->
count += 1
if (count is 1)
{
"messages": [
{
"date": "2010-11-19T15:48:19-0800",
"from": {
"name": "<NAME>",
"user_id": 10
},
"message": "testbot pulls"
}
]
}
else
{
"messages": [
{
"date": "2010-11-19T15:48:19-0800",
"from": {
"name": "<NAME>",
"user_id": 10
},
"message": "testbot pulls"
},
{
"date": "2010-11-19T15:48:19-0800",
"from": {
"name": "<NAME>",
"user_id": 10
},
"message": "testbot help"
}
]
}
Server.action '10', (id, cmd) ->
clearInterval(id)
cmd.name.should.equal 'Help'
done()
describe '#dispach()', () ->
it 'should find the right command based on a message line', () ->
cmd = Server.dispatch("testbot pulls")
cmd.should.have.property('name').equal("Pull Requests")
it 'should not dispatch for anything', () ->
cmd = Server.dispatch("anything")
should.not.exist cmd
it 'should not answer to himself', () ->
cmd = Server.dispatch("albot pulls", "#{Configuration.Nickname}")
should.not.exist cmd
it 'should match one argument', () ->
cmd = Server.dispatch("testbot help repository")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("repository")
it 'should match arguments with upper cases', () ->
cmd = Server.dispatch("testbot help Repository")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("Repository")
cmd.should.not.have.deep.property('args[1]')
it 'should match arguments with some special characters', () ->
cmd = Server.dispatch("testbot help -Do+not.merge_:")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("-Do+not.merge_:")
it 'should match url', () ->
cmd = Server.dispatch("testbot help http://github.com/testing")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("http://github.com/testing")
it 'should match arguments with numbers', () ->
cmd = Server.dispatch("testbot help 24")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("24")
it 'should match two arguments', () ->
cmd = Server.dispatch("testbot help repository two")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("repository")
cmd.should.have.deep.property('args[1]').equal("two")
it 'should match up to five arguments', () ->
cmd = Server.dispatch("testbot help repository two up to five")
cmd.should.have.deep.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("repository")
cmd.should.have.deep.property('args[1]').equal("two")
cmd.should.have.deep.property('args[2]').equal("up")
cmd.should.have.deep.property('args[3]').equal("to")
cmd.should.have.deep.property('args[4]').equal("five")
it 'should match PR url anywhere in a message', () ->
cmd = Server.dispatch("PR: https://github.com/me/albot/pull/25 https://github.com/you/albot/pull/42")
cmd.should.have.property('name').equal("Pull Requests")
cmd.should.have.deep.property('args[0]').equal("PR: https://github.com/me/albot/pull/25 https://github.com/you/albot/pull/42")
it 'should match an Issue url anywhere in a message', () ->
cmd = Server.dispatch("Issue: https://thieriotandco.atlassian.net/browse/ALB-94")
cmd.should.have.property('name').equal("Issues")
cmd.should.have.deep.property('args[0]').equal("Issue: https://thieriotandco.atlassian.net/browse/ALB-94")
| true | Require = require('covershot').require.bind(null, require)
should = require('chai').should()
Nock = require 'nock'
Configuration = Require '../lib/configuration'
Server = Require '../lib/server'
describe 'Server', () ->
describe '#action()', () ->
it 'should detect only new commands', (done) ->
count = 0
Nock('http://api.hipchat.com')
.persist()
.filteringRequestBody(/.*/, '*')
.get('/v1/rooms/history?format=json&auth_token=PI:KEY:<KEY>END_PI&room_id=testchan&date=recent')
.reply 200, (uri, requestBody) ->
count += 1
if (count is 1)
{
"messages": [
{
"date": "2010-11-19T15:48:19-0800",
"from": {
"name": "PI:NAME:<NAME>END_PI",
"user_id": 10
},
"message": "testbot pulls"
}
]
}
else
{
"messages": [
{
"date": "2010-11-19T15:48:19-0800",
"from": {
"name": "PI:NAME:<NAME>END_PI",
"user_id": 10
},
"message": "testbot pulls"
},
{
"date": "2010-11-19T15:48:19-0800",
"from": {
"name": "PI:NAME:<NAME>END_PI",
"user_id": 10
},
"message": "testbot help"
}
]
}
Server.action '10', (id, cmd) ->
clearInterval(id)
cmd.name.should.equal 'Help'
done()
describe '#dispach()', () ->
it 'should find the right command based on a message line', () ->
cmd = Server.dispatch("testbot pulls")
cmd.should.have.property('name').equal("Pull Requests")
it 'should not dispatch for anything', () ->
cmd = Server.dispatch("anything")
should.not.exist cmd
it 'should not answer to himself', () ->
cmd = Server.dispatch("albot pulls", "#{Configuration.Nickname}")
should.not.exist cmd
it 'should match one argument', () ->
cmd = Server.dispatch("testbot help repository")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("repository")
it 'should match arguments with upper cases', () ->
cmd = Server.dispatch("testbot help Repository")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("Repository")
cmd.should.not.have.deep.property('args[1]')
it 'should match arguments with some special characters', () ->
cmd = Server.dispatch("testbot help -Do+not.merge_:")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("-Do+not.merge_:")
it 'should match url', () ->
cmd = Server.dispatch("testbot help http://github.com/testing")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("http://github.com/testing")
it 'should match arguments with numbers', () ->
cmd = Server.dispatch("testbot help 24")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("24")
it 'should match two arguments', () ->
cmd = Server.dispatch("testbot help repository two")
cmd.should.have.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("repository")
cmd.should.have.deep.property('args[1]').equal("two")
it 'should match up to five arguments', () ->
cmd = Server.dispatch("testbot help repository two up to five")
cmd.should.have.deep.property('name').equal("Help")
cmd.should.have.deep.property('args[0]').equal("repository")
cmd.should.have.deep.property('args[1]').equal("two")
cmd.should.have.deep.property('args[2]').equal("up")
cmd.should.have.deep.property('args[3]').equal("to")
cmd.should.have.deep.property('args[4]').equal("five")
it 'should match PR url anywhere in a message', () ->
cmd = Server.dispatch("PR: https://github.com/me/albot/pull/25 https://github.com/you/albot/pull/42")
cmd.should.have.property('name').equal("Pull Requests")
cmd.should.have.deep.property('args[0]').equal("PR: https://github.com/me/albot/pull/25 https://github.com/you/albot/pull/42")
it 'should match an Issue url anywhere in a message', () ->
cmd = Server.dispatch("Issue: https://thieriotandco.atlassian.net/browse/ALB-94")
cmd.should.have.property('name').equal("Issues")
cmd.should.have.deep.property('args[0]').equal("Issue: https://thieriotandco.atlassian.net/browse/ALB-94")
|
[
{
"context": "ime Completions converted from https://github.com/Southclaw/pawn-sublime-language\n# Converter created by Rena",
"end": 118,
"score": 0.9127237200737,
"start": 109,
"tag": "USERNAME",
"value": "Southclaw"
},
{
"context": "hclaw/pawn-sublime-language\n# Converter created... | snippets/SIF.DebugLabels.pwn.cson | Wuzi/language-pawn | 4 | # SIF.DebugLabels.pwn snippets for Atom converted from Sublime Completions converted from https://github.com/Southclaw/pawn-sublime-language
# Converter created by Renato "Hii" Garcia
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'DefineDebugLabelType':
'prefix': 'DefineDebugLabelType'
'body': 'DefineDebugLabelType(${1:name[]}, ${2:colour = 0xFFFFFFFF})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'CreateDebugLabel':
'prefix': 'CreateDebugLabel'
'body': 'CreateDebugLabel(${1:type}, ${2:entityid}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:string[] = \"\"}, ${7:worldid = -1}, ${8:interiorid = -1})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'DestroyDebugLabel':
'prefix': 'DestroyDebugLabel'
'body': 'DestroyDebugLabel(${1:labelid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'ShowDebugLabelsForPlayer':
'prefix': 'ShowDebugLabelsForPlayer'
'body': 'ShowDebugLabelsForPlayer(${1:playerid}, ${2:type})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'ShowAllDebugLabelsForPlayer':
'prefix': 'ShowAllDebugLabelsForPlayer'
'body': 'ShowAllDebugLabelsForPlayer(${1:playerid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'HideDebugLabelsForPlayer':
'prefix': 'HideDebugLabelsForPlayer'
'body': 'HideDebugLabelsForPlayer(${1:playerid}, ${2:type})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'HideAllDebugLabelsForPlayer':
'prefix': 'HideAllDebugLabelsForPlayer'
'body': 'HideAllDebugLabelsForPlayer(${1:playerid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'UpdateDebugLabelString':
'prefix': 'UpdateDebugLabelString'
'body': 'UpdateDebugLabelString(${1:labelid}, ${2:string[]})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'IsValidDebugLabel':
'prefix': 'IsValidDebugLabel'
'body': 'IsValidDebugLabel(${1:labelid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'SetDebugLabelPos':
'prefix': 'SetDebugLabelPos'
'body': 'SetDebugLabelPos(${1:labelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'IsPlayerToggledDebugLabels':
'prefix': 'IsPlayerToggledDebugLabels'
'body': 'IsPlayerToggledDebugLabels(${1:playerid}, ${2:type})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'IsPlayerToggledAllDebugLabels':
'prefix': 'IsPlayerToggledAllDebugLabels'
'body': 'IsPlayerToggledAllDebugLabels(${1:playerid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
| 209311 | # SIF.DebugLabels.pwn snippets for Atom converted from Sublime Completions converted from https://github.com/Southclaw/pawn-sublime-language
# Converter created by <NAME> "<NAME>
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'DefineDebugLabelType':
'prefix': 'DefineDebugLabelType'
'body': 'DefineDebugLabelType(${1:name[]}, ${2:colour = 0xFFFFFFFF})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'CreateDebugLabel':
'prefix': 'CreateDebugLabel'
'body': 'CreateDebugLabel(${1:type}, ${2:entityid}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:string[] = \"\"}, ${7:worldid = -1}, ${8:interiorid = -1})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'DestroyDebugLabel':
'prefix': 'DestroyDebugLabel'
'body': 'DestroyDebugLabel(${1:labelid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'ShowDebugLabelsForPlayer':
'prefix': 'ShowDebugLabelsForPlayer'
'body': 'ShowDebugLabelsForPlayer(${1:playerid}, ${2:type})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'ShowAllDebugLabelsForPlayer':
'prefix': 'ShowAllDebugLabelsForPlayer'
'body': 'ShowAllDebugLabelsForPlayer(${1:playerid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'HideDebugLabelsForPlayer':
'prefix': 'HideDebugLabelsForPlayer'
'body': 'HideDebugLabelsForPlayer(${1:playerid}, ${2:type})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'HideAllDebugLabelsForPlayer':
'prefix': 'HideAllDebugLabelsForPlayer'
'body': 'HideAllDebugLabelsForPlayer(${1:playerid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'UpdateDebugLabelString':
'prefix': 'UpdateDebugLabelString'
'body': 'UpdateDebugLabelString(${1:labelid}, ${2:string[]})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'IsValidDebugLabel':
'prefix': 'IsValidDebugLabel'
'body': 'IsValidDebugLabel(${1:labelid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'SetDebugLabelPos':
'prefix': 'SetDebugLabelPos'
'body': 'SetDebugLabelPos(${1:labelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'IsPlayerToggledDebugLabels':
'prefix': 'IsPlayerToggledDebugLabels'
'body': 'IsPlayerToggledDebugLabels(${1:playerid}, ${2:type})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'IsPlayerToggledAllDebugLabels':
'prefix': 'IsPlayerToggledAllDebugLabels'
'body': 'IsPlayerToggledAllDebugLabels(${1:playerid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
| true | # SIF.DebugLabels.pwn snippets for Atom converted from Sublime Completions converted from https://github.com/Southclaw/pawn-sublime-language
# Converter created by PI:NAME:<NAME>END_PI "PI:NAME:<NAME>END_PI
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'DefineDebugLabelType':
'prefix': 'DefineDebugLabelType'
'body': 'DefineDebugLabelType(${1:name[]}, ${2:colour = 0xFFFFFFFF})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'CreateDebugLabel':
'prefix': 'CreateDebugLabel'
'body': 'CreateDebugLabel(${1:type}, ${2:entityid}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:string[] = \"\"}, ${7:worldid = -1}, ${8:interiorid = -1})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'DestroyDebugLabel':
'prefix': 'DestroyDebugLabel'
'body': 'DestroyDebugLabel(${1:labelid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'ShowDebugLabelsForPlayer':
'prefix': 'ShowDebugLabelsForPlayer'
'body': 'ShowDebugLabelsForPlayer(${1:playerid}, ${2:type})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'ShowAllDebugLabelsForPlayer':
'prefix': 'ShowAllDebugLabelsForPlayer'
'body': 'ShowAllDebugLabelsForPlayer(${1:playerid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'HideDebugLabelsForPlayer':
'prefix': 'HideDebugLabelsForPlayer'
'body': 'HideDebugLabelsForPlayer(${1:playerid}, ${2:type})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'HideAllDebugLabelsForPlayer':
'prefix': 'HideAllDebugLabelsForPlayer'
'body': 'HideAllDebugLabelsForPlayer(${1:playerid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'UpdateDebugLabelString':
'prefix': 'UpdateDebugLabelString'
'body': 'UpdateDebugLabelString(${1:labelid}, ${2:string[]})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'IsValidDebugLabel':
'prefix': 'IsValidDebugLabel'
'body': 'IsValidDebugLabel(${1:labelid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'SetDebugLabelPos':
'prefix': 'SetDebugLabelPos'
'body': 'SetDebugLabelPos(${1:labelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'IsPlayerToggledDebugLabels':
'prefix': 'IsPlayerToggledDebugLabels'
'body': 'IsPlayerToggledDebugLabels(${1:playerid}, ${2:type})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
'IsPlayerToggledAllDebugLabels':
'prefix': 'IsPlayerToggledAllDebugLabels'
'body': 'IsPlayerToggledAllDebugLabels(${1:playerid})'
'description': 'Function from: SIF'
'descriptionMoreURL': 'https://github.com/Southclaw/SIF'
|
[
{
"context": "kenRequestParams\n body :\n token : 'someInvalidToken'\n\n request.post checkTokenPostParams, (err, re",
"end": 901,
"score": 0.8218567967414856,
"start": 885,
"tag": "PASSWORD",
"value": "someInvalidToken"
}
] | servers/lib/server/handlers/checktoken.test.coffee | ezgikaysi/koding | 1 | { async
expect
request
generateRandomEmail
generateRandomString } = require '../../../testhelper'
{ generateCheckTokenRequestParams
generateCreateTeamRequestParams } = require '../../../testhelper/handler/teamhelper'
JInvitation = require '../../../models/invitation'
# here we have actual tests
runTests = -> describe 'server.handlers.checktoken', ->
it 'should send HTTP 400 if token is not set', (done) ->
checkTokenPostParams = generateCheckTokenRequestParams
body :
token : ''
request.post checkTokenPostParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'token is required'
done()
it 'should send HTTP 404 if token is invalid', (done) ->
checkTokenPostParams = generateCheckTokenRequestParams
body :
token : 'someInvalidToken'
request.post checkTokenPostParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 404
expect(body).to.be.equal 'invitation not found'
done()
it 'should send HTTP 200 if token is valid', (done) ->
token = ''
inviteeEmail = generateRandomEmail()
queue = [
(next) ->
options = { body : { invitees : inviteeEmail } }
generateCreateTeamRequestParams options, (createTeamRequestParams) ->
# expecting HTTP 200 status code
request.post createTeamRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting invitation to be created
params = { email : inviteeEmail }
JInvitation.one params, (err, invitation) ->
expect(err).to.not.exist
expect(invitation).to.exist
expect(invitation.code).to.exist
expect(invitation.email).to.be.equal inviteeEmail
expect(invitation.status).to.be.equal 'pending'
# saving user invitation token
token = invitation.code
next()
(next) ->
# expecting newly created invitation token to be validated
checkTokenPostParams = generateCheckTokenRequestParams
body :
token : token
request.post checkTokenPostParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
]
async.series queue, done
runTests()
| 76305 | { async
expect
request
generateRandomEmail
generateRandomString } = require '../../../testhelper'
{ generateCheckTokenRequestParams
generateCreateTeamRequestParams } = require '../../../testhelper/handler/teamhelper'
JInvitation = require '../../../models/invitation'
# here we have actual tests
runTests = -> describe 'server.handlers.checktoken', ->
it 'should send HTTP 400 if token is not set', (done) ->
checkTokenPostParams = generateCheckTokenRequestParams
body :
token : ''
request.post checkTokenPostParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'token is required'
done()
it 'should send HTTP 404 if token is invalid', (done) ->
checkTokenPostParams = generateCheckTokenRequestParams
body :
token : '<PASSWORD>'
request.post checkTokenPostParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 404
expect(body).to.be.equal 'invitation not found'
done()
it 'should send HTTP 200 if token is valid', (done) ->
token = ''
inviteeEmail = generateRandomEmail()
queue = [
(next) ->
options = { body : { invitees : inviteeEmail } }
generateCreateTeamRequestParams options, (createTeamRequestParams) ->
# expecting HTTP 200 status code
request.post createTeamRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting invitation to be created
params = { email : inviteeEmail }
JInvitation.one params, (err, invitation) ->
expect(err).to.not.exist
expect(invitation).to.exist
expect(invitation.code).to.exist
expect(invitation.email).to.be.equal inviteeEmail
expect(invitation.status).to.be.equal 'pending'
# saving user invitation token
token = invitation.code
next()
(next) ->
# expecting newly created invitation token to be validated
checkTokenPostParams = generateCheckTokenRequestParams
body :
token : token
request.post checkTokenPostParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
]
async.series queue, done
runTests()
| true | { async
expect
request
generateRandomEmail
generateRandomString } = require '../../../testhelper'
{ generateCheckTokenRequestParams
generateCreateTeamRequestParams } = require '../../../testhelper/handler/teamhelper'
JInvitation = require '../../../models/invitation'
# here we have actual tests
runTests = -> describe 'server.handlers.checktoken', ->
it 'should send HTTP 400 if token is not set', (done) ->
checkTokenPostParams = generateCheckTokenRequestParams
body :
token : ''
request.post checkTokenPostParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 400
expect(body).to.be.equal 'token is required'
done()
it 'should send HTTP 404 if token is invalid', (done) ->
checkTokenPostParams = generateCheckTokenRequestParams
body :
token : 'PI:PASSWORD:<PASSWORD>END_PI'
request.post checkTokenPostParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 404
expect(body).to.be.equal 'invitation not found'
done()
it 'should send HTTP 200 if token is valid', (done) ->
token = ''
inviteeEmail = generateRandomEmail()
queue = [
(next) ->
options = { body : { invitees : inviteeEmail } }
generateCreateTeamRequestParams options, (createTeamRequestParams) ->
# expecting HTTP 200 status code
request.post createTeamRequestParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
(next) ->
# expecting invitation to be created
params = { email : inviteeEmail }
JInvitation.one params, (err, invitation) ->
expect(err).to.not.exist
expect(invitation).to.exist
expect(invitation.code).to.exist
expect(invitation.email).to.be.equal inviteeEmail
expect(invitation.status).to.be.equal 'pending'
# saving user invitation token
token = invitation.code
next()
(next) ->
# expecting newly created invitation token to be validated
checkTokenPostParams = generateCheckTokenRequestParams
body :
token : token
request.post checkTokenPostParams, (err, res, body) ->
expect(err).to.not.exist
expect(res.statusCode).to.be.equal 200
next()
]
async.series queue, done
runTests()
|
[
{
"context": "entification = email\n data.credential = password\n data.recaptcha_response_field = recap",
"end": 4125,
"score": 0.6948018670082092,
"start": 4117,
"tag": "PASSWORD",
"value": "password"
}
] | models/auth/auth.coffee | signonsridhar/sridhar_hbs | 0 | define(['bases/model',
'models/user/user',
'models/phone_number/phone_number',
'env!dev:models/auth/fixture/authenticate',
'env!dev:models/auth/fixture/validateaccesskey',
'env!dev:models/auth/fixture/validateemailaccesskey',
'env!dev:models/auth/fixture/keepalive',
'env!dev:models/auth/fixture/forgot_password',
'env!dev:models/auth/fixture/resend_activation',
'env!dev:models/auth/fixture/get_security_questions',
'env!dev:models/auth/fixture/validate_security_questions',
'env!dev:models/phone_number/fixture/get_conference_numbers'
], (BaseModel, User, PhoneNumber)->
Auth = BaseModel.extend({
pages:[
{main:'config'},
{main:'account'},
{main:'group_settings'},
{main:'wizard'},
{main:'settings'}
],
check_page_needs_auth: (param_page)->
for page in this.pages
return true if page.main == param_page.main
return false
auth:null,
get_auth:()->
this.auth = new Auth() if not this.auth?
this.auth
check:(page = null)->
result = $.Deferred()
#return result.resolve()
#we don't need to check the auth of this page
if not this.check_page_needs_auth(page)
return result.resolve()
auth = this.get_auth()
auth.collect_access_info_from_route()
#1. grab access key
access_key = auth.get_access_key()
#alert(access_key)
user_id = auth.get_user_id()
if access_key? #2.1 if we have accessKey, check whether it's still valid
auth.validate_access_key().then((expiry_status)->
result.reject() if not expiry_status
).then(()->
User.get_authenticated_user(auth).then((authenticated_user)->
auth.attr('auth_user', authenticated_user)
authenticated_user
)
).then((auth_user)->
auth.attr('logged_in', true)
auth.extend_access_key()
auth.attr('tenant_id', auth_user.attr('tenant_id'))
auth.attr('account_id', auth_user.attr('account_id'))
auth.attr('tenant_name', auth_user.attr('tenant_name'))
result.resolve(auth_user)
).then((auth_user)->
PhoneNumber.get_conference_numbers({partnerid: auth_user.attr('partner_id')}).then((conf_numbers)->
auth.attr('conference_numbers', conf_numbers )
)
)
else #2.2 if we don't then..okay.
result.reject()
#result.resolve()
result
forgot_password:(email)->
$.post('/bss/authentication?action=forgotpassword',
JSON.stringify({"identification": email, "partner_code": "tmus"})
)
validate_email_access_key:(access_key)->
$.get("/bss/authentication?action=validateemailaccesskey&accesskeyid=#{access_key}").then( (response)->
return response.data
)
}, {
init:()->
collect_access_info_from_route:()->
check_access_info_attrs = ['accesskey', 'userid', 'expires']
result = {}
can.batch.start()
for access_attr in check_access_info_attrs
result[access_attr] = can.route.attr(access_attr) #check url
can.route.attr(access_attr, null) if result[access_attr]? #if it exists, remove it from url
this.set_access_info(result) if _.chain(result).values().compact().value().length
can.batch.stop()
result
login_attempt:(email, password, recaptcha_response_field, recaptcha_challenge_field)->
result = $.Deferred()
data = {}
data.identification = email
data.credential = password
data.recaptcha_response_field = recaptcha_response_field
data.recaptcha_challenge_field = recaptcha_challenge_field
data.partner_code = 'tmus'
$.post("/bss/authentication?action=authenticate",
JSON.stringify(data)
).done((response)->
data = response.data
if response.code == 100
result.resolve(data)
else
result.reject()
).fail((response)->
result.reject(response)
)
result
logout:()->
$.get("/bss/authentication?action=signout&accesskeyid=#{this.get_access_key()}", ()=>
this.logged_out()
)
###
logged in, record the access info and do the redirect to the last visited page.
###
logged_in: (access_info, redirect = true)->
this.set_access_info(access_info)
history = localStorage.getItem('history')
localStorage.removeItem('history')
this.attr('logged_in', true)
if redirect and not history? or window.location.hash != can.route.url({main:'auth', sub:'login'})
window.location = can.route.url({main:'config'})
else if redirect and history
window.location = history
###
log user out, remove the access info, record the last visited place
###
logged_out:()->
localStorage.setItem('history', window.location)
this.set_access_info(null)
return if window.location.hash == can.route.url({main:'auth', sub:'login'})
window.location = can.route.url({main:'auth', sub:'login'})
window.location.reload()
###
record the access info, parses, normalizes the attributes(userid->user_id) and caches it.
This function is the backbone to getting attributes such as access_key, expires
###
set_access_info:(access_info)->
if access_info == null
this.access_info_cache = result
else
normalize_keys = [
['userid', 'user_id'],
['accesskey', 'access_key'],
['expires', 'expires']
]
result = {}
for normalize_key in normalize_keys
backend_attr = normalize_key[0]
our_attr = normalize_key[1]
result[normalize_key[1]] = if access_info[backend_attr]? then access_info[backend_attr] else access_info[our_attr]
this.access_info_cache = result
result = JSON.stringify(result)
localStorage.setItem('access_info', result)
###
get and cache the access info
###
get_access_info:()->
if not this.access_info_cache?
raw_access_info = localStorage.getItem('access_info')
this.access_info_cache = if raw_access_info != 'undefined' then JSON.parse(raw_access_info) else {}
this.access_info_cache = {} if not this.access_info_cache?
this.access_info_cache
get_access_key:()->
this.get_access_info().access_key
get_expiry:()->
return Date.parse(this.get_access_info().expires)
set_expiry:(new_expiry)->
access_info = this.get_access_info()
access_info.expires = new_expiry.toUTCString()
this.set_access_info(access_info)
get_user:()->
this.attr('auth_user')
get_user_id:()->
this.get_access_info().user_id
###
extends the access_key validity, promises a date object which is the new expiry
###
extend_access_key:()->
access_key = this.get_access_key()
$.get("/bss/authentication?action=keepalive&accesskeyid=#{access_key}").then((response)=>
new Date(Date.parse(response.data))
).done( (new_expiry_date)=>
this.set_expiry(new_expiry_date)
)
###
validates whether the access key is still valid or not
###
validate_access_key:()->
access_key = this.get_access_key()
$.get("/bss/authentication?action=validateaccesskey&accesskeyid=#{access_key}").then( (response)->
return response.data
)
transform_security_questions_from:(raw_questions)->
result = {}
result[raw_question.securityQuestionId] = raw_question.securityQuestion for raw_question in raw_questions
return result
transform_security_questions_to:(questions, value_type = 'securityAnswer')->
result = []
for id, value of questions
q = {}
q.securityQuestionId = id
q[value_type] = value
result.push(q)
return result
get_security_questions:(email)->
if email
url = "/bss/user?action=getusersecurityquestion&identification=#{email}"
else
url = "/bss/system?action=getsecurityquestions"
$.get(url).then( (response)=>
return this.transform_security_questions_from(response.data.securityQuestions)
)
validate_security_questions: (email, questions)->
data = {
security_questions: this.transform_security_questions_to(questions),
identification: email
}
$.post('/bss/user?action=validateusersecurityquestion', JSON.stringify(data))
})
window.auth = Auth.get_auth()
return Auth
) | 117075 | define(['bases/model',
'models/user/user',
'models/phone_number/phone_number',
'env!dev:models/auth/fixture/authenticate',
'env!dev:models/auth/fixture/validateaccesskey',
'env!dev:models/auth/fixture/validateemailaccesskey',
'env!dev:models/auth/fixture/keepalive',
'env!dev:models/auth/fixture/forgot_password',
'env!dev:models/auth/fixture/resend_activation',
'env!dev:models/auth/fixture/get_security_questions',
'env!dev:models/auth/fixture/validate_security_questions',
'env!dev:models/phone_number/fixture/get_conference_numbers'
], (BaseModel, User, PhoneNumber)->
Auth = BaseModel.extend({
pages:[
{main:'config'},
{main:'account'},
{main:'group_settings'},
{main:'wizard'},
{main:'settings'}
],
check_page_needs_auth: (param_page)->
for page in this.pages
return true if page.main == param_page.main
return false
auth:null,
get_auth:()->
this.auth = new Auth() if not this.auth?
this.auth
check:(page = null)->
result = $.Deferred()
#return result.resolve()
#we don't need to check the auth of this page
if not this.check_page_needs_auth(page)
return result.resolve()
auth = this.get_auth()
auth.collect_access_info_from_route()
#1. grab access key
access_key = auth.get_access_key()
#alert(access_key)
user_id = auth.get_user_id()
if access_key? #2.1 if we have accessKey, check whether it's still valid
auth.validate_access_key().then((expiry_status)->
result.reject() if not expiry_status
).then(()->
User.get_authenticated_user(auth).then((authenticated_user)->
auth.attr('auth_user', authenticated_user)
authenticated_user
)
).then((auth_user)->
auth.attr('logged_in', true)
auth.extend_access_key()
auth.attr('tenant_id', auth_user.attr('tenant_id'))
auth.attr('account_id', auth_user.attr('account_id'))
auth.attr('tenant_name', auth_user.attr('tenant_name'))
result.resolve(auth_user)
).then((auth_user)->
PhoneNumber.get_conference_numbers({partnerid: auth_user.attr('partner_id')}).then((conf_numbers)->
auth.attr('conference_numbers', conf_numbers )
)
)
else #2.2 if we don't then..okay.
result.reject()
#result.resolve()
result
forgot_password:(email)->
$.post('/bss/authentication?action=forgotpassword',
JSON.stringify({"identification": email, "partner_code": "tmus"})
)
validate_email_access_key:(access_key)->
$.get("/bss/authentication?action=validateemailaccesskey&accesskeyid=#{access_key}").then( (response)->
return response.data
)
}, {
init:()->
collect_access_info_from_route:()->
check_access_info_attrs = ['accesskey', 'userid', 'expires']
result = {}
can.batch.start()
for access_attr in check_access_info_attrs
result[access_attr] = can.route.attr(access_attr) #check url
can.route.attr(access_attr, null) if result[access_attr]? #if it exists, remove it from url
this.set_access_info(result) if _.chain(result).values().compact().value().length
can.batch.stop()
result
login_attempt:(email, password, recaptcha_response_field, recaptcha_challenge_field)->
result = $.Deferred()
data = {}
data.identification = email
data.credential = <PASSWORD>
data.recaptcha_response_field = recaptcha_response_field
data.recaptcha_challenge_field = recaptcha_challenge_field
data.partner_code = 'tmus'
$.post("/bss/authentication?action=authenticate",
JSON.stringify(data)
).done((response)->
data = response.data
if response.code == 100
result.resolve(data)
else
result.reject()
).fail((response)->
result.reject(response)
)
result
logout:()->
$.get("/bss/authentication?action=signout&accesskeyid=#{this.get_access_key()}", ()=>
this.logged_out()
)
###
logged in, record the access info and do the redirect to the last visited page.
###
logged_in: (access_info, redirect = true)->
this.set_access_info(access_info)
history = localStorage.getItem('history')
localStorage.removeItem('history')
this.attr('logged_in', true)
if redirect and not history? or window.location.hash != can.route.url({main:'auth', sub:'login'})
window.location = can.route.url({main:'config'})
else if redirect and history
window.location = history
###
log user out, remove the access info, record the last visited place
###
logged_out:()->
localStorage.setItem('history', window.location)
this.set_access_info(null)
return if window.location.hash == can.route.url({main:'auth', sub:'login'})
window.location = can.route.url({main:'auth', sub:'login'})
window.location.reload()
###
record the access info, parses, normalizes the attributes(userid->user_id) and caches it.
This function is the backbone to getting attributes such as access_key, expires
###
set_access_info:(access_info)->
if access_info == null
this.access_info_cache = result
else
normalize_keys = [
['userid', 'user_id'],
['accesskey', 'access_key'],
['expires', 'expires']
]
result = {}
for normalize_key in normalize_keys
backend_attr = normalize_key[0]
our_attr = normalize_key[1]
result[normalize_key[1]] = if access_info[backend_attr]? then access_info[backend_attr] else access_info[our_attr]
this.access_info_cache = result
result = JSON.stringify(result)
localStorage.setItem('access_info', result)
###
get and cache the access info
###
get_access_info:()->
if not this.access_info_cache?
raw_access_info = localStorage.getItem('access_info')
this.access_info_cache = if raw_access_info != 'undefined' then JSON.parse(raw_access_info) else {}
this.access_info_cache = {} if not this.access_info_cache?
this.access_info_cache
get_access_key:()->
this.get_access_info().access_key
get_expiry:()->
return Date.parse(this.get_access_info().expires)
set_expiry:(new_expiry)->
access_info = this.get_access_info()
access_info.expires = new_expiry.toUTCString()
this.set_access_info(access_info)
get_user:()->
this.attr('auth_user')
get_user_id:()->
this.get_access_info().user_id
###
extends the access_key validity, promises a date object which is the new expiry
###
extend_access_key:()->
access_key = this.get_access_key()
$.get("/bss/authentication?action=keepalive&accesskeyid=#{access_key}").then((response)=>
new Date(Date.parse(response.data))
).done( (new_expiry_date)=>
this.set_expiry(new_expiry_date)
)
###
validates whether the access key is still valid or not
###
validate_access_key:()->
access_key = this.get_access_key()
$.get("/bss/authentication?action=validateaccesskey&accesskeyid=#{access_key}").then( (response)->
return response.data
)
transform_security_questions_from:(raw_questions)->
result = {}
result[raw_question.securityQuestionId] = raw_question.securityQuestion for raw_question in raw_questions
return result
transform_security_questions_to:(questions, value_type = 'securityAnswer')->
result = []
for id, value of questions
q = {}
q.securityQuestionId = id
q[value_type] = value
result.push(q)
return result
get_security_questions:(email)->
if email
url = "/bss/user?action=getusersecurityquestion&identification=#{email}"
else
url = "/bss/system?action=getsecurityquestions"
$.get(url).then( (response)=>
return this.transform_security_questions_from(response.data.securityQuestions)
)
validate_security_questions: (email, questions)->
data = {
security_questions: this.transform_security_questions_to(questions),
identification: email
}
$.post('/bss/user?action=validateusersecurityquestion', JSON.stringify(data))
})
window.auth = Auth.get_auth()
return Auth
) | true | define(['bases/model',
'models/user/user',
'models/phone_number/phone_number',
'env!dev:models/auth/fixture/authenticate',
'env!dev:models/auth/fixture/validateaccesskey',
'env!dev:models/auth/fixture/validateemailaccesskey',
'env!dev:models/auth/fixture/keepalive',
'env!dev:models/auth/fixture/forgot_password',
'env!dev:models/auth/fixture/resend_activation',
'env!dev:models/auth/fixture/get_security_questions',
'env!dev:models/auth/fixture/validate_security_questions',
'env!dev:models/phone_number/fixture/get_conference_numbers'
], (BaseModel, User, PhoneNumber)->
Auth = BaseModel.extend({
pages:[
{main:'config'},
{main:'account'},
{main:'group_settings'},
{main:'wizard'},
{main:'settings'}
],
check_page_needs_auth: (param_page)->
for page in this.pages
return true if page.main == param_page.main
return false
auth:null,
get_auth:()->
this.auth = new Auth() if not this.auth?
this.auth
check:(page = null)->
result = $.Deferred()
#return result.resolve()
#we don't need to check the auth of this page
if not this.check_page_needs_auth(page)
return result.resolve()
auth = this.get_auth()
auth.collect_access_info_from_route()
#1. grab access key
access_key = auth.get_access_key()
#alert(access_key)
user_id = auth.get_user_id()
if access_key? #2.1 if we have accessKey, check whether it's still valid
auth.validate_access_key().then((expiry_status)->
result.reject() if not expiry_status
).then(()->
User.get_authenticated_user(auth).then((authenticated_user)->
auth.attr('auth_user', authenticated_user)
authenticated_user
)
).then((auth_user)->
auth.attr('logged_in', true)
auth.extend_access_key()
auth.attr('tenant_id', auth_user.attr('tenant_id'))
auth.attr('account_id', auth_user.attr('account_id'))
auth.attr('tenant_name', auth_user.attr('tenant_name'))
result.resolve(auth_user)
).then((auth_user)->
PhoneNumber.get_conference_numbers({partnerid: auth_user.attr('partner_id')}).then((conf_numbers)->
auth.attr('conference_numbers', conf_numbers )
)
)
else #2.2 if we don't then..okay.
result.reject()
#result.resolve()
result
forgot_password:(email)->
$.post('/bss/authentication?action=forgotpassword',
JSON.stringify({"identification": email, "partner_code": "tmus"})
)
validate_email_access_key:(access_key)->
$.get("/bss/authentication?action=validateemailaccesskey&accesskeyid=#{access_key}").then( (response)->
return response.data
)
}, {
init:()->
collect_access_info_from_route:()->
check_access_info_attrs = ['accesskey', 'userid', 'expires']
result = {}
can.batch.start()
for access_attr in check_access_info_attrs
result[access_attr] = can.route.attr(access_attr) #check url
can.route.attr(access_attr, null) if result[access_attr]? #if it exists, remove it from url
this.set_access_info(result) if _.chain(result).values().compact().value().length
can.batch.stop()
result
login_attempt:(email, password, recaptcha_response_field, recaptcha_challenge_field)->
result = $.Deferred()
data = {}
data.identification = email
data.credential = PI:PASSWORD:<PASSWORD>END_PI
data.recaptcha_response_field = recaptcha_response_field
data.recaptcha_challenge_field = recaptcha_challenge_field
data.partner_code = 'tmus'
$.post("/bss/authentication?action=authenticate",
JSON.stringify(data)
).done((response)->
data = response.data
if response.code == 100
result.resolve(data)
else
result.reject()
).fail((response)->
result.reject(response)
)
result
logout:()->
$.get("/bss/authentication?action=signout&accesskeyid=#{this.get_access_key()}", ()=>
this.logged_out()
)
###
logged in, record the access info and do the redirect to the last visited page.
###
logged_in: (access_info, redirect = true)->
this.set_access_info(access_info)
history = localStorage.getItem('history')
localStorage.removeItem('history')
this.attr('logged_in', true)
if redirect and not history? or window.location.hash != can.route.url({main:'auth', sub:'login'})
window.location = can.route.url({main:'config'})
else if redirect and history
window.location = history
###
log user out, remove the access info, record the last visited place
###
logged_out:()->
localStorage.setItem('history', window.location)
this.set_access_info(null)
return if window.location.hash == can.route.url({main:'auth', sub:'login'})
window.location = can.route.url({main:'auth', sub:'login'})
window.location.reload()
###
record the access info, parses, normalizes the attributes(userid->user_id) and caches it.
This function is the backbone to getting attributes such as access_key, expires
###
set_access_info:(access_info)->
if access_info == null
this.access_info_cache = result
else
normalize_keys = [
['userid', 'user_id'],
['accesskey', 'access_key'],
['expires', 'expires']
]
result = {}
for normalize_key in normalize_keys
backend_attr = normalize_key[0]
our_attr = normalize_key[1]
result[normalize_key[1]] = if access_info[backend_attr]? then access_info[backend_attr] else access_info[our_attr]
this.access_info_cache = result
result = JSON.stringify(result)
localStorage.setItem('access_info', result)
###
get and cache the access info
###
get_access_info:()->
if not this.access_info_cache?
raw_access_info = localStorage.getItem('access_info')
this.access_info_cache = if raw_access_info != 'undefined' then JSON.parse(raw_access_info) else {}
this.access_info_cache = {} if not this.access_info_cache?
this.access_info_cache
get_access_key:()->
this.get_access_info().access_key
get_expiry:()->
return Date.parse(this.get_access_info().expires)
set_expiry:(new_expiry)->
access_info = this.get_access_info()
access_info.expires = new_expiry.toUTCString()
this.set_access_info(access_info)
get_user:()->
this.attr('auth_user')
get_user_id:()->
this.get_access_info().user_id
###
extends the access_key validity, promises a date object which is the new expiry
###
extend_access_key:()->
access_key = this.get_access_key()
$.get("/bss/authentication?action=keepalive&accesskeyid=#{access_key}").then((response)=>
new Date(Date.parse(response.data))
).done( (new_expiry_date)=>
this.set_expiry(new_expiry_date)
)
###
validates whether the access key is still valid or not
###
validate_access_key:()->
access_key = this.get_access_key()
$.get("/bss/authentication?action=validateaccesskey&accesskeyid=#{access_key}").then( (response)->
return response.data
)
transform_security_questions_from:(raw_questions)->
result = {}
result[raw_question.securityQuestionId] = raw_question.securityQuestion for raw_question in raw_questions
return result
transform_security_questions_to:(questions, value_type = 'securityAnswer')->
result = []
for id, value of questions
q = {}
q.securityQuestionId = id
q[value_type] = value
result.push(q)
return result
get_security_questions:(email)->
if email
url = "/bss/user?action=getusersecurityquestion&identification=#{email}"
else
url = "/bss/system?action=getsecurityquestions"
$.get(url).then( (response)=>
return this.transform_security_questions_from(response.data.securityQuestions)
)
validate_security_questions: (email, questions)->
data = {
security_questions: this.transform_security_questions_to(questions),
identification: email
}
$.post('/bss/user?action=validateusersecurityquestion', JSON.stringify(data))
})
window.auth = Auth.get_auth()
return Auth
) |
[
{
"context": "# (c) Wilson Woodcock Berkow. MIT License.\n\n# parser0.6.coffee\n\n\"use strict\"\nu",
"end": 28,
"score": 0.9998385906219482,
"start": 6,
"tag": "NAME",
"value": "Wilson Woodcock Berkow"
}
] | makeParser.coffee | WilsonBerkow/makeParser-coffee | 0 | # (c) Wilson Woodcock Berkow. MIT License.
# parser0.6.coffee
"use strict"
util = Object.freeze # This is frozen as it is publicly added to makeParser.
"y": (le) -> # Y-Combinator
((f) ->
f(f)
) (f) ->
le (args...) ->
f(f).call(this, args...)
"loop": (f) -> util.y(f)() # For functional/expressional loops. Interface: util.loop (repeat) => (i = 0, data, ...) => ...
"makeObject": (proto, ownProps) ->
result = Object.create proto
for prop, val of ownProps
if ownProps.hasOwnProperty prop
result[prop] = val
result
"typeOf": (x) ->
if Array.isArray(x) then "array"
else if Object.prototype.toString.call(x) is "[object RegExp]" then "regexp"
else if x is null then "null"
else `typeof x`
cons = (x, xs) -> [x, xs...]
head = (xs) -> xs[0]
tail = (xs) -> xs.slice(1)
last = (xs) -> xs[xs.length - 1]
empty = (l) -> l.length is 0
concat = (xs, ys) -> xs.concat(ys)
parsingError = (msg) -> {"name": "Parsing Error", "msg": msg}
isParsingError = (err) -> err?.name is "Parsing Error"
makeParsingInstance = do -> # The parsing instance contains methods for parsing, and the information for parsing (e.g. source being parsed and index/position in it).
isParser = (x) -> util.typeOf(x) is "function" and x.isParser
makeSureParser = (x) -> if isParser(x) then x else makeParser(x) # This must be used here to prevent infinite recursion between @require and makeParser, but does not need to be used anywhere else.
parserUsage = (which) -> # This is the abstraction for @optional and @require
(a, b, c) ->
# Handle all arrangements of arguments:
# (The arrangements are (lookFor, found, notFound)
# (lookFor, {found, notFound, dontAdvance, args})
# ({lookFor, found, notFound, dontAdvance, args}))
if util.typeOf(a) in ["number", "null", "undefined"]
@throw "Invalid argument to @#{which}.\n\nJSON.stringify(the argument) is #{JSON.stringify a}.\n\n\nString(the argument) is #{a}"
if util.typeOf(a) is "object"
prsr = makeSureParser a.lookFor
other = a
else if util.typeOf(b) is "object"
prsr = makeSureParser a
other = b
else
prsr = makeSureParser a
other = {
found: b
notFound: c
args: []
}
if which is "optional"
other.notFound ?= -> # This makes nothing happen if the thing isnt found.
prsr.requireFor(@, other)
proto = Object.freeze
# FIRST ARE THE UTIL-LIKE FUNCTIONS:
"loop": util.loop
"beginsWith": (s) -> # TODO: test @test so that it can be used instead.
if util.typeOf(@str.startsWith) is "function"
@str.slice(@index).startsWith(s)
else
@loop (repeat) => (i = @index) =>
if @str.slice(0, i + 1) is s
true
else
repeat(i + 1)
"char": (x = 0) -> @str[@index + x] # Gets the current char, or one before/after it depending on -x-.
"soFar": -> @str[@startIndex...@index]
"until": (x) -> if util.typeOf(x) is "string" then @str[@index...@str.indexOf(x, @index)] else @str[@index...(@index + x)]
# NOW THE PARSING FUNCTIONS:
"test": (prsr) ->
!!(prsr.opt()(@str, startAt: @index))
"advance": (x) ->
@index += switch util.typeOf x
when "string" then x.length
when "number" then x
when "undefined" then 1
"advancePast": (s) -> # Consider removing this. I have never had a need for it and do not see one.
while not @beginsWith(s)
@advance()
@advance(s)
"throw": (msg) ->
throw parsingError msg
"reqBool": (bool, err) ->
if !bool
throw parsingError(err ? "Test to @reqBool failed.")
"caseParse": (o, onNoMatch) -> # Each key of -o- is a string to optionally parse.
@loop (repeat) => (keys = Object.keys o) =>
if empty(keys)
if onNoMatch then onNoMatch()
else @throw("Expected one of the following: " + JSON.stringify(Object.keys(o), null, 4))
else
fn = o[head(keys)]
@optional head(keys),
found: => if fn.isParser then @require fn else fn() # TODO: Should it always do @require(fn), never just fn()?
notFound: => repeat tail keys
"white": (req) ->
if req is "req"
@require makeParser.getWhite
else
@optional makeParser.getWhite
"optional": parserUsage("optional")
"require": parserUsage("require")
"end": ->
if @char() isnt undefined
throw parsingError("Expected end of input, instead found '#{@char()}'")
(str, i = 0) ->
if util.typeOf(str) isnt "string"
throw new Error "First argument to a parser must be a string. Instead,\n#{str}\nof type #{util.typeOf(str)} was found."
util.makeObject(proto, {
"index": i
"str": str
"startIndex": i
})
parserListToString = (arr) -> # For stringifying an array of parser options as to display the name of each parser.
util.loop (repeat) => (options = arr, str = "[") =>
if empty options
str.slice(0, -2) + "]" # The slice gets rid of the ", " at the end.
else
x = head options
switch util.typeOf(x)
when "function"
if x.isParser
repeat(tail(options), str + "#{x.parserName}, ")
else
repeat(tail(options), str + "(not-yet-named fn parser), ")
when "regexp"
repeat(tail(options), str + x.toString() + ", ")
when "undefined"
repeat(tail(options), str + "(undefined), ")
else
repeat(tail(options), str + JSON.stringify(x) + ", ")
@makeParser = (args...) ->
# Handle args combos:
if args.length > 1
name = args[0]
x = args[1]
else if args.length is 1
x = args[0]
else
throw new Error("makeParser requires arguments")
### Overloads of x in -makeParser-:
string: x is the string to be required
array: x is a list of options, where one must match. Earlier elements have higher precedence.
regexp: if the string doesn't match x, it throws, otherwise x.exec(@str) is skipped returned.
parser-function: x is just returned
plain function: x is used to make a parser-function. (Returns a NEW function that is a parser
function which USES the input function by applying it to an instance. It also
sets some methods (requireFor, ...) and sets .isParser to true).
###
# Handles all overloads:
if util.typeOf(x) is "string" # This is the simplest overload: just require it (and advance past it). RETURNS the string.
makeParser (name ? "string-form-parser: #{JSON.stringify x}"), ->
if @beginsWith x
@advance x.length # (Can't do @require(x) here because x is a string and this is the def of @require(x) for strings).
@soFar()
else
@throw """Expected "#{x}" at index #{@index} and instead found #{@char()} in string:\n#{JSON.stringify(@str)}"""
else if util.typeOf(x) is "array" # Each element of the array is an OPTION, and this requires one to match. RETURNS whatever the matched option returns.
makeParser (name ? "array-form-parser: #{parserListToString x}"), (args) ->
errors = []
@loop (repeat) => (i = 0, errors = []) =>
if i >= x.length
@throw "(From #{name}) Expected one of the following: #{parserListToString(x)} in string:\n#{@str}\nat index #{@index}. Errors were #{JSON.stringify errors, null, 4}"
@optional x[i],
args: args
notFound: (err) ->
repeat(i + 1, errors.concat([err]))
else if util.typeOf(x) is "regexp"
makeParser (name ? "regexp-form-parser: #{x}"), ->
val = x.exec @str[@index...]
if val is null
@throw "Expected the regexp pattern " + x.toString() + " in string ``#{@str}'' at index #{@index}"
else
@require val[0]
else if util.typeOf(x) is "function" # This is the primary form, which every other overload is defined in terms of.
if x.isParser
makeParser (name ? "copy of: #{x.parserName}"), ->
@require x
else # This is the usual case. -x- is a function intended to be made into a parser.
### The parser (in the variable -parser-), can have the following arrangments of arguments:
1. "string"
2. ({string, startAt, args, found, notFound})
3. (string, {startAt, args, found, notFound})
4. (string, startAt, {args, found, notFound})
5. (string, startAt, args, {found, notFound})
6. (string, startAt, args, found, notFound)
###
useParserAbstraction = (callback) ->
(args...) ->
if args.length is 1
if util.typeOf(args[0]) is "string"
str = args[0]
else
str = args[0].lookFor
startAt = args[0].startAt
other = args[0]
else if args.length is 2
str = args[0]
if util.typeOf(args[1]) is "object"
startAt = args[1].startAt
other = args[1]
else
startAt = args[1]
else if args.length is 3
str = args[0]
startAt = args[1]
other = args[2]
callback(str, startAt, other)
parser = useParserAbstraction (str, startAt, other) ->
parser.requireFor(makeParsingInstance(str, startAt), other)
parser.wholeStr = useParserAbstraction (str, startAt, other) ->
parsingInstance = makeParsingInstance(str, startAt)
result = parser.requireFor(parsingInstance, other)
if parsingInstance.index isnt parsingInstance.str.length
throw parsingError("Expected end of string index #{parsingInstance.index}.")
else
result
parser.requireFor = (origInstance, other = {}) ->
if util.typeOf(other.notFound) is "function"
# This branch is effectively the @optional function.
instance = makeParsingInstance(origInstance.str, origInstance.index)
try
val = x.call(instance, other.args)
catch e
if isParsingError e
err = e
else
throw e
if err
other.notFound(err)
else
origInstance.index = instance.index unless other.dontAdvance # This is what synchronizes the referenced instance with the one it's used in, so that @require()ing another function also advances @index in the current instance.
if other.found
other.found(val)
else
val
else
instance = makeParsingInstance(origInstance.str, origInstance.index)
val = x.call(instance, other.args)
if not other.dontAdvance
origInstance.index = instance.index
if other.found
other.found(val)
else
val
parser.makeNamed = (name) ->
newPrsr = makeParser name, (args) ->
@require parser, args: args
parser.return = (fn) ->
makeParser parser.parserName, (args) ->
@require parser, args: args, found: fn
parser.then = (x) -> # Note: This returns the result of the LAST parser (e.g. in getFoo.then(getBar).then(getBaz)), while makeParser.seq returns a list of the results.
makeParser ->
@require parser
@require x
parser.opt = ->
makeParser (parser.parserName + "--opt"), ->
@optional parser
parser.isParser = true
parser.parserName = name ? "a parser"
Object.freeze parser
else
throw new Error("The -makeParser- function requires argument(s).")
makeParser.util = util
makeParser.seq = (xs...) ->
makeParser ->
findings = []
@loop (repeat) => (i = 0) =>
if i < xs.length
findings.push(@require xs[i])
repeat(i + 1)
else
findings.src = @soFar()
findings
makeParser.many = (x, other) -> # TODO: test other.amt. TODO: CHANGE THE ARGS PROPERTY: make it take an ARRAY of args, not an object of args.
if not x?
throw new Error "Invalid first argument to makeParser.many: #{x}"
if util.typeOf(other.amt) is "number" # TODO: Consider putting this case into another function, like -makeParser.several-, so it is very clear which is being used.
parseInner = makeParser (args = {}) ->
args.amtLeft ?= other?.amt
if args.amtLeft <= 0
[]
else
first = @require x
rest = []
if args.amtLeft is 1
[first]
else
@optional ->
if other?.sep
@require other?.sep
rest = @require parseInner,
args:
amtLeft: args.amtLeft - 1
cons first, rest
makeParser ->
if other?.start
@require other.start
findings = @require parseInner
if other?.end
@require other.end
findings
else
parseInner = makeParser -> # This parses it without the start- and end-sequence so that it can have a simple recursive structure.
first = @require x
rest = []
@optional ->
if other?.sep
@require other?.sep
rest = @require parseInner
cons first, rest
makeParser ->
if other?.start
@require other.start
findings = if other?.allowEmpty then @optional(parseInner) else @require(parseInner)
if other?.end
@require other.end
findings ? [] # When elements are optional and not there, findings will be `undefined` as it is returned from the @optional(parserInner) invoc above.
makeParser.cases = (a, b) ->
if util.typeOf(a) is "string"
name = a
o = b
else
o = a
makeParser name, ->
@caseParse(o)
makeParser.getWhite = makeParser "getWhite", /^[\s]+/
makeParser.jsonParsers = do -> # TODO: REALLY TEST ALL OF THESE INDIVIDUALLY AND CAREFULLY (I just (6/20/14) fixed a stupid, glaring would-be reference error). Also, CHANGE THE NAME. Its too long. make it makeParser.json
getString = makeParser "getString", (args) ->
@require '"'
@loop (repeat) => =>
if @char() is undefined
@throw "Unterminated JSON string literal"
if @char() is '"' and (@char(-1) isnt "\\" or @char(-2) is "\\") # This means that the string has been terminated.
@advance()
JSON.parse(@soFar())
else
@advance()
repeat()
getDigits = makeParser "getDigits", /^[\d]+/ # This returns the string of digits.
getNumber = makeParser "getNumber", ->
@optional ["+", "-"]
@require getDigits
@optional ->
@require "."
@require getDigits
@optional ->
@require "e"
@optional ["+", "-"]
@require getDigits
JSON.parse(@soFar())
getBoolean = makeParser "getBoolean", ->
@caseParse
"true": -> true
"false": -> false
getNull = makeParser("getNull", "null").return(-> null)
getArray = do ->
arrayBody = makeParser ->
# This parses everything betweten the [ and ] in an array, and requires there to be at least one item.
fst = @require getJson
@white()
@optional ",",
found: => # Handles the rest of the elements.
@white()
rest = @require arrayBody
cons fst, rest
notFound: => [fst] # This means -fst- is the only element.
makeParser "getArray", ->
@require "["
@white()
val = @optional arrayBody
@white()
@require "]"
val ? []
getObject = do ->
getObjectPair = makeParser -> # Parses "PROP": VAL forms.
prop = @require string
@require ":"
@white()
val = @require getJson
{"prop": prop, "val": val, "src": @soFar()}
getObjectBody = makeParser ->
@optional getObjectPair,
found: (fst) =>
rest = [] # This is the default if there is nothing else in the object
@optional ->
@white()
@require ","
@white()
rest = @require getObjectBody
cons fst, rest
notFound: => [] # No pairs in the list corresponds to an empty object.
makeParser "getObject", ->
# Validate structure and get data:
@require "{"
@white()
pairs = @require getObjectBody
@white()
@require "}"
# Construct object:
obj = {}
@loop (repeat) => (i = 0) =>
if i < pairs.length
obj[pairs[i].prop] = pairs[i].val
repeat(i + 1)
obj
getJson = makeParser "getJson", [getNumber, getNull, getBoolean, getArray, getString, getObject]
Object.freeze {
"getJson": getJson
"getString": getString
"getNumber": getNumber
"getBoolean": getBoolean
"getNull": getNull
"getArray": getArray
"getObject": getObject
}
Object.freeze makeParser
### TODO: Things to THINK ABOUT for the future:
-Allowing some easier way of creating parsers like ones from makeParser
out of parsers which are just plain functions (i.e. some functions
which create makeParser-parsers from something like JSON.parse).
-Making parsers be objects (not functions) with these methods:
"atStart": Just run the parser (equivalent to what a parser is now)
"wholeStr": Runs the parser and makes sure that the content occupies the ENTIRE string (and there is no shit at the end of the string).
or at least having the latter feature.
-Make the "args" property of the options object in @require and @optional calls be an array of args
rather than an object to be passed as the first arg.
-ADDITIONALLY: Should makeParser record a stack so that when this error
function is called, you can expand the error and observe which parsers
were called, without having to look at the actual stack and just observing
a bunch of calls to anonymous function or to `parser` or `requireFor` and
shit?
### | 140212 | # (c) <NAME>. MIT License.
# parser0.6.coffee
"use strict"
util = Object.freeze # This is frozen as it is publicly added to makeParser.
"y": (le) -> # Y-Combinator
((f) ->
f(f)
) (f) ->
le (args...) ->
f(f).call(this, args...)
"loop": (f) -> util.y(f)() # For functional/expressional loops. Interface: util.loop (repeat) => (i = 0, data, ...) => ...
"makeObject": (proto, ownProps) ->
result = Object.create proto
for prop, val of ownProps
if ownProps.hasOwnProperty prop
result[prop] = val
result
"typeOf": (x) ->
if Array.isArray(x) then "array"
else if Object.prototype.toString.call(x) is "[object RegExp]" then "regexp"
else if x is null then "null"
else `typeof x`
cons = (x, xs) -> [x, xs...]
head = (xs) -> xs[0]
tail = (xs) -> xs.slice(1)
last = (xs) -> xs[xs.length - 1]
empty = (l) -> l.length is 0
concat = (xs, ys) -> xs.concat(ys)
parsingError = (msg) -> {"name": "Parsing Error", "msg": msg}
isParsingError = (err) -> err?.name is "Parsing Error"
makeParsingInstance = do -> # The parsing instance contains methods for parsing, and the information for parsing (e.g. source being parsed and index/position in it).
isParser = (x) -> util.typeOf(x) is "function" and x.isParser
makeSureParser = (x) -> if isParser(x) then x else makeParser(x) # This must be used here to prevent infinite recursion between @require and makeParser, but does not need to be used anywhere else.
parserUsage = (which) -> # This is the abstraction for @optional and @require
(a, b, c) ->
# Handle all arrangements of arguments:
# (The arrangements are (lookFor, found, notFound)
# (lookFor, {found, notFound, dontAdvance, args})
# ({lookFor, found, notFound, dontAdvance, args}))
if util.typeOf(a) in ["number", "null", "undefined"]
@throw "Invalid argument to @#{which}.\n\nJSON.stringify(the argument) is #{JSON.stringify a}.\n\n\nString(the argument) is #{a}"
if util.typeOf(a) is "object"
prsr = makeSureParser a.lookFor
other = a
else if util.typeOf(b) is "object"
prsr = makeSureParser a
other = b
else
prsr = makeSureParser a
other = {
found: b
notFound: c
args: []
}
if which is "optional"
other.notFound ?= -> # This makes nothing happen if the thing isnt found.
prsr.requireFor(@, other)
proto = Object.freeze
# FIRST ARE THE UTIL-LIKE FUNCTIONS:
"loop": util.loop
"beginsWith": (s) -> # TODO: test @test so that it can be used instead.
if util.typeOf(@str.startsWith) is "function"
@str.slice(@index).startsWith(s)
else
@loop (repeat) => (i = @index) =>
if @str.slice(0, i + 1) is s
true
else
repeat(i + 1)
"char": (x = 0) -> @str[@index + x] # Gets the current char, or one before/after it depending on -x-.
"soFar": -> @str[@startIndex...@index]
"until": (x) -> if util.typeOf(x) is "string" then @str[@index...@str.indexOf(x, @index)] else @str[@index...(@index + x)]
# NOW THE PARSING FUNCTIONS:
"test": (prsr) ->
!!(prsr.opt()(@str, startAt: @index))
"advance": (x) ->
@index += switch util.typeOf x
when "string" then x.length
when "number" then x
when "undefined" then 1
"advancePast": (s) -> # Consider removing this. I have never had a need for it and do not see one.
while not @beginsWith(s)
@advance()
@advance(s)
"throw": (msg) ->
throw parsingError msg
"reqBool": (bool, err) ->
if !bool
throw parsingError(err ? "Test to @reqBool failed.")
"caseParse": (o, onNoMatch) -> # Each key of -o- is a string to optionally parse.
@loop (repeat) => (keys = Object.keys o) =>
if empty(keys)
if onNoMatch then onNoMatch()
else @throw("Expected one of the following: " + JSON.stringify(Object.keys(o), null, 4))
else
fn = o[head(keys)]
@optional head(keys),
found: => if fn.isParser then @require fn else fn() # TODO: Should it always do @require(fn), never just fn()?
notFound: => repeat tail keys
"white": (req) ->
if req is "req"
@require makeParser.getWhite
else
@optional makeParser.getWhite
"optional": parserUsage("optional")
"require": parserUsage("require")
"end": ->
if @char() isnt undefined
throw parsingError("Expected end of input, instead found '#{@char()}'")
(str, i = 0) ->
if util.typeOf(str) isnt "string"
throw new Error "First argument to a parser must be a string. Instead,\n#{str}\nof type #{util.typeOf(str)} was found."
util.makeObject(proto, {
"index": i
"str": str
"startIndex": i
})
parserListToString = (arr) -> # For stringifying an array of parser options as to display the name of each parser.
  # NOTE(review): an empty -arr- yields "]" (not "[]") because the slice below
  # also eats the opening "[" — confirm acceptable for error messages.
  util.loop (repeat) => (options = arr, str = "[") =>
    if empty options
      str.slice(0, -2) + "]" # The slice gets rid of the ", " at the end.
    else
      x = head options
      # Append a readable description of this option, then recurse on the rest.
      switch util.typeOf(x)
        when "function"
          if x.isParser
            repeat(tail(options), str + "#{x.parserName}, ")
          else
            repeat(tail(options), str + "(not-yet-named fn parser), ")
        when "regexp"
          repeat(tail(options), str + x.toString() + ", ")
        when "undefined"
          repeat(tail(options), str + "(undefined), ")
        else
          # Strings, numbers, etc. are shown in their JSON form.
          repeat(tail(options), str + JSON.stringify(x) + ", ")
# The public entry point: builds a parser from a string, array, regexp, or
# function specification. Attached to the enclosing context (@).
@makeParser = (args...) ->
  # Handle args combos: (name, spec) or just (spec).
  if args.length > 1
    name = args[0]
    x = args[1]
  else if args.length is 1
    x = args[0]
  else
    throw new Error("makeParser requires arguments")
  ### Overloads of x in -makeParser-:
      string: x is the string to be required
      array: x is a list of options, where one must match. Earlier elements have higher precedence.
      regexp: if the string doesn't match x, it throws, otherwise x.exec(@str) is skipped and returned.
      parser-function: x is just returned (wrapped in a copying parser)
      plain function: x is used to make a parser-function. (Returns a NEW function that is a parser
                      function which USES the input function by applying it to an instance. It also
                      sets some methods (requireFor, ...) and sets .isParser to true).
  ###
  # Handles all overloads:
  if util.typeOf(x) is "string" # This is the simplest overload: just require it (and advance past it). RETURNS the string.
    makeParser (name ? "string-form-parser: #{JSON.stringify x}"), ->
      if @beginsWith x
        @advance x.length # (Can't do @require(x) here because x is a string and this is the def of @require(x) for strings).
        @soFar()
      else
        @throw """Expected "#{x}" at index #{@index} and instead found #{@char()} in string:\n#{JSON.stringify(@str)}"""
  else if util.typeOf(x) is "array" # Each element of the array is an OPTION, and this requires one to match. RETURNS whatever the matched option returns.
    makeParser (name ? "array-form-parser: #{parserListToString x}"), (args) ->
      errors = [] # NOTE(review): immediately shadowed by the loop parameter default below; appears to be dead.
      @loop (repeat) => (i = 0, errors = []) =>
        if i >= x.length
          @throw "(From #{name}) Expected one of the following: #{parserListToString(x)} in string:\n#{@str}\nat index #{@index}. Errors were #{JSON.stringify errors, null, 4}"
        # Try option i; on failure, record the error and try the next one.
        @optional x[i],
          args: args
          notFound: (err) ->
            repeat(i + 1, errors.concat([err]))
  else if util.typeOf(x) is "regexp"
    makeParser (name ? "regexp-form-parser: #{x}"), ->
      # Match only against the unconsumed remainder of the input.
      val = x.exec @str[@index...]
      if val is null
        @throw "Expected the regexp pattern " + x.toString() + " in string ``#{@str}'' at index #{@index}"
      else
        # Require the matched text as a string so @index advances past it.
        @require val[0]
  else if util.typeOf(x) is "function" # This is the primary form, which every other overload is defined in terms of.
    if x.isParser
      makeParser (name ? "copy of: #{x.parserName}"), ->
        @require x
    else # This is the usual case. -x- is a function intended to be made into a parser.
      ### The parser (in the variable -parser-), can have the following arrangements of arguments:
          1. "string"
          2. ({string, startAt, args, found, notFound})
          3. (string, {startAt, args, found, notFound})
          4. (string, startAt, {args, found, notFound})
          5. (string, startAt, args, {found, notFound})
          6. (string, startAt, args, found, notFound)
      ###
      # Normalizes the above call shapes into (str, startAt, other) for -callback-.
      useParserAbstraction = (callback) ->
        (args...) ->
          if args.length is 1
            if util.typeOf(args[0]) is "string"
              str = args[0]
            else
              str = args[0].lookFor
              startAt = args[0].startAt
              other = args[0]
          else if args.length is 2
            str = args[0]
            if util.typeOf(args[1]) is "object"
              startAt = args[1].startAt
              other = args[1]
            else
              startAt = args[1]
          else if args.length is 3
            str = args[0]
            startAt = args[1]
            other = args[2]
          callback(str, startAt, other)
      # Calling the parser directly runs it on a fresh instance.
      parser = useParserAbstraction (str, startAt, other) ->
        parser.requireFor(makeParsingInstance(str, startAt), other)
      # Like calling the parser, but additionally demands the match consume the whole string.
      parser.wholeStr = useParserAbstraction (str, startAt, other) ->
        parsingInstance = makeParsingInstance(str, startAt)
        result = parser.requireFor(parsingInstance, other)
        if parsingInstance.index isnt parsingInstance.str.length
          throw parsingError("Expected end of string index #{parsingInstance.index}.")
        else
          result
      # Core runner: applies -x- on a scratch instance and syncs -origInstance-.
      # A function-valued other.notFound turns this into optional (errors swallowed).
      parser.requireFor = (origInstance, other = {}) ->
        if util.typeOf(other.notFound) is "function"
          # This branch is effectively the @optional function.
          instance = makeParsingInstance(origInstance.str, origInstance.index)
          try
            val = x.call(instance, other.args)
          catch e
            if isParsingError e
              err = e
            else
              throw e
          if err
            other.notFound(err)
          else
            origInstance.index = instance.index unless other.dontAdvance # This is what synchronizes the referenced instance with the one it's used in, so that @require()ing another function also advances @index in the current instance.
            if other.found
              other.found(val)
            else
              val
        else
          instance = makeParsingInstance(origInstance.str, origInstance.index)
          val = x.call(instance, other.args)
          if not other.dontAdvance
            origInstance.index = instance.index
          if other.found
            other.found(val)
          else
            val
      parser.makeNamed = (name) ->
        # NOTE(review): -newPrsr- is returned via the implicit value of the
        # assignment expression; looks like a forgotten explicit return — confirm.
        newPrsr = makeParser name, (args) ->
          @require parser, args: args
      parser.return = (fn) -> # New parser that post-processes this parser's result with -fn-.
        makeParser parser.parserName, (args) ->
          @require parser, args: args, found: fn
      parser.then = (x) -> # Note: This returns the result of the LAST parser (e.g. in getFoo.then(getBar).then(getBaz)), while makeParser.seq returns a list of the results.
        makeParser ->
          @require parser
          @require x
      parser.opt = -> # New parser that succeeds (returning undefined) where this one would throw.
        makeParser (parser.parserName + "--opt"), ->
          @optional parser
      parser.isParser = true
      parser.parserName = name ? "a parser"
      Object.freeze parser
  else
    # NOTE(review): reached when -x- is e.g. a number/boolean, so the message
    # "requires argument(s)" is misleading — arguments WERE given, just of an
    # unsupported type. Confirm and reword.
    throw new Error("The -makeParser- function requires argument(s).")
makeParser.util = util
# Sequence combinator: runs each given parser in order and returns the list of
# their results, with the consumed source attached as .src.
makeParser.seq = (xs...) ->
  makeParser ->
    results = []
    for prsr in xs
      results.push(@require prsr)
    results.src = @soFar()
    results
# Repetition combinator: parses one-or-more (or, with other.allowEmpty,
# zero-or-more) occurrences of -x-. -other- (optional) may carry:
#   amt (exact count), sep (separator parser), start/end (delimiter parsers),
#   allowEmpty (list form only).
makeParser.many = (x, other) -> # TODO: test other.amt. TODO: CHANGE THE ARGS PROPERTY: make it take an ARRAY of args, not an object of args.
  if not x?
    throw new Error "Invalid first argument to makeParser.many: #{x}"
  # FIX: was util.typeOf(other.amt), which threw a TypeError whenever -other-
  # was omitted; every other access here already uses other?. Behavior is
  # unchanged for callers that pass -other-.
  if util.typeOf(other?.amt) is "number" # TODO: Consider putting this case into another function, like -makeParser.several-, so it is very clear which is being used.
    # Fixed-count form: recursively parse exactly args.amtLeft items.
    parseInner = makeParser (args = {}) ->
      args.amtLeft ?= other?.amt
      if args.amtLeft <= 0
        []
      else
        first = @require x
        rest = []
        if args.amtLeft is 1
          [first]
        else
          @optional ->
            if other?.sep
              @require other?.sep
            rest = @require parseInner,
              args:
                amtLeft: args.amtLeft - 1
          cons first, rest
    makeParser ->
      if other?.start
        @require other.start
      findings = @require parseInner
      if other?.end
        @require other.end
      findings
  else
    parseInner = makeParser -> # This parses it without the start- and end-sequence so that it can have a simple recursive structure.
      first = @require x
      rest = []
      @optional ->
        if other?.sep
          @require other?.sep
        rest = @require parseInner
      cons first, rest
    makeParser ->
      if other?.start
        @require other.start
      findings = if other?.allowEmpty then @optional(parseInner) else @require(parseInner)
      if other?.end
        @require other.end
      findings ? [] # When elements are optional and not there, findings will be `undefined` as it is returned from the @optional(parserInner) invoc above.
# Dispatch combinator over @caseParse. Accepts (name, casesObject) or just
# (casesObject); each key of the object is a string to try in turn.
makeParser.cases = (a, b) ->
  named = util.typeOf(a) is "string"
  name = if named then a else undefined
  o = if named then b else a
  makeParser name, ->
    @caseParse o
makeParser.getWhite = makeParser "getWhite", /^[\s]+/ # One-or-more whitespace characters.
# Parsers for each JSON production, built on makeParser. Returned as a frozen
# object of entry points.
makeParser.jsonParsers = do -> # TODO: REALLY TEST ALL OF THESE INDIVIDUALLY AND CAREFULLY. Also, CHANGE THE NAME. Its too long. make it makeParser.json
  getString = makeParser "getString", (args) ->
    @require '"'
    @loop (repeat) => =>
      if @char() is undefined
        @throw "Unterminated JSON string literal"
      if @char() is '"' and (@char(-1) isnt "\\" or @char(-2) is "\\") # This means that the string has been terminated. NOTE(review): this two-char backslash test mis-judges runs of 3+ backslashes before the quote — confirm acceptable.
        @advance()
        JSON.parse(@soFar()) # Delegate escape handling to JSON.parse on the consumed literal.
      else
        @advance()
        repeat()
  getDigits = makeParser "getDigits", /^[\d]+/ # This returns the string of digits.
  getNumber = makeParser "getNumber", ->
    @optional ["+", "-"]
    @require getDigits
    @optional -> # Fractional part.
      @require "."
      @require getDigits
    @optional -> # Exponent part.
      @require "e"
      @optional ["+", "-"]
      @require getDigits
    JSON.parse(@soFar())
  getBoolean = makeParser "getBoolean", ->
    @caseParse
      "true": -> true
      "false": -> false
  getNull = makeParser("getNull", "null").return(-> null)
  getArray = do ->
    arrayBody = makeParser ->
      # This parses everything between the [ and ] in an array, and requires there to be at least one item.
      fst = @require getJson
      @white()
      @optional ",",
        found: => # Handles the rest of the elements.
          @white()
          rest = @require arrayBody
          cons fst, rest
        notFound: => [fst] # This means -fst- is the only element.
    makeParser "getArray", ->
      @require "["
      @white()
      val = @optional arrayBody
      @white()
      @require "]"
      val ? [] # Empty array when the body matched nothing.
  getObject = do ->
    getObjectPair = makeParser -> # Parses "PROP": VAL forms.
      # FIX: was `@require string` — a reference to an undefined name, which
      # raised a ReferenceError at runtime for any JSON object; the string
      # parser defined above is -getString-.
      prop = @require getString
      @require ":"
      @white()
      val = @require getJson
      {"prop": prop, "val": val, "src": @soFar()}
    getObjectBody = makeParser ->
      @optional getObjectPair,
        found: (fst) =>
          rest = [] # This is the default if there is nothing else in the object
          @optional ->
            @white()
            @require ","
            @white()
            rest = @require getObjectBody
          cons fst, rest
        notFound: => [] # No pairs in the list corresponds to an empty object.
    makeParser "getObject", ->
      # Validate structure and get data:
      @require "{"
      @white()
      pairs = @require getObjectBody
      @white()
      @require "}"
      # Construct object:
      obj = {}
      @loop (repeat) => (i = 0) =>
        if i < pairs.length
          obj[pairs[i].prop] = pairs[i].val
          repeat(i + 1)
        obj
  # Order matters: earlier options take precedence in the array form.
  getJson = makeParser "getJson", [getNumber, getNull, getBoolean, getArray, getString, getObject]
  Object.freeze {
    "getJson": getJson
    "getString": getString
    "getNumber": getNumber
    "getBoolean": getBoolean
    "getNull": getNull
    "getArray": getArray
    "getObject": getObject
  }
Object.freeze makeParser # Lock the public API: the helpers attached above (seq, many, cases, getWhite, jsonParsers, util) can no longer be replaced.
### TODO: Things to THINK ABOUT for the future:
-Allowing some easier way of creating parsers like ones from makeParser
out of parsers which are just plain functions (i.e. some functions
which create makeParser-parsers from something like JSON.parse).
-Making parsers be objects (not functions) with these methods:
"atStart": Just run the parser (equivalent to what a parser is now)
"wholeStr": Runs the parser and makes sure that the content occupies the ENTIRE string (and there is no shit at the end of the string).
or at least having the latter feature.
-Make the "args" property of the options object in @require and @optional calls be an array of args
rather than an object to be passed as the first arg.
-ADDITIONALLY: Should makeParser record a stack so that when this error
function is called, you can expand the error and observe which parsers
were called, without having to look at the actual stack and just observing
    a bunch of calls to anonymous functions or to `parser` or `requireFor` and
shit?
### | true | # (c) PI:NAME:<NAME>END_PI. MIT License.
# parser0.6.coffee
"use strict"
util = Object.freeze # This is frozen as it is publicly added to makeParser.
"y": (le) -> # Y-Combinator
((f) ->
f(f)
) (f) ->
le (args...) ->
f(f).call(this, args...)
"loop": (f) -> util.y(f)() # For functional/expressional loops. Interface: util.loop (repeat) => (i = 0, data, ...) => ...
"makeObject": (proto, ownProps) ->
result = Object.create proto
for prop, val of ownProps
if ownProps.hasOwnProperty prop
result[prop] = val
result
"typeOf": (x) ->
if Array.isArray(x) then "array"
else if Object.prototype.toString.call(x) is "[object RegExp]" then "regexp"
else if x is null then "null"
else `typeof x`
cons = (x, xs) -> [x, xs...]
head = (xs) -> xs[0]
tail = (xs) -> xs.slice(1)
last = (xs) -> xs[xs.length - 1]
empty = (l) -> l.length is 0
concat = (xs, ys) -> xs.concat(ys)
parsingError = (msg) -> {"name": "Parsing Error", "msg": msg}
isParsingError = (err) -> err?.name is "Parsing Error"
makeParsingInstance = do -> # The parsing instance contains methods for parsing, and the information for parsing (e.g. source being parsed and index/position in it).
isParser = (x) -> util.typeOf(x) is "function" and x.isParser
makeSureParser = (x) -> if isParser(x) then x else makeParser(x) # This must be used here to prevent infinite recursion between @require and makeParser, but does not need to be used anywhere else.
parserUsage = (which) -> # This is the abstraction for @optional and @require
(a, b, c) ->
# Handle all arrangements of arguments:
# (The arrangements are (lookFor, found, notFound)
# (lookFor, {found, notFound, dontAdvance, args})
# ({lookFor, found, notFound, dontAdvance, args}))
if util.typeOf(a) in ["number", "null", "undefined"]
@throw "Invalid argument to @#{which}.\n\nJSON.stringify(the argument) is #{JSON.stringify a}.\n\n\nString(the argument) is #{a}"
if util.typeOf(a) is "object"
prsr = makeSureParser a.lookFor
other = a
else if util.typeOf(b) is "object"
prsr = makeSureParser a
other = b
else
prsr = makeSureParser a
other = {
found: b
notFound: c
args: []
}
if which is "optional"
other.notFound ?= -> # This makes nothing happen if the thing isnt found.
prsr.requireFor(@, other)
proto = Object.freeze
# FIRST ARE THE UTIL-LIKE FUNCTIONS:
"loop": util.loop
"beginsWith": (s) -> # TODO: test @test so that it can be used instead.
if util.typeOf(@str.startsWith) is "function"
@str.slice(@index).startsWith(s)
else
@loop (repeat) => (i = @index) =>
if @str.slice(0, i + 1) is s
true
else
repeat(i + 1)
"char": (x = 0) -> @str[@index + x] # Gets the current char, or one before/after it depending on -x-.
"soFar": -> @str[@startIndex...@index]
"until": (x) -> if util.typeOf(x) is "string" then @str[@index...@str.indexOf(x, @index)] else @str[@index...(@index + x)]
# NOW THE PARSING FUNCTIONS:
"test": (prsr) ->
!!(prsr.opt()(@str, startAt: @index))
"advance": (x) ->
@index += switch util.typeOf x
when "string" then x.length
when "number" then x
when "undefined" then 1
"advancePast": (s) -> # Consider removing this. I have never had a need for it and do not see one.
while not @beginsWith(s)
@advance()
@advance(s)
"throw": (msg) ->
throw parsingError msg
"reqBool": (bool, err) ->
if !bool
throw parsingError(err ? "Test to @reqBool failed.")
"caseParse": (o, onNoMatch) -> # Each key of -o- is a string to optionally parse.
@loop (repeat) => (keys = Object.keys o) =>
if empty(keys)
if onNoMatch then onNoMatch()
else @throw("Expected one of the following: " + JSON.stringify(Object.keys(o), null, 4))
else
fn = o[head(keys)]
@optional head(keys),
found: => if fn.isParser then @require fn else fn() # TODO: Should it always do @require(fn), never just fn()?
notFound: => repeat tail keys
"white": (req) ->
if req is "req"
@require makeParser.getWhite
else
@optional makeParser.getWhite
"optional": parserUsage("optional")
"require": parserUsage("require")
"end": ->
if @char() isnt undefined
throw parsingError("Expected end of input, instead found '#{@char()}'")
(str, i = 0) ->
if util.typeOf(str) isnt "string"
throw new Error "First argument to a parser must be a string. Instead,\n#{str}\nof type #{util.typeOf(str)} was found."
util.makeObject(proto, {
"index": i
"str": str
"startIndex": i
})
parserListToString = (arr) -> # For stringifying an array of parser options as to display the name of each parser.
util.loop (repeat) => (options = arr, str = "[") =>
if empty options
str.slice(0, -2) + "]" # The slice gets rid of the ", " at the end.
else
x = head options
switch util.typeOf(x)
when "function"
if x.isParser
repeat(tail(options), str + "#{x.parserName}, ")
else
repeat(tail(options), str + "(not-yet-named fn parser), ")
when "regexp"
repeat(tail(options), str + x.toString() + ", ")
when "undefined"
repeat(tail(options), str + "(undefined), ")
else
repeat(tail(options), str + JSON.stringify(x) + ", ")
@makeParser = (args...) ->
# Handle args combos:
if args.length > 1
name = args[0]
x = args[1]
else if args.length is 1
x = args[0]
else
throw new Error("makeParser requires arguments")
### Overloads of x in -makeParser-:
string: x is the string to be required
array: x is a list of options, where one must match. Earlier elements have higher precedence.
regexp: if the string doesn't match x, it throws, otherwise x.exec(@str) is skipped returned.
parser-function: x is just returned
plain function: x is used to make a parser-function. (Returns a NEW function that is a parser
function which USES the input function by applying it to an instance. It also
sets some methods (requireFor, ...) and sets .isParser to true).
###
# Handles all overloads:
if util.typeOf(x) is "string" # This is the simplest overload: just require it (and advance past it). RETURNS the string.
makeParser (name ? "string-form-parser: #{JSON.stringify x}"), ->
if @beginsWith x
@advance x.length # (Can't do @require(x) here because x is a string and this is the def of @require(x) for strings).
@soFar()
else
@throw """Expected "#{x}" at index #{@index} and instead found #{@char()} in string:\n#{JSON.stringify(@str)}"""
else if util.typeOf(x) is "array" # Each element of the array is an OPTION, and this requires one to match. RETURNS whatever the matched option returns.
makeParser (name ? "array-form-parser: #{parserListToString x}"), (args) ->
errors = []
@loop (repeat) => (i = 0, errors = []) =>
if i >= x.length
@throw "(From #{name}) Expected one of the following: #{parserListToString(x)} in string:\n#{@str}\nat index #{@index}. Errors were #{JSON.stringify errors, null, 4}"
@optional x[i],
args: args
notFound: (err) ->
repeat(i + 1, errors.concat([err]))
else if util.typeOf(x) is "regexp"
makeParser (name ? "regexp-form-parser: #{x}"), ->
val = x.exec @str[@index...]
if val is null
@throw "Expected the regexp pattern " + x.toString() + " in string ``#{@str}'' at index #{@index}"
else
@require val[0]
else if util.typeOf(x) is "function" # This is the primary form, which every other overload is defined in terms of.
if x.isParser
makeParser (name ? "copy of: #{x.parserName}"), ->
@require x
else # This is the usual case. -x- is a function intended to be made into a parser.
### The parser (in the variable -parser-), can have the following arrangments of arguments:
1. "string"
2. ({string, startAt, args, found, notFound})
3. (string, {startAt, args, found, notFound})
4. (string, startAt, {args, found, notFound})
5. (string, startAt, args, {found, notFound})
6. (string, startAt, args, found, notFound)
###
useParserAbstraction = (callback) ->
(args...) ->
if args.length is 1
if util.typeOf(args[0]) is "string"
str = args[0]
else
str = args[0].lookFor
startAt = args[0].startAt
other = args[0]
else if args.length is 2
str = args[0]
if util.typeOf(args[1]) is "object"
startAt = args[1].startAt
other = args[1]
else
startAt = args[1]
else if args.length is 3
str = args[0]
startAt = args[1]
other = args[2]
callback(str, startAt, other)
parser = useParserAbstraction (str, startAt, other) ->
parser.requireFor(makeParsingInstance(str, startAt), other)
parser.wholeStr = useParserAbstraction (str, startAt, other) ->
parsingInstance = makeParsingInstance(str, startAt)
result = parser.requireFor(parsingInstance, other)
if parsingInstance.index isnt parsingInstance.str.length
throw parsingError("Expected end of string index #{parsingInstance.index}.")
else
result
parser.requireFor = (origInstance, other = {}) ->
if util.typeOf(other.notFound) is "function"
# This branch is effectively the @optional function.
instance = makeParsingInstance(origInstance.str, origInstance.index)
try
val = x.call(instance, other.args)
catch e
if isParsingError e
err = e
else
throw e
if err
other.notFound(err)
else
origInstance.index = instance.index unless other.dontAdvance # This is what synchronizes the referenced instance with the one it's used in, so that @require()ing another function also advances @index in the current instance.
if other.found
other.found(val)
else
val
else
instance = makeParsingInstance(origInstance.str, origInstance.index)
val = x.call(instance, other.args)
if not other.dontAdvance
origInstance.index = instance.index
if other.found
other.found(val)
else
val
parser.makeNamed = (name) ->
newPrsr = makeParser name, (args) ->
@require parser, args: args
parser.return = (fn) ->
makeParser parser.parserName, (args) ->
@require parser, args: args, found: fn
parser.then = (x) -> # Note: This returns the result of the LAST parser (e.g. in getFoo.then(getBar).then(getBaz)), while makeParser.seq returns a list of the results.
makeParser ->
@require parser
@require x
parser.opt = ->
makeParser (parser.parserName + "--opt"), ->
@optional parser
parser.isParser = true
parser.parserName = name ? "a parser"
Object.freeze parser
else
throw new Error("The -makeParser- function requires argument(s).")
makeParser.util = util
makeParser.seq = (xs...) ->
makeParser ->
findings = []
@loop (repeat) => (i = 0) =>
if i < xs.length
findings.push(@require xs[i])
repeat(i + 1)
else
findings.src = @soFar()
findings
makeParser.many = (x, other) -> # TODO: test other.amt. TODO: CHANGE THE ARGS PROPERTY: make it take an ARRAY of args, not an object of args.
if not x?
throw new Error "Invalid first argument to makeParser.many: #{x}"
if util.typeOf(other.amt) is "number" # TODO: Consider putting this case into another function, like -makeParser.several-, so it is very clear which is being used.
parseInner = makeParser (args = {}) ->
args.amtLeft ?= other?.amt
if args.amtLeft <= 0
[]
else
first = @require x
rest = []
if args.amtLeft is 1
[first]
else
@optional ->
if other?.sep
@require other?.sep
rest = @require parseInner,
args:
amtLeft: args.amtLeft - 1
cons first, rest
makeParser ->
if other?.start
@require other.start
findings = @require parseInner
if other?.end
@require other.end
findings
else
parseInner = makeParser -> # This parses it without the start- and end-sequence so that it can have a simple recursive structure.
first = @require x
rest = []
@optional ->
if other?.sep
@require other?.sep
rest = @require parseInner
cons first, rest
makeParser ->
if other?.start
@require other.start
findings = if other?.allowEmpty then @optional(parseInner) else @require(parseInner)
if other?.end
@require other.end
findings ? [] # When elements are optional and not there, findings will be `undefined` as it is returned from the @optional(parserInner) invoc above.
makeParser.cases = (a, b) ->
if util.typeOf(a) is "string"
name = a
o = b
else
o = a
makeParser name, ->
@caseParse(o)
makeParser.getWhite = makeParser "getWhite", /^[\s]+/
makeParser.jsonParsers = do -> # TODO: REALLY TEST ALL OF THESE INDIVIDUALLY AND CAREFULLY (I just (6/20/14) fixed a stupid, glaring would-be reference error). Also, CHANGE THE NAME. Its too long. make it makeParser.json
getString = makeParser "getString", (args) ->
@require '"'
@loop (repeat) => =>
if @char() is undefined
@throw "Unterminated JSON string literal"
if @char() is '"' and (@char(-1) isnt "\\" or @char(-2) is "\\") # This means that the string has been terminated.
@advance()
JSON.parse(@soFar())
else
@advance()
repeat()
getDigits = makeParser "getDigits", /^[\d]+/ # This returns the string of digits.
getNumber = makeParser "getNumber", ->
@optional ["+", "-"]
@require getDigits
@optional ->
@require "."
@require getDigits
@optional ->
@require "e"
@optional ["+", "-"]
@require getDigits
JSON.parse(@soFar())
getBoolean = makeParser "getBoolean", ->
@caseParse
"true": -> true
"false": -> false
getNull = makeParser("getNull", "null").return(-> null)
getArray = do ->
arrayBody = makeParser ->
# This parses everything betweten the [ and ] in an array, and requires there to be at least one item.
fst = @require getJson
@white()
@optional ",",
found: => # Handles the rest of the elements.
@white()
rest = @require arrayBody
cons fst, rest
notFound: => [fst] # This means -fst- is the only element.
makeParser "getArray", ->
@require "["
@white()
val = @optional arrayBody
@white()
@require "]"
val ? []
getObject = do ->
getObjectPair = makeParser -> # Parses "PROP": VAL forms.
prop = @require string
@require ":"
@white()
val = @require getJson
{"prop": prop, "val": val, "src": @soFar()}
getObjectBody = makeParser ->
@optional getObjectPair,
found: (fst) =>
rest = [] # This is the default if there is nothing else in the object
@optional ->
@white()
@require ","
@white()
rest = @require getObjectBody
cons fst, rest
notFound: => [] # No pairs in the list corresponds to an empty object.
makeParser "getObject", ->
# Validate structure and get data:
@require "{"
@white()
pairs = @require getObjectBody
@white()
@require "}"
# Construct object:
obj = {}
@loop (repeat) => (i = 0) =>
if i < pairs.length
obj[pairs[i].prop] = pairs[i].val
repeat(i + 1)
obj
getJson = makeParser "getJson", [getNumber, getNull, getBoolean, getArray, getString, getObject]
Object.freeze {
"getJson": getJson
"getString": getString
"getNumber": getNumber
"getBoolean": getBoolean
"getNull": getNull
"getArray": getArray
"getObject": getObject
}
Object.freeze makeParser
### TODO: Things to THINK ABOUT for the future:
-Allowing some easier way of creating parsers like ones from makeParser
out of parsers which are just plain functions (i.e. some functions
which create makeParser-parsers from something like JSON.parse).
-Making parsers be objects (not functions) with these methods:
"atStart": Just run the parser (equivalent to what a parser is now)
"wholeStr": Runs the parser and makes sure that the content occupies the ENTIRE string (and there is no shit at the end of the string).
or at least having the latter feature.
-Make the "args" property of the options object in @require and @optional calls be an array of args
rather than an object to be passed as the first arg.
-ADDITIONALLY: Should makeParser record a stack so that when this error
function is called, you can expand the error and observe which parsers
were called, without having to look at the actual stack and just observing
a bunch of calls to anonymous function or to `parser` or `requireFor` and
shit?
### |
[
{
"context": ".opensource.org/licenses/mit-license.php\n# Author: Mario Klingemann\n# http:#www.quasimondo.com\n\n# RGB to Luminance co",
"end": 138,
"score": 0.9998847842216492,
"start": 122,
"tag": "NAME",
"value": "Mario Klingemann"
},
{
"context": "ting RGB\n# values to Luminance... | src.old/geom/ColorMatrix.coffee | minodisk/muon | 1 | # ColorMatrix Class v2.1
# released under MIT License (X11)
# http:#www.opensource.org/licenses/mit-license.php
# Author: Mario Klingemann
# http:#www.quasimondo.com
# RGB to Luminance conversion constants as found on
# Charles A. Poynton's colorspace-faq:
# http:#www.faqs.org/faqs/graphics/colorspace-faq/
_LUMA_R = 0.212671
_LUMA_G = 0.71516
_LUMA_B = 0.072169
# There seem different standards for converting RGB
# values to Luminance. This is the one by Paul Haeberli:
_LUMA_R2 = 0.3086
_LUMA_G2 = 0.6094
_LUMA_B2 = 0.0820
_ONETHIRD = 1 / 3
_IDENTITY = [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, 1, 0
]
_RAD = Math.PI / 180
exports.geom.ColorMatrix = class ColorMatrix
constructor: (matrix)->
if matrix instanceof ColorMatrix
@matrix = matrix.matrix.concat()
else if Array.isArray(matrix)
@matrix = matrix.concat()
else
@reset()
toString: ->
tmp = []
for v, i in @matrix
t = [] if i % 5 is 0
t.push String(v)
tmp.push t if i % 5 is 4 or i is @matrix.length - 1
for x in [0...5]
l = 0
for y in [0...tmp.length] by 1
l = Math.max l, tmp[y][x].length
for y in [0...tmp.length] by 1
tmp[y][x] = StringUtil.padLeft tmp[y][x], l
for y in [0...tmp.length] by 1
tmp[y] = tmp[y].join ', '
tmp[y] += ',' if y != tmp.length - 1
tmp.join '\n'
clone: ->
new ColorMatrix @matrix
reset: ->
@matrix = _IDENTITY.concat()
concat: (src)->
dst = @matrix
out = []
for y in [0...4]
i = 5 * y
for x in [0...5]
out[i + x] = src[i] * dst[x] + src[i + 1] * dst[x + 5] + src[i + 2] * dst[x + 10] + src[i + 3] * dst[x + 15]
out[i + 4] += src[i + 4]
@matrix = out
@
invert: ->
@concat [
-1, 0, 0, 0, 0xff
0, -1, 0, 0, 0xff
0, 0, -1, 0, 0xff
0, 0, 0, 1, 0
]
adjustSaturation: (s)->
irlum = -s * _LUMA_R
iglum = -s * _LUMA_G
iblum = -s * _LUMA_B
++s
@concat [
irlum + s, iglum, iblum, 0, 0
irlum, iglum + s, iblum, 0, 0
irlum, iglum, iblum + s, 0, 0
0, 0, 0, 1, 0
]
adjustContrast: (r, g = r, b = r)->
@concat [
1 + r, 0, 0, 0, -0x80 * r
0, 1 + g, 0, 0, -0x80 * g
0, 0, 1 + b, 0, -0x80 * b
0, 0, 0, 1, 0
]
adjustBrightness: (r, g = r, b = r)->
@concat [
1, 0, 0, 0, 0xff * r
0, 1, 0, 0, 0xff * g
0, 0, 1, 0, 0xff * b
0, 0, 0, 1, 0
]
adjustHue: (degree)->
R = _LUMA_R
G = _LUMA_G
B = _LUMA_B
degree *= _RAD
c = Math.cos degree
s = Math.sin degree
l = 1 - c
m = l - s
n = l + s
@concat [
R * m + c, G * m, B * m + s, 0, 0
R * l + s * 0.143, G * l + c + s * 0.14, B * l + s * -0.283, 0, 0
R * n - s, G * n, B * n + c, 0, 0
0, 0, 0, 1, 0
]
rotateHue: (degree)->
@_initHue()
@concat @_preHue.matrix
@rotateBlue degree
@concat @_postHue.matrix
luminance2Alpha: ->
@concat [
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
_LUMA_R, _LUMA_G, _LUMA_B, 0, 0
]
adjustAlphaContrast: (amount)->
@concat [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, amount + 1, -0x80 * amount
]
colorize: (rgb, amount = 1)->
R = _LUMA_R
G = _LUMA_G
B = _LUMA_B
r = ((rgb >> 16) & 0xFF) / 0xFF
g = ((rgb >> 8) & 0xFF) / 0xFF
b = (rgb & 0xFF) / 0xFF
invAmount = 1 - amount
@concat [
invAmount + amount * r * R, amount * r * G, amount * r * B, 0, 0,
amount * g * R, invAmount + amount * g * G, amount * g * B, 0, 0,
amount * b * R, amount * b * G, invAmount + amount * b * B, 0, 0,
0, 0, 0, 1, 0
]
setChannels: (r = 1, g = 2, b = 4, a = 8)->
rf = (if ((r & 1) is 1) then 1 else 0) + (if ((r & 2) is 2) then 1 else 0) + (if ((r & 4) is 4) then 1 else 0) + (if ((r & 8) is 8) then 1 else 0)
rf = (1 / rf) if rf > 0
gf = (if ((g & 1) is 1) then 1 else 0) + (if ((g & 2) is 2) then 1 else 0) + (if ((g & 4) is 4) then 1 else 0) + (if ((g & 8) is 8) then 1 else 0)
gf = (1 / gf) if gf > 0
bf = (if ((b & 1) is 1) then 1 else 0) + (if ((b & 2) is 2) then 1 else 0) + (if ((b & 4) is 4) then 1 else 0) + (if ((b & 8) is 8) then 1 else 0)
bf = (1 / bf) if bf > 0
af = (if ((a & 1) is 1) then 1 else 0) + (if ((a & 2) is 2) then 1 else 0) + (if ((a & 4) is 4) then 1 else 0) + (if ((a & 8) is 8) then 1 else 0)
af = (1 / af) if af > 0
@concat [
(if ((r & 1) is 1) then rf else 0), (if ((r & 2) is 2) then rf else 0), (if ((r & 4) is 4) then rf else 0), (if ((r & 8) is 8) then rf else 0), 0
(if ((g & 1) is 1) then gf else 0), (if ((g & 2) is 2) then gf else 0), (if ((g & 4) is 4) then gf else 0), (if ((g & 8) is 8) then gf else 0), 0
(if ((b & 1) is 1) then bf else 0), (if ((b & 2) is 2) then bf else 0), (if ((b & 4) is 4) then bf else 0), (if ((b & 8) is 8) then bf else 0), 0
(if ((a & 1) is 1) then af else 0), (if ((a & 2) is 2) then af else 0), (if ((a & 4) is 4) then af else 0), (if ((a & 8) is 8) then af else 0), 0
]
blend: (matrix, amount)->
for v, i in matrix.matrix
@matrix[i] = @matrix[i] * (1 - amount) + v * amount
@
average: (r = _ONETHIRD, g = _ONETHIRD, b = _ONETHIRD)->
@concat [
r, g, b, 0, 0
r, g, b, 0, 0
r, g, b, 0, 0
0, 0, 0, 1, 0
]
threshold: (threshold, factor = 0x100)->
R = factor * _LUMA_R
G = factor * _LUMA_G
B = factor * _LUMA_B
t = -factor * threshold
@concat [
R, G, B, 0, t
R, G, B, 0, t
R, G, B, 0, t
0, 0, 0, 1, 0
]
desaturate: ->
  # Replace each colour channel with the pixel's luminance
  # (Rec. 709-style weights from the module constants); alpha is untouched.
  lum = [_LUMA_R, _LUMA_G, _LUMA_B, 0, 0]
  @concat lum.concat lum, lum, [0, 0, 0, 1, 0]
randomize: (amount = 1)->
  # Mix in a random colour transform. `amount` scales how far the matrix
  # strays from identity: each colour row keeps (1 - amount) of its own
  # channel and gains random cross-channel terms plus a random offset.
  keep = 1 - amount
  # Triangular-ish random in [-amount, amount] (difference of two uniforms).
  rnd = -> amount * (Math.random() - Math.random())
  # Random additive offset scaled to the 0..255 byte range.
  off = -> (amount * 0xFF) * (Math.random() - Math.random())
  @concat [
    keep + rnd(), rnd(), rnd(), 0, off()
    rnd(), keep + rnd(), rnd(), 0, off()
    rnd(), rnd(), keep + rnd(), 0, off()
    0, 0, 0, 1, 0
  ]
setMultiplicators: (r = 1, g = 1, b = 1, a = 1)->
  # Scale each channel independently: a pure diagonal matrix.
  mat = _IDENTITY.concat()
  mat[0] = r    # red row, red column
  mat[6] = g    # green row, green column
  mat[12] = b   # blue row, blue column
  mat[18] = a   # alpha row, alpha column
  @concat mat
clearChannels: (r = false, g = false, b = false, a = false)->
  # Zero out whole rows of the matrix so the selected destination channels
  # always come out 0 regardless of the source pixel.
  #
  # Fix: previously this returned the value of the last conditional
  # assignment (0 or undefined), unlike the other mutators (blend, concat)
  # which return `this`; return `@` so calls can be chained consistently.
  if r
    @matrix[i] = 0 for i in [0...5]
  if g
    @matrix[i] = 0 for i in [5...10]
  if b
    @matrix[i] = 0 for i in [10...15]
  if a
    @matrix[i] = 0 for i in [15...20]
  @
thresholdAlpha: (threshold, factor = 0x100)->
  # Hard threshold on the alpha channel only; colour channels pass through.
  mat = _IDENTITY.concat()
  mat[18] = factor                 # steep alpha slope
  mat[19] = -factor * threshold    # shift so the cutoff sits at `threshold`
  @concat mat
averageRGB2Alpha: ->
  # Move the mean of R, G and B into the alpha channel and force all
  # colour channels to fully opaque white (constant 0xff).
  opaque = [0, 0, 0, 0, 0xff]
  @concat opaque.concat opaque, opaque, [_ONETHIRD, _ONETHIRD, _ONETHIRD, 0, 0]
invertAlpha: ->
  # Flip the alpha channel (a -> 0xff - a); colour channels pass through.
  mat = _IDENTITY.concat()
  mat[18] = -1
  mat[19] = 0xff
  @concat mat
rgb2Alpha: (r, g, b)->
  # Derive alpha as a weighted sum r*R + g*G + b*B of the colour channels,
  # forcing the colour channels themselves to constant 0xff.
  opaque = [0, 0, 0, 0, 0xff]
  @concat opaque.concat opaque, opaque, [r, g, b, 0, 0]
setAlpha: (alpha)->
  # Multiply the alpha channel by a constant factor; colours pass through.
  mat = _IDENTITY.concat()
  mat[18] = alpha
  @concat mat
rotateRed: (degree)->
  # Rotation about the red axis: mixes the blue (2) and green (1) channels.
  @_rotateColor(degree, 2, 1)
rotateGreen: (degree)->
  # Rotation about the green axis: mixes the red (0) and blue (2) channels.
  @_rotateColor(degree, 0, 2)
rotateBlue: (degree)->
  # Rotation about the blue axis: mixes the green (1) and red (0) channels.
  @_rotateColor(degree, 1, 0)
_rotateColor: (degree, x, y)->
  # Apply a 2D rotation (in degrees) within the plane spanned by colour
  # channels x and y, then fold it into the current matrix.
  rad = degree * _RAD
  c = Math.cos rad
  s = Math.sin rad
  mat = _IDENTITY.concat()
  mat[x + x * 5] = c
  mat[y + y * 5] = c
  mat[y + x * 5] = s
  mat[x + y * 5] = -s
  @concat mat
shearRed: (green, blue)->
  # Shear the red channel: add `green` x G and `blue` x B into R.
  @_shearColor(0, 1, green, 2, blue)
shearGreen: (red, blue)->
  # Shear the green channel: add `red` x R and `blue` x B into G.
  @_shearColor(1, 0, red, 2, blue)
shearBlue: (red, green)->
  # Shear the blue channel: add `red` x R and `green` x G into B.
  @_shearColor(2, 0, red, 1, green)
_shearColor: (x, y1, d1, y2, d2)->
  # Build an identity matrix whose row `x` picks up the shear coefficients
  # d1 and d2 in columns y1 and y2, then fold it into the current matrix.
  base = x * 5
  mat = _IDENTITY.concat()
  mat[base + y1] = d1
  mat[base + y2] = d2
  @concat mat
applyColorDeficiency: (type)->
  # Simulate how the image would be perceived with a given colour-vision
  # deficiency. `type` is one of the eight condition names below; any other
  # value leaves the matrix unchanged.
  # the values of this method are copied from http://www.nofunc.com/Color_Matrix_Library/
  switch type
    when 'Protanopia'
      @concat [
        0.567, 0.433, 0.0, 0.0, 0.0
        0.558, 0.442, 0.0, 0.0, 0.0
        0.0, 0.242, 0.758, 0.0, 0.0
        0.0, 0.0, 0.0, 1.0, 0.0
      ]
      break
    when 'Protanomaly'
      @concat [
        0.817, 0.183, 0.0, 0.0, 0.0
        0.333, 0.667, 0.0, 0.0, 0.0
        0.0, 0.125, 0.875, 0.0, 0.0
        0.0, 0.0, 0.0, 1.0, 0.0
      ]
      break
    when 'Deuteranopia'
      @concat [
        0.625, 0.375, 0.0, 0.0, 0.0
        0.7, 0.3, 0.0, 0.0, 0.0
        0.0, 0.3, 0.7, 0.0, 0.0
        0.0, 0.0, 0.0, 1.0, 0.0
      ]
      break
    when 'Deuteranomaly'
      @concat [
        0.8, 0.2, 0.0, 0.0, 0.0
        0.258, 0.742, 0.0, 0.0, 0.0
        0.0, 0.142, 0.858, 0.0, 0.0
        0.0, 0.0, 0.0, 1.0, 0.0
      ]
      break
    when 'Tritanopia'
      @concat [
        0.95, 0.05, 0.0, 0.0, 0.0
        0.0, 0.433, 0.567, 0.0, 0.0
        0.0, 0.475, 0.525, 0.0, 0.0
        0.0, 0.0, 0.0, 1.0, 0.0
      ]
      break
    when 'Tritanomaly'
      @concat [
        0.967, 0.033, 0.0, 0.0, 0.0
        0.0, 0.733, 0.267, 0.0, 0.0
        0.0, 0.183, 0.817, 0.0, 0.0
        0.0, 0.0, 0.0, 1.0, 0.0
      ]
      break
    when 'Achromatopsia'
      @concat [
        0.299, 0.587, 0.114, 0.0, 0.0
        0.299, 0.587, 0.114, 0.0, 0.0
        0.299, 0.587, 0.114, 0.0, 0.0
        0.0, 0.0, 0.0, 1.0, 0.0
      ]
      break
    when 'Achromatomaly'
      @concat [
        0.618, 0.320, 0.062, 0.0, 0.0
        0.163, 0.775, 0.062, 0.0, 0.0
        0.163, 0.320, 0.516, 0.0, 0.0
        0.0, 0.0, 0.0, 1.0, 0.0
      ]
      break
applyMatrix: (rgba)->
  # Transform a single packed 32-bit ARGB pixel through the matrix and
  # return the resulting packed ARGB value. 0.5 is added for rounding
  # before the bit shifts truncate to integers.
  a = (rgba >>> 24) & 0xff
  r = (rgba >>> 16) & 0xff
  g = (rgba >>> 8) & 0xff
  b = rgba & 0xff
  m = @matrix
  clamp = (v)->
    if v < 0 then 0 else if v > 0xff then 0xff else v
  r2 = clamp 0.5 + r * m[0] + g * m[1] + b * m[2] + a * m[3] + m[4]
  g2 = clamp 0.5 + r * m[5] + g * m[6] + b * m[7] + a * m[8] + m[9]
  b2 = clamp 0.5 + r * m[10] + g * m[11] + b * m[12] + a * m[13] + m[14]
  a2 = clamp 0.5 + r * m[15] + g * m[16] + b * m[17] + a * m[18] + m[19]
  a2 << 24 | r2 << 16 | g2 << 8 | b2
transformVector: (values)->
  # Multiply a 4-element [r, g, b, a] vector by the matrix, writing the
  # result back into `values` in place.
  # NOTE(review): ErrorMessage is presumably a project-defined Error
  # subclass declared elsewhere in this file — confirm.
  throw new ErrorMessage "values length isn't 4" if values.length isnt 4
  [sR, sG, sB, sA] = values
  m = @matrix
  values[0] = sR * m[0] + sG * m[1] + sB * m[2] + sA * m[3] + m[4]
  values[1] = sR * m[5] + sG * m[6] + sB * m[7] + sA * m[8] + m[9]
  values[2] = sR * m[10] + sG * m[11] + sB * m[12] + sA * m[13] + m[14]
  values[3] = sR * m[15] + sG * m[16] + sB * m[17] + sA * m[18] + m[19]
_initHue: ->
  # Lazily build the pre-/post-rotation matrices used by rotateHue: the
  # colour cube is rotated and sheared so the luminance vector aligns with
  # the blue axis (@_preHue), hue rotation then happens around blue, and
  # @_postHue undoes the alignment. Runs the expensive setup only once.
  #greenRotation = 35.0
  greenRotation = 39.182655
  unless @_hueInitialized
    @_hueInitialized = true
    @_preHue = new ColorMatrix()
    @_preHue.rotateRed(45)
    @_preHue.rotateGreen(-greenRotation)
    # Luminance weights (plus alpha 1) pushed through the pre-rotation...
    lum = [
      _LUMA_R2
      _LUMA_G2
      _LUMA_B2
      1.0
    ]
    @_preHue.transformVector(lum)
    # ...give the shear factors that map the rotated luminance onto blue.
    red = lum[0] / lum[2]
    green = lum[1] / lum[2]
    @_preHue.shearBlue red, green
    # Post matrix applies the exact inverse operations in reverse order.
    @_postHue = new ColorMatrix()
    @_postHue.shearBlue(-red, -green)
    @_postHue.rotateGreen greenRotation
    @_postHue.rotateRed -45.0
| 163829 | # ColorMatrix Class v2.1
# released under MIT License (X11)
# http:#www.opensource.org/licenses/mit-license.php
# Author: <NAME>
# http:#www.quasimondo.com
# RGB to Luminance conversion constants as found on
# Charles A. Poynton's colorspace-faq:
# http:#www.faqs.org/faqs/graphics/colorspace-faq/
_LUMA_R = 0.212671
_LUMA_G = 0.71516
_LUMA_B = 0.072169
# There seem different standards for converting RGB
# values to Luminance. This is the one by <NAME>:
_LUMA_R2 = 0.3086
_LUMA_G2 = 0.6094
_LUMA_B2 = 0.0820
_ONETHIRD = 1 / 3
_IDENTITY = [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, 1, 0
]
_RAD = Math.PI / 180
exports.geom.ColorMatrix = class ColorMatrix
constructor: (matrix)->
if matrix instanceof ColorMatrix
@matrix = matrix.matrix.concat()
else if Array.isArray(matrix)
@matrix = matrix.concat()
else
@reset()
toString: ->
tmp = []
for v, i in @matrix
t = [] if i % 5 is 0
t.push String(v)
tmp.push t if i % 5 is 4 or i is @matrix.length - 1
for x in [0...5]
l = 0
for y in [0...tmp.length] by 1
l = Math.max l, tmp[y][x].length
for y in [0...tmp.length] by 1
tmp[y][x] = StringUtil.padLeft tmp[y][x], l
for y in [0...tmp.length] by 1
tmp[y] = tmp[y].join ', '
tmp[y] += ',' if y != tmp.length - 1
tmp.join '\n'
clone: ->
new ColorMatrix @matrix
reset: ->
@matrix = _IDENTITY.concat()
concat: (src)->
dst = @matrix
out = []
for y in [0...4]
i = 5 * y
for x in [0...5]
out[i + x] = src[i] * dst[x] + src[i + 1] * dst[x + 5] + src[i + 2] * dst[x + 10] + src[i + 3] * dst[x + 15]
out[i + 4] += src[i + 4]
@matrix = out
@
invert: ->
@concat [
-1, 0, 0, 0, 0xff
0, -1, 0, 0, 0xff
0, 0, -1, 0, 0xff
0, 0, 0, 1, 0
]
adjustSaturation: (s)->
irlum = -s * _LUMA_R
iglum = -s * _LUMA_G
iblum = -s * _LUMA_B
++s
@concat [
irlum + s, iglum, iblum, 0, 0
irlum, iglum + s, iblum, 0, 0
irlum, iglum, iblum + s, 0, 0
0, 0, 0, 1, 0
]
adjustContrast: (r, g = r, b = r)->
@concat [
1 + r, 0, 0, 0, -0x80 * r
0, 1 + g, 0, 0, -0x80 * g
0, 0, 1 + b, 0, -0x80 * b
0, 0, 0, 1, 0
]
adjustBrightness: (r, g = r, b = r)->
@concat [
1, 0, 0, 0, 0xff * r
0, 1, 0, 0, 0xff * g
0, 0, 1, 0, 0xff * b
0, 0, 0, 1, 0
]
adjustHue: (degree)->
R = _LUMA_R
G = _LUMA_G
B = _LUMA_B
degree *= _RAD
c = Math.cos degree
s = Math.sin degree
l = 1 - c
m = l - s
n = l + s
@concat [
R * m + c, G * m, B * m + s, 0, 0
R * l + s * 0.143, G * l + c + s * 0.14, B * l + s * -0.283, 0, 0
R * n - s, G * n, B * n + c, 0, 0
0, 0, 0, 1, 0
]
rotateHue: (degree)->
@_initHue()
@concat @_preHue.matrix
@rotateBlue degree
@concat @_postHue.matrix
luminance2Alpha: ->
@concat [
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
_LUMA_R, _LUMA_G, _LUMA_B, 0, 0
]
adjustAlphaContrast: (amount)->
@concat [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, amount + 1, -0x80 * amount
]
colorize: (rgb, amount = 1)->
R = _LUMA_R
G = _LUMA_G
B = _LUMA_B
r = ((rgb >> 16) & 0xFF) / 0xFF
g = ((rgb >> 8) & 0xFF) / 0xFF
b = (rgb & 0xFF) / 0xFF
invAmount = 1 - amount
@concat [
invAmount + amount * r * R, amount * r * G, amount * r * B, 0, 0,
amount * g * R, invAmount + amount * g * G, amount * g * B, 0, 0,
amount * b * R, amount * b * G, invAmount + amount * b * B, 0, 0,
0, 0, 0, 1, 0
]
setChannels: (r = 1, g = 2, b = 4, a = 8)->
rf = (if ((r & 1) is 1) then 1 else 0) + (if ((r & 2) is 2) then 1 else 0) + (if ((r & 4) is 4) then 1 else 0) + (if ((r & 8) is 8) then 1 else 0)
rf = (1 / rf) if rf > 0
gf = (if ((g & 1) is 1) then 1 else 0) + (if ((g & 2) is 2) then 1 else 0) + (if ((g & 4) is 4) then 1 else 0) + (if ((g & 8) is 8) then 1 else 0)
gf = (1 / gf) if gf > 0
bf = (if ((b & 1) is 1) then 1 else 0) + (if ((b & 2) is 2) then 1 else 0) + (if ((b & 4) is 4) then 1 else 0) + (if ((b & 8) is 8) then 1 else 0)
bf = (1 / bf) if bf > 0
af = (if ((a & 1) is 1) then 1 else 0) + (if ((a & 2) is 2) then 1 else 0) + (if ((a & 4) is 4) then 1 else 0) + (if ((a & 8) is 8) then 1 else 0)
af = (1 / af) if af > 0
@concat [
(if ((r & 1) is 1) then rf else 0), (if ((r & 2) is 2) then rf else 0), (if ((r & 4) is 4) then rf else 0), (if ((r & 8) is 8) then rf else 0), 0
(if ((g & 1) is 1) then gf else 0), (if ((g & 2) is 2) then gf else 0), (if ((g & 4) is 4) then gf else 0), (if ((g & 8) is 8) then gf else 0), 0
(if ((b & 1) is 1) then bf else 0), (if ((b & 2) is 2) then bf else 0), (if ((b & 4) is 4) then bf else 0), (if ((b & 8) is 8) then bf else 0), 0
(if ((a & 1) is 1) then af else 0), (if ((a & 2) is 2) then af else 0), (if ((a & 4) is 4) then af else 0), (if ((a & 8) is 8) then af else 0), 0
]
blend: (matrix, amount)->
for v, i in matrix.matrix
@matrix[i] = @matrix[i] * (1 - amount) + v * amount
@
average: (r = _ONETHIRD, g = _ONETHIRD, b = _ONETHIRD)->
@concat [
r, g, b, 0, 0
r, g, b, 0, 0
r, g, b, 0, 0
0, 0, 0, 1, 0
]
threshold: (threshold, factor = 0x100)->
R = factor * _LUMA_R
G = factor * _LUMA_G
B = factor * _LUMA_B
t = -factor * threshold
@concat [
R, G, B, 0, t
R, G, B, 0, t
R, G, B, 0, t
0, 0, 0, 1, 0
]
desaturate: ->
R = _LUMA_R
G = _LUMA_G
B = _LUMA_B
@concat [
R, G, B, 0, 0
R, G, B, 0, 0
R, G, B, 0, 0
0, 0, 0, 1, 0
]
randomize: (amount = 1)->
inv_amount = (1 - amount)
r1 = (inv_amount + (amount * (Math.random() - Math.random())))
g1 = (amount * (Math.random() - Math.random()))
b1 = (amount * (Math.random() - Math.random()))
o1 = ((amount * 0xFF) * (Math.random() - Math.random()))
r2 = (amount * (Math.random() - Math.random()))
g2 = (inv_amount + (amount * (Math.random() - Math.random())))
b2 = (amount * (Math.random() - Math.random()))
o2 = ((amount * 0xFF) * (Math.random() - Math.random()))
r3 = (amount * (Math.random() - Math.random()))
g3 = (amount * (Math.random() - Math.random()))
b3 = (inv_amount + (amount * (Math.random() - Math.random())))
o3 = ((amount * 0xFF) * (Math.random() - Math.random()))
@concat [
r1, g1, b1, 0, o1
r2, g2, b2, 0, o2
r3, g3, b3, 0, o3
0, 0, 0, 1, 0
]
setMultiplicators: (r = 1, g = 1, b = 1, a = 1)->
@concat [
r, 0, 0, 0, 0
0, g, 0, 0, 0
0, 0, b, 0, 0
0, 0, 0, a, 0
]
clearChannels: (r = false, g = false, b = false, a = false)->
@matrix[0] = @matrix[1] = @matrix[2] = @matrix[3] = @matrix[4] = 0 if r
@matrix[5] = @matrix[6] = @matrix[7] = @matrix[8] = @matrix[9] = 0 if g
@matrix[10] = @matrix[11] = @matrix[12] = @matrix[13] = @matrix[14] = 0 if b
@matrix[15] = @matrix[16] = @matrix[17] = @matrix[18] = @matrix[19] = 0 if a
thresholdAlpha: (threshold, factor = 0x100)->
@concat [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, factor, -factor * threshold
]
averageRGB2Alpha: ->
@concat [
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
_ONETHIRD, _ONETHIRD, _ONETHIRD, 0, 0
]
invertAlpha: ->
@concat [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, -1, 0xff
]
rgb2Alpha: (r, g, b)->
@concat [
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
r, g, b, 0, 0
]
setAlpha: (alpha)->
@concat [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, alpha, 0
]
rotateRed: (degree)->
@_rotateColor degree, 2, 1
rotateGreen: (degree)->
@_rotateColor degree, 0, 2
rotateBlue: (degree)->
@_rotateColor degree, 1, 0
_rotateColor: (degree, x, y)->
degree *= _RAD
mat = _IDENTITY.concat()
mat[x + x * 5] = mat[y + y * 5] = Math.cos degree
mat[y + x * 5] = Math.sin degree
mat[x + y * 5] = -Math.sin degree
@concat mat
shearRed: (green, blue)->
@_shearColor 0, 1, green, 2, blue
shearGreen: (red, blue)->
@_shearColor 1, 0, red, 2, blue
shearBlue: (red, green)->
@_shearColor 2, 0, red, 1, green
_shearColor: (x, y1, d1, y2, d2)->
mat = _IDENTITY.concat()
mat[y1 + x * 5] = d1
mat[y2 + x * 5] = d2
@concat mat
applyColorDeficiency: (type)->
# the values of this method are copied from http:#www.nofunc.com/Color_Matrix_Library/
switch type
when 'Protanopia'
@concat [
0.567, 0.433, 0.0, 0.0, 0.0
0.558, 0.442, 0.0, 0.0, 0.0
0.0, 0.242, 0.758, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Protanomaly'
@concat [
0.817, 0.183, 0.0, 0.0, 0.0
0.333, 0.667, 0.0, 0.0, 0.0
0.0, 0.125, 0.875, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Deuteranopia'
@concat [
0.625, 0.375, 0.0, 0.0, 0.0
0.7, 0.3, 0.0, 0.0, 0.0
0.0, 0.3, 0.7, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Deuteranomaly'
@concat [
0.8, 0.2, 0.0, 0.0, 0.0
0.258, 0.742, 0.0, 0.0, 0.0
0.0, 0.142, 0.858, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Tritanopia'
@concat [
0.95, 0.05, 0.0, 0.0, 0.0
0.0, 0.433, 0.567, 0.0, 0.0
0.0, 0.475, 0.525, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Tritanomaly'
@concat [
0.967, 0.033, 0.0, 0.0, 0.0
0.0, 0.733, 0.267, 0.0, 0.0
0.0, 0.183, 0.817, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Achromatopsia'
@concat [
0.299, 0.587, 0.114, 0.0, 0.0
0.299, 0.587, 0.114, 0.0, 0.0
0.299, 0.587, 0.114, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Achromatomaly'
@concat [
0.618, 0.320, 0.062, 0.0, 0.0
0.163, 0.775, 0.062, 0.0, 0.0
0.163, 0.320, 0.516, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
applyMatrix: (rgba)->
a = ( rgba >>> 24 ) & 0xff
r = ( rgba >>> 16 ) & 0xff
g = ( rgba >>> 8 ) & 0xff
b = rgba & 0xff
m = @matrix
r2 = 0.5 + r * m[0] + g * m[1] + b * m[2] + a * m[3] + m[4]
g2 = 0.5 + r * m[5] + g * m[6] + b * m[7] + a * m[8] + m[9]
b2 = 0.5 + r * m[10] + g * m[11] + b * m[12] + a * m[13] + m[14]
a2 = 0.5 + r * m[15] + g * m[16] + b * m[17] + a * m[18] + m[19]
a2 = 0 if a2 < 0
a2 = 0xff if a2 > 0xff
r2 = 0 if r2 < 0
r2 = 0xff if r2 > 0xff
g2 = 0 if g2 < 0
g2 = 0xff if g2 > 0xff
b2 = 0 if b2 < 0
b2 = 0xff if b2 > 0xff
a2 << 24 | r2 << 16 | g2 << 8 | b2
transformVector: (values)->
throw new ErrorMessage "values length isn't 4" if values.length isnt 4
m = @matrix
sR = values[0]
sG = values[1]
sB = values[2]
sA = values[3]
oR = sR * m[0] + sG * m[1] + sB * m[2] + sA * m[3] + m[4]
oG = sR * m[5] + sG * m[6] + sB * m[7] + sA * m[8] + m[9]
oB = sR * m[10] + sG * m[11] + sB * m[12] + sA * m[13] + m[14]
oA = sR * m[15] + sG * m[16] + sB * m[17] + sA * m[18] + m[19]
values[0] = oR
values[1] = oG
values[2] = oB
values[3] = oA
_initHue: ->
#greenRotation = 35.0
greenRotation = 39.182655
unless @_hueInitialized
@_hueInitialized = true
@_preHue = new ColorMatrix()
@_preHue.rotateRed(45)
@_preHue.rotateGreen(-greenRotation)
lum = [
_LUMA_R2
_LUMA_G2
_LUMA_B2
1.0
]
@_preHue.transformVector(lum)
red = lum[0] / lum[2]
green = lum[1] / lum[2]
@_preHue.shearBlue red, green
@_postHue = new ColorMatrix()
@_postHue.shearBlue(-red, -green)
@_postHue.rotateGreen greenRotation
@_postHue.rotateRed -45.0
| true | # ColorMatrix Class v2.1
# released under MIT License (X11)
# http:#www.opensource.org/licenses/mit-license.php
# Author: PI:NAME:<NAME>END_PI
# http:#www.quasimondo.com
# RGB to Luminance conversion constants as found on
# Charles A. Poynton's colorspace-faq:
# http:#www.faqs.org/faqs/graphics/colorspace-faq/
_LUMA_R = 0.212671
_LUMA_G = 0.71516
_LUMA_B = 0.072169
# There seem different standards for converting RGB
# values to Luminance. This is the one by PI:NAME:<NAME>END_PI:
_LUMA_R2 = 0.3086
_LUMA_G2 = 0.6094
_LUMA_B2 = 0.0820
_ONETHIRD = 1 / 3
_IDENTITY = [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, 1, 0
]
_RAD = Math.PI / 180
exports.geom.ColorMatrix = class ColorMatrix
constructor: (matrix)->
if matrix instanceof ColorMatrix
@matrix = matrix.matrix.concat()
else if Array.isArray(matrix)
@matrix = matrix.concat()
else
@reset()
toString: ->
tmp = []
for v, i in @matrix
t = [] if i % 5 is 0
t.push String(v)
tmp.push t if i % 5 is 4 or i is @matrix.length - 1
for x in [0...5]
l = 0
for y in [0...tmp.length] by 1
l = Math.max l, tmp[y][x].length
for y in [0...tmp.length] by 1
tmp[y][x] = StringUtil.padLeft tmp[y][x], l
for y in [0...tmp.length] by 1
tmp[y] = tmp[y].join ', '
tmp[y] += ',' if y != tmp.length - 1
tmp.join '\n'
clone: ->
new ColorMatrix @matrix
reset: ->
@matrix = _IDENTITY.concat()
concat: (src)->
dst = @matrix
out = []
for y in [0...4]
i = 5 * y
for x in [0...5]
out[i + x] = src[i] * dst[x] + src[i + 1] * dst[x + 5] + src[i + 2] * dst[x + 10] + src[i + 3] * dst[x + 15]
out[i + 4] += src[i + 4]
@matrix = out
@
invert: ->
@concat [
-1, 0, 0, 0, 0xff
0, -1, 0, 0, 0xff
0, 0, -1, 0, 0xff
0, 0, 0, 1, 0
]
adjustSaturation: (s)->
irlum = -s * _LUMA_R
iglum = -s * _LUMA_G
iblum = -s * _LUMA_B
++s
@concat [
irlum + s, iglum, iblum, 0, 0
irlum, iglum + s, iblum, 0, 0
irlum, iglum, iblum + s, 0, 0
0, 0, 0, 1, 0
]
adjustContrast: (r, g = r, b = r)->
@concat [
1 + r, 0, 0, 0, -0x80 * r
0, 1 + g, 0, 0, -0x80 * g
0, 0, 1 + b, 0, -0x80 * b
0, 0, 0, 1, 0
]
adjustBrightness: (r, g = r, b = r)->
@concat [
1, 0, 0, 0, 0xff * r
0, 1, 0, 0, 0xff * g
0, 0, 1, 0, 0xff * b
0, 0, 0, 1, 0
]
adjustHue: (degree)->
R = _LUMA_R
G = _LUMA_G
B = _LUMA_B
degree *= _RAD
c = Math.cos degree
s = Math.sin degree
l = 1 - c
m = l - s
n = l + s
@concat [
R * m + c, G * m, B * m + s, 0, 0
R * l + s * 0.143, G * l + c + s * 0.14, B * l + s * -0.283, 0, 0
R * n - s, G * n, B * n + c, 0, 0
0, 0, 0, 1, 0
]
rotateHue: (degree)->
@_initHue()
@concat @_preHue.matrix
@rotateBlue degree
@concat @_postHue.matrix
luminance2Alpha: ->
@concat [
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
_LUMA_R, _LUMA_G, _LUMA_B, 0, 0
]
adjustAlphaContrast: (amount)->
@concat [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, amount + 1, -0x80 * amount
]
colorize: (rgb, amount = 1)->
R = _LUMA_R
G = _LUMA_G
B = _LUMA_B
r = ((rgb >> 16) & 0xFF) / 0xFF
g = ((rgb >> 8) & 0xFF) / 0xFF
b = (rgb & 0xFF) / 0xFF
invAmount = 1 - amount
@concat [
invAmount + amount * r * R, amount * r * G, amount * r * B, 0, 0,
amount * g * R, invAmount + amount * g * G, amount * g * B, 0, 0,
amount * b * R, amount * b * G, invAmount + amount * b * B, 0, 0,
0, 0, 0, 1, 0
]
setChannels: (r = 1, g = 2, b = 4, a = 8)->
rf = (if ((r & 1) is 1) then 1 else 0) + (if ((r & 2) is 2) then 1 else 0) + (if ((r & 4) is 4) then 1 else 0) + (if ((r & 8) is 8) then 1 else 0)
rf = (1 / rf) if rf > 0
gf = (if ((g & 1) is 1) then 1 else 0) + (if ((g & 2) is 2) then 1 else 0) + (if ((g & 4) is 4) then 1 else 0) + (if ((g & 8) is 8) then 1 else 0)
gf = (1 / gf) if gf > 0
bf = (if ((b & 1) is 1) then 1 else 0) + (if ((b & 2) is 2) then 1 else 0) + (if ((b & 4) is 4) then 1 else 0) + (if ((b & 8) is 8) then 1 else 0)
bf = (1 / bf) if bf > 0
af = (if ((a & 1) is 1) then 1 else 0) + (if ((a & 2) is 2) then 1 else 0) + (if ((a & 4) is 4) then 1 else 0) + (if ((a & 8) is 8) then 1 else 0)
af = (1 / af) if af > 0
@concat [
(if ((r & 1) is 1) then rf else 0), (if ((r & 2) is 2) then rf else 0), (if ((r & 4) is 4) then rf else 0), (if ((r & 8) is 8) then rf else 0), 0
(if ((g & 1) is 1) then gf else 0), (if ((g & 2) is 2) then gf else 0), (if ((g & 4) is 4) then gf else 0), (if ((g & 8) is 8) then gf else 0), 0
(if ((b & 1) is 1) then bf else 0), (if ((b & 2) is 2) then bf else 0), (if ((b & 4) is 4) then bf else 0), (if ((b & 8) is 8) then bf else 0), 0
(if ((a & 1) is 1) then af else 0), (if ((a & 2) is 2) then af else 0), (if ((a & 4) is 4) then af else 0), (if ((a & 8) is 8) then af else 0), 0
]
blend: (matrix, amount)->
for v, i in matrix.matrix
@matrix[i] = @matrix[i] * (1 - amount) + v * amount
@
average: (r = _ONETHIRD, g = _ONETHIRD, b = _ONETHIRD)->
@concat [
r, g, b, 0, 0
r, g, b, 0, 0
r, g, b, 0, 0
0, 0, 0, 1, 0
]
threshold: (threshold, factor = 0x100)->
R = factor * _LUMA_R
G = factor * _LUMA_G
B = factor * _LUMA_B
t = -factor * threshold
@concat [
R, G, B, 0, t
R, G, B, 0, t
R, G, B, 0, t
0, 0, 0, 1, 0
]
desaturate: ->
R = _LUMA_R
G = _LUMA_G
B = _LUMA_B
@concat [
R, G, B, 0, 0
R, G, B, 0, 0
R, G, B, 0, 0
0, 0, 0, 1, 0
]
randomize: (amount = 1)->
inv_amount = (1 - amount)
r1 = (inv_amount + (amount * (Math.random() - Math.random())))
g1 = (amount * (Math.random() - Math.random()))
b1 = (amount * (Math.random() - Math.random()))
o1 = ((amount * 0xFF) * (Math.random() - Math.random()))
r2 = (amount * (Math.random() - Math.random()))
g2 = (inv_amount + (amount * (Math.random() - Math.random())))
b2 = (amount * (Math.random() - Math.random()))
o2 = ((amount * 0xFF) * (Math.random() - Math.random()))
r3 = (amount * (Math.random() - Math.random()))
g3 = (amount * (Math.random() - Math.random()))
b3 = (inv_amount + (amount * (Math.random() - Math.random())))
o3 = ((amount * 0xFF) * (Math.random() - Math.random()))
@concat [
r1, g1, b1, 0, o1
r2, g2, b2, 0, o2
r3, g3, b3, 0, o3
0, 0, 0, 1, 0
]
setMultiplicators: (r = 1, g = 1, b = 1, a = 1)->
@concat [
r, 0, 0, 0, 0
0, g, 0, 0, 0
0, 0, b, 0, 0
0, 0, 0, a, 0
]
clearChannels: (r = false, g = false, b = false, a = false)->
@matrix[0] = @matrix[1] = @matrix[2] = @matrix[3] = @matrix[4] = 0 if r
@matrix[5] = @matrix[6] = @matrix[7] = @matrix[8] = @matrix[9] = 0 if g
@matrix[10] = @matrix[11] = @matrix[12] = @matrix[13] = @matrix[14] = 0 if b
@matrix[15] = @matrix[16] = @matrix[17] = @matrix[18] = @matrix[19] = 0 if a
thresholdAlpha: (threshold, factor = 0x100)->
@concat [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, factor, -factor * threshold
]
averageRGB2Alpha: ->
@concat [
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
_ONETHIRD, _ONETHIRD, _ONETHIRD, 0, 0
]
invertAlpha: ->
@concat [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, -1, 0xff
]
rgb2Alpha: (r, g, b)->
@concat [
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
0, 0, 0, 0, 0xff
r, g, b, 0, 0
]
setAlpha: (alpha)->
@concat [
1, 0, 0, 0, 0
0, 1, 0, 0, 0
0, 0, 1, 0, 0
0, 0, 0, alpha, 0
]
rotateRed: (degree)->
@_rotateColor degree, 2, 1
rotateGreen: (degree)->
@_rotateColor degree, 0, 2
rotateBlue: (degree)->
@_rotateColor degree, 1, 0
_rotateColor: (degree, x, y)->
degree *= _RAD
mat = _IDENTITY.concat()
mat[x + x * 5] = mat[y + y * 5] = Math.cos degree
mat[y + x * 5] = Math.sin degree
mat[x + y * 5] = -Math.sin degree
@concat mat
shearRed: (green, blue)->
@_shearColor 0, 1, green, 2, blue
shearGreen: (red, blue)->
@_shearColor 1, 0, red, 2, blue
shearBlue: (red, green)->
@_shearColor 2, 0, red, 1, green
_shearColor: (x, y1, d1, y2, d2)->
mat = _IDENTITY.concat()
mat[y1 + x * 5] = d1
mat[y2 + x * 5] = d2
@concat mat
applyColorDeficiency: (type)->
# the values of this method are copied from http:#www.nofunc.com/Color_Matrix_Library/
switch type
when 'Protanopia'
@concat [
0.567, 0.433, 0.0, 0.0, 0.0
0.558, 0.442, 0.0, 0.0, 0.0
0.0, 0.242, 0.758, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Protanomaly'
@concat [
0.817, 0.183, 0.0, 0.0, 0.0
0.333, 0.667, 0.0, 0.0, 0.0
0.0, 0.125, 0.875, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Deuteranopia'
@concat [
0.625, 0.375, 0.0, 0.0, 0.0
0.7, 0.3, 0.0, 0.0, 0.0
0.0, 0.3, 0.7, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Deuteranomaly'
@concat [
0.8, 0.2, 0.0, 0.0, 0.0
0.258, 0.742, 0.0, 0.0, 0.0
0.0, 0.142, 0.858, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Tritanopia'
@concat [
0.95, 0.05, 0.0, 0.0, 0.0
0.0, 0.433, 0.567, 0.0, 0.0
0.0, 0.475, 0.525, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Tritanomaly'
@concat [
0.967, 0.033, 0.0, 0.0, 0.0
0.0, 0.733, 0.267, 0.0, 0.0
0.0, 0.183, 0.817, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Achromatopsia'
@concat [
0.299, 0.587, 0.114, 0.0, 0.0
0.299, 0.587, 0.114, 0.0, 0.0
0.299, 0.587, 0.114, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
when 'Achromatomaly'
@concat [
0.618, 0.320, 0.062, 0.0, 0.0
0.163, 0.775, 0.062, 0.0, 0.0
0.163, 0.320, 0.516, 0.0, 0.0
0.0, 0.0, 0.0, 1.0, 0.0
]
break
applyMatrix: (rgba)->
a = ( rgba >>> 24 ) & 0xff
r = ( rgba >>> 16 ) & 0xff
g = ( rgba >>> 8 ) & 0xff
b = rgba & 0xff
m = @matrix
r2 = 0.5 + r * m[0] + g * m[1] + b * m[2] + a * m[3] + m[4]
g2 = 0.5 + r * m[5] + g * m[6] + b * m[7] + a * m[8] + m[9]
b2 = 0.5 + r * m[10] + g * m[11] + b * m[12] + a * m[13] + m[14]
a2 = 0.5 + r * m[15] + g * m[16] + b * m[17] + a * m[18] + m[19]
a2 = 0 if a2 < 0
a2 = 0xff if a2 > 0xff
r2 = 0 if r2 < 0
r2 = 0xff if r2 > 0xff
g2 = 0 if g2 < 0
g2 = 0xff if g2 > 0xff
b2 = 0 if b2 < 0
b2 = 0xff if b2 > 0xff
a2 << 24 | r2 << 16 | g2 << 8 | b2
transformVector: (values)->
throw new ErrorMessage "values length isn't 4" if values.length isnt 4
m = @matrix
sR = values[0]
sG = values[1]
sB = values[2]
sA = values[3]
oR = sR * m[0] + sG * m[1] + sB * m[2] + sA * m[3] + m[4]
oG = sR * m[5] + sG * m[6] + sB * m[7] + sA * m[8] + m[9]
oB = sR * m[10] + sG * m[11] + sB * m[12] + sA * m[13] + m[14]
oA = sR * m[15] + sG * m[16] + sB * m[17] + sA * m[18] + m[19]
values[0] = oR
values[1] = oG
values[2] = oB
values[3] = oA
_initHue: ->
#greenRotation = 35.0
greenRotation = 39.182655
unless @_hueInitialized
@_hueInitialized = true
@_preHue = new ColorMatrix()
@_preHue.rotateRed(45)
@_preHue.rotateGreen(-greenRotation)
lum = [
_LUMA_R2
_LUMA_G2
_LUMA_B2
1.0
]
@_preHue.transformVector(lum)
red = lum[0] / lum[2]
green = lum[1] / lum[2]
@_preHue.shearBlue red, green
@_postHue = new ColorMatrix()
@_postHue.shearBlue(-red, -green)
@_postHue.rotateGreen greenRotation
@_postHue.rotateRed -45.0
|
[
{
"context": "xt: \"Test 1\"\n headers:\n \"message-id\": \"<mail1@example.org>\"\n\nMAIL2 =\n flags:[]\n subject: \"RE: Test 1\"",
"end": 1788,
"score": 0.9999204277992249,
"start": 1771,
"tag": "EMAIL",
"value": "mail1@example.org"
},
{
"context": " \"RE: Test 1\"\n... | tests/08_conversations.coffee | gelnior/cozy-emails | 58 |
should = require('should')
Message = require '../server/models/message'
Mailbox = require '../server/models/mailbox'
Account = require '../server/models/account'
async = require 'async'
TESTBOXID = 'test-conversations-mailbox2'
describe 'Conversation tests', ->
testMailbox = null
testAccount = null
ids = []
before ->
testMailbox = new Mailbox
accountID: 'test-conversations-account'
id: TESTBOXID
_id: TESTBOXID
path: '/yolo'
testAccount = new Account
id: 'test-conversations-account'
after (done) ->
async.eachSeries ids, (id, cb) ->
Message.destroy id, cb
, done
it 'handle a conversation fetched in reverse order', (done) ->
async.series [
(cb) -> Message.createFromImapMessage MAIL3, testMailbox, 3, cb
(cb) -> Message.createFromImapMessage MAIL2, testMailbox, 2, cb
(cb) -> Message.createFromImapMessage MAIL1, testMailbox, 1, cb
(cb) -> testAccount.applyPatchConversation cb
], (err, [mail1, mail2, mail3]) ->
return done err if err
Message.rawRequest 'byMailboxRequest',
startkey: ['uid', TESTBOXID]
endkey: ['uid', TESTBOXID, {}]
reduce: false
include_docs: true
, (err, rows) ->
return done err if err
ids = rows.map (row) -> row.id
conversationID = rows[0].doc.conversationID
for row in rows
conversationID.should.equal row.doc.conversationID
done null
MAIL1 =
flags:[]
subject: "Test 1"
text: "Test 1"
headers:
"message-id": "<mail1@example.org>"
MAIL2 =
flags:[]
subject: "RE: Test 1"
text: "Test 2"
inReplyTo: ["mail1@example.org"]
references: ["mail1@example.org"]
headers:
"message-id": "<mail2@example.org>"
MAIL3 =
flags:[]
subject: "RE: RE: Test 1"
text: "Test 3"
inReplyTo: ["mail2@example.org"]
references: ["mail1@example.org", "mail2@example.org"]
headers:
"message-id": "<mail3@example.org>"
| 90353 |
should = require('should')
Message = require '../server/models/message'
Mailbox = require '../server/models/mailbox'
Account = require '../server/models/account'
async = require 'async'
TESTBOXID = 'test-conversations-mailbox2'
describe 'Conversation tests', ->
testMailbox = null
testAccount = null
ids = []
before ->
testMailbox = new Mailbox
accountID: 'test-conversations-account'
id: TESTBOXID
_id: TESTBOXID
path: '/yolo'
testAccount = new Account
id: 'test-conversations-account'
after (done) ->
async.eachSeries ids, (id, cb) ->
Message.destroy id, cb
, done
it 'handle a conversation fetched in reverse order', (done) ->
async.series [
(cb) -> Message.createFromImapMessage MAIL3, testMailbox, 3, cb
(cb) -> Message.createFromImapMessage MAIL2, testMailbox, 2, cb
(cb) -> Message.createFromImapMessage MAIL1, testMailbox, 1, cb
(cb) -> testAccount.applyPatchConversation cb
], (err, [mail1, mail2, mail3]) ->
return done err if err
Message.rawRequest 'byMailboxRequest',
startkey: ['uid', TESTBOXID]
endkey: ['uid', TESTBOXID, {}]
reduce: false
include_docs: true
, (err, rows) ->
return done err if err
ids = rows.map (row) -> row.id
conversationID = rows[0].doc.conversationID
for row in rows
conversationID.should.equal row.doc.conversationID
done null
MAIL1 =
flags:[]
subject: "Test 1"
text: "Test 1"
headers:
"message-id": "<<EMAIL>>"
MAIL2 =
flags:[]
subject: "RE: Test 1"
text: "Test 2"
inReplyTo: ["<EMAIL>"]
references: ["<EMAIL>"]
headers:
"message-id": "<<EMAIL>>"
MAIL3 =
flags:[]
subject: "RE: RE: Test 1"
text: "Test 3"
inReplyTo: ["<EMAIL>"]
references: ["<EMAIL>", "<EMAIL>"]
headers:
"message-id": "<<EMAIL>>"
| true |
should = require('should')
Message = require '../server/models/message'
Mailbox = require '../server/models/mailbox'
Account = require '../server/models/account'
async = require 'async'
TESTBOXID = 'test-conversations-mailbox2'
describe 'Conversation tests', ->
testMailbox = null
testAccount = null
ids = []
before ->
testMailbox = new Mailbox
accountID: 'test-conversations-account'
id: TESTBOXID
_id: TESTBOXID
path: '/yolo'
testAccount = new Account
id: 'test-conversations-account'
after (done) ->
async.eachSeries ids, (id, cb) ->
Message.destroy id, cb
, done
it 'handle a conversation fetched in reverse order', (done) ->
async.series [
(cb) -> Message.createFromImapMessage MAIL3, testMailbox, 3, cb
(cb) -> Message.createFromImapMessage MAIL2, testMailbox, 2, cb
(cb) -> Message.createFromImapMessage MAIL1, testMailbox, 1, cb
(cb) -> testAccount.applyPatchConversation cb
], (err, [mail1, mail2, mail3]) ->
return done err if err
Message.rawRequest 'byMailboxRequest',
startkey: ['uid', TESTBOXID]
endkey: ['uid', TESTBOXID, {}]
reduce: false
include_docs: true
, (err, rows) ->
return done err if err
ids = rows.map (row) -> row.id
conversationID = rows[0].doc.conversationID
for row in rows
conversationID.should.equal row.doc.conversationID
done null
MAIL1 =
flags:[]
subject: "Test 1"
text: "Test 1"
headers:
"message-id": "<PI:EMAIL:<EMAIL>END_PI>"
MAIL2 =
flags:[]
subject: "RE: Test 1"
text: "Test 2"
inReplyTo: ["PI:EMAIL:<EMAIL>END_PI"]
references: ["PI:EMAIL:<EMAIL>END_PI"]
headers:
"message-id": "<PI:EMAIL:<EMAIL>END_PI>"
MAIL3 =
flags:[]
subject: "RE: RE: Test 1"
text: "Test 3"
inReplyTo: ["PI:EMAIL:<EMAIL>END_PI"]
references: ["PI:EMAIL:<EMAIL>END_PI", "PI:EMAIL:<EMAIL>END_PI"]
headers:
"message-id": "<PI:EMAIL:<EMAIL>END_PI>"
|
[
{
"context": " q=\n userid: uid\n password: pass\n ss.rpc \"user.newentry\", q,(result)->\n ",
"end": 814,
"score": 0.9981934428215027,
"start": 810,
"tag": "PASSWORD",
"value": "pass"
},
{
"context": " localStorage.setItem \"password... | client/code/pages/top.coffee | YagamiMirai/jinrou | 0 | exports.start=->
$("#loginform").submit (je)->
je.preventDefault()
form=je.target
Index.app.login form.elements["userid"].value, form.elements["password"].value,(result)->
if result
if form.elements["remember_me"].checked
# 記憶
localStorage.setItem "userid",form.elements["userid"].value
localStorage.setItem "password", form.elements["password"].value
Index.app.showUrl "/my"
else
$("#loginerror").text "账号或密码错误。"
$("#newentryform").submit (je)->
je.preventDefault()
form=je.target
uid = form.elements["userid"].value
pass = form.elements["password"].value
q=
userid: uid
password: pass
ss.rpc "user.newentry", q,(result)->
if result?.error?
$("#newentryerror").text result.error
return
Index.app.processLoginResult uid, result, (success)->
if success
localStorage.setItem "userid", uid
localStorage.setItem "password", pass
Index.app.showUrl "/my"
| 47492 | exports.start=->
$("#loginform").submit (je)->
je.preventDefault()
form=je.target
Index.app.login form.elements["userid"].value, form.elements["password"].value,(result)->
if result
if form.elements["remember_me"].checked
# 記憶
localStorage.setItem "userid",form.elements["userid"].value
localStorage.setItem "password", form.elements["password"].value
Index.app.showUrl "/my"
else
$("#loginerror").text "账号或密码错误。"
$("#newentryform").submit (je)->
je.preventDefault()
form=je.target
uid = form.elements["userid"].value
pass = form.elements["password"].value
q=
userid: uid
password: <PASSWORD>
ss.rpc "user.newentry", q,(result)->
if result?.error?
$("#newentryerror").text result.error
return
Index.app.processLoginResult uid, result, (success)->
if success
localStorage.setItem "userid", uid
localStorage.setItem "password", <PASSWORD>
Index.app.showUrl "/my"
| true | exports.start=->
$("#loginform").submit (je)->
je.preventDefault()
form=je.target
Index.app.login form.elements["userid"].value, form.elements["password"].value,(result)->
if result
if form.elements["remember_me"].checked
# 記憶
localStorage.setItem "userid",form.elements["userid"].value
localStorage.setItem "password", form.elements["password"].value
Index.app.showUrl "/my"
else
$("#loginerror").text "账号或密码错误。"
$("#newentryform").submit (je)->
je.preventDefault()
form=je.target
uid = form.elements["userid"].value
pass = form.elements["password"].value
q=
userid: uid
password: PI:PASSWORD:<PASSWORD>END_PI
ss.rpc "user.newentry", q,(result)->
if result?.error?
$("#newentryerror").text result.error
return
Index.app.processLoginResult uid, result, (success)->
if success
localStorage.setItem "userid", uid
localStorage.setItem "password", PI:PASSWORD:<PASSWORD>END_PI
Index.app.showUrl "/my"
|
[
{
"context": "ly link, images, summary) from Wikipedia\n# @author Torstein Thune\nhttp = require('http')\nfs = require('node-fs')\nWi",
"end": 102,
"score": 0.9998835921287537,
"start": 88,
"tag": "NAME",
"value": "Torstein Thune"
}
] | scraper.coffee | team-fez/wikipedia-scraper | 0 | # Get more data about a species (namely link, images, summary) from Wikipedia
# @author Torstein Thune
http = require('http')
fs = require('node-fs')
Wiki = require('wikijs')
species = []
speciesWithData = []
getPageInfo = (thing) ->
if not thing
writeFile()
else
console.log "\n\n#{thing?.Name} (#{species.length} left)"
thing.wikipediaUrl = ""
thing.summary = ""
thing.images = []
try
Wiki.page(thing.Name, (err, page) ->
unless page?
console.log ' -> no page'
speciesWithData.push thing
getPageInfo(species.shift())
else
thing.wikipediaUrl = page?.url or= ""
page?.summary((err,summary) ->
thing.summary = summary or= ""
page?.images((err, images) ->
thing.images = images or= []
speciesWithData.push thing
getPageInfo(species.shift())
# page?.categories((err, categories) ->
# thing.categories = categories
# )
# try
# page?.infobox((err, info) ->
# unless err
# console.log 'info'
# thing.info = info
# speciesWithData.push(thing)
# if species.length > 0
# getPageInfo(species.shift())
# )
# catch e
)
)
)
catch e
console.log ' -> error'
speciesWithData.push thing
getPageInfo(species.shift())
writeFile = () ->
fs.writeFile('wikiscrape.json', JSON.stringify(speciesWithData, false, '\t'), (err) ->
console.log 'we are done =)' unless err
)
fs.readFile('species.json', 'utf-8', (err, data) =>
species = JSON.parse(data)
console.log "We have #{species.length} species"
speciesWithData = []
getPageInfo(species.shift())
)
| 38663 | # Get more data about a species (namely link, images, summary) from Wikipedia
# @author <NAME>
http = require('http')
fs = require('node-fs')
Wiki = require('wikijs')
species = []
speciesWithData = []
getPageInfo = (thing) ->
if not thing
writeFile()
else
console.log "\n\n#{thing?.Name} (#{species.length} left)"
thing.wikipediaUrl = ""
thing.summary = ""
thing.images = []
try
Wiki.page(thing.Name, (err, page) ->
unless page?
console.log ' -> no page'
speciesWithData.push thing
getPageInfo(species.shift())
else
thing.wikipediaUrl = page?.url or= ""
page?.summary((err,summary) ->
thing.summary = summary or= ""
page?.images((err, images) ->
thing.images = images or= []
speciesWithData.push thing
getPageInfo(species.shift())
# page?.categories((err, categories) ->
# thing.categories = categories
# )
# try
# page?.infobox((err, info) ->
# unless err
# console.log 'info'
# thing.info = info
# speciesWithData.push(thing)
# if species.length > 0
# getPageInfo(species.shift())
# )
# catch e
)
)
)
catch e
console.log ' -> error'
speciesWithData.push thing
getPageInfo(species.shift())
writeFile = () ->
fs.writeFile('wikiscrape.json', JSON.stringify(speciesWithData, false, '\t'), (err) ->
console.log 'we are done =)' unless err
)
fs.readFile('species.json', 'utf-8', (err, data) =>
species = JSON.parse(data)
console.log "We have #{species.length} species"
speciesWithData = []
getPageInfo(species.shift())
)
| true | # Get more data about a species (namely link, images, summary) from Wikipedia
# @author PI:NAME:<NAME>END_PI
http = require('http')
fs = require('node-fs')
Wiki = require('wikijs')
species = []
speciesWithData = []
getPageInfo = (thing) ->
if not thing
writeFile()
else
console.log "\n\n#{thing?.Name} (#{species.length} left)"
thing.wikipediaUrl = ""
thing.summary = ""
thing.images = []
try
Wiki.page(thing.Name, (err, page) ->
unless page?
console.log ' -> no page'
speciesWithData.push thing
getPageInfo(species.shift())
else
thing.wikipediaUrl = page?.url or= ""
page?.summary((err,summary) ->
thing.summary = summary or= ""
page?.images((err, images) ->
thing.images = images or= []
speciesWithData.push thing
getPageInfo(species.shift())
# page?.categories((err, categories) ->
# thing.categories = categories
# )
# try
# page?.infobox((err, info) ->
# unless err
# console.log 'info'
# thing.info = info
# speciesWithData.push(thing)
# if species.length > 0
# getPageInfo(species.shift())
# )
# catch e
)
)
)
catch e
console.log ' -> error'
speciesWithData.push thing
getPageInfo(species.shift())
writeFile = () ->
fs.writeFile('wikiscrape.json', JSON.stringify(speciesWithData, false, '\t'), (err) ->
console.log 'we are done =)' unless err
)
fs.readFile('species.json', 'utf-8', (err, data) =>
species = JSON.parse(data)
console.log "We have #{species.length} species"
speciesWithData = []
getPageInfo(species.shift())
)
|
[
{
"context": "sed under the MIT License\nDate: 11-08-2015\nAuthor: Julio Cesar Fausto\nSource: https://github.com/jcfausto/jcfausto-com-",
"end": 108,
"score": 0.9998710751533508,
"start": 90,
"tag": "NAME",
"value": "Julio Cesar Fausto"
},
{
"context": "or: Julio Cesar Fausto\nSourc... | app/assets/javascripts/components/portfolio_item.js.jsx.coffee | jcfausto/jcfausto-rails-website | 1 | ###
PortfolioItem React Component
Released under the MIT License
Date: 11-08-2015
Author: Julio Cesar Fausto
Source: https://github.com/jcfausto/jcfausto-com-rails
###
@PortfolioItem = React.createClass
getInitialState: ->
name: this.props.name
url: this.props.url
image_url: this.props.image_url
order: this.props.order
getImageUrl: ->
image_path(this.state.image_url)
render: ->
`<div className="item">
<a href={this.state.url} target="_blank">
<img src={this.getImageUrl()} />
</a>
<footer>
<span>
<a href={this.state.url} target="_blank">{this.state.name}</a>
</span>
</footer>
</div>`
| 81814 | ###
PortfolioItem React Component
Released under the MIT License
Date: 11-08-2015
Author: <NAME>
Source: https://github.com/jcfausto/jcfausto-com-rails
###
@PortfolioItem = React.createClass
getInitialState: ->
name: this.props.name
url: this.props.url
image_url: this.props.image_url
order: this.props.order
getImageUrl: ->
image_path(this.state.image_url)
render: ->
`<div className="item">
<a href={this.state.url} target="_blank">
<img src={this.getImageUrl()} />
</a>
<footer>
<span>
<a href={this.state.url} target="_blank">{this.state.name}</a>
</span>
</footer>
</div>`
| true | ###
PortfolioItem React Component
Released under the MIT License
Date: 11-08-2015
Author: PI:NAME:<NAME>END_PI
Source: https://github.com/jcfausto/jcfausto-com-rails
###
@PortfolioItem = React.createClass
getInitialState: ->
name: this.props.name
url: this.props.url
image_url: this.props.image_url
order: this.props.order
getImageUrl: ->
image_path(this.state.image_url)
render: ->
`<div className="item">
<a href={this.state.url} target="_blank">
<img src={this.getImageUrl()} />
</a>
<footer>
<span>
<a href={this.state.url} target="_blank">{this.state.name}</a>
</span>
</footer>
</div>`
|
[
{
"context": "###\n@authors\nNicholas McCready - https://twitter.com/nmccready\n# Brunt of the wo",
"end": 30,
"score": 0.9998947978019714,
"start": 13,
"tag": "NAME",
"value": "Nicholas McCready"
},
{
"context": "\n@authors\nNicholas McCready - https://twitter.com/nmccready\n# Brunt... | public/bower_components/angular-google-maps/src/coffee/directives/free-draw-polygons.coffee | arslannaseem/notasoft | 1 | ###
@authors
Nicholas McCready - https://twitter.com/nmccready
# Brunt of the work is in DrawFreeHandChildModel
###
angular.module('uiGmapgoogle-maps').directive 'uiGmapFreeDrawPolygons', [
'uiGmapApiFreeDrawPolygons',(FreeDrawPolygons) ->
new FreeDrawPolygons()
]
| 38315 | ###
@authors
<NAME> - https://twitter.com/nmccready
# Brunt of the work is in DrawFreeHandChildModel
###
angular.module('uiGmapgoogle-maps').directive 'uiGmapFreeDrawPolygons', [
'uiGmapApiFreeDrawPolygons',(FreeDrawPolygons) ->
new FreeDrawPolygons()
]
| true | ###
@authors
PI:NAME:<NAME>END_PI - https://twitter.com/nmccready
# Brunt of the work is in DrawFreeHandChildModel
###
angular.module('uiGmapgoogle-maps').directive 'uiGmapFreeDrawPolygons', [
'uiGmapApiFreeDrawPolygons',(FreeDrawPolygons) ->
new FreeDrawPolygons()
]
|
[
{
"context": "out_me']\n extraSignupFields: [\n { fieldName: 'firstName', fieldLabel: 'First name' }\n { fieldName: 'la",
"end": 284,
"score": 0.9967923760414124,
"start": 275,
"tag": "NAME",
"value": "firstName"
},
{
"context": "lds: [\n { fieldName: 'firstName', fieldLabe... | app/client/app.coffee | zocoi/hoahoa | 0 | # Client App Namespace
_.extend App, {}
App.helpers = {}
_.each App.helpers, (helper, key) ->
Handlebars.registerHelper key, helper
# Global config
Accounts.ui.config(
requestPermissions:
facebook: ['email', 'user_about_me']
extraSignupFields: [
{ fieldName: 'firstName', fieldLabel: 'First name' }
{ fieldName: 'lastName', fieldLabel: 'Last name' }
]
)
| 134853 | # Client App Namespace
_.extend App, {}
App.helpers = {}
_.each App.helpers, (helper, key) ->
Handlebars.registerHelper key, helper
# Global config
Accounts.ui.config(
requestPermissions:
facebook: ['email', 'user_about_me']
extraSignupFields: [
{ fieldName: '<NAME>', fieldLabel: '<NAME>' }
{ fieldName: '<NAME>', fieldLabel: '<NAME>' }
]
)
| true | # Client App Namespace
_.extend App, {}
App.helpers = {}
_.each App.helpers, (helper, key) ->
Handlebars.registerHelper key, helper
# Global config
Accounts.ui.config(
requestPermissions:
facebook: ['email', 'user_about_me']
extraSignupFields: [
{ fieldName: 'PI:NAME:<NAME>END_PI', fieldLabel: 'PI:NAME:<NAME>END_PI' }
{ fieldName: 'PI:NAME:<NAME>END_PI', fieldLabel: 'PI:NAME:<NAME>END_PI' }
]
)
|
[
{
"context": "###\n * @author \t\tAbdelhakim RAFIK\n * @version \tv1.0.1\n * @license \tMIT License\n * @",
"end": 33,
"score": 0.999893069267273,
"start": 17,
"tag": "NAME",
"value": "Abdelhakim RAFIK"
},
{
"context": "nse \tMIT License\n * @copyright \tCopyright (c) 2021 Abdelhakim... | src/database/migrations/2021060500005-create-article-tags.coffee | AbdelhakimRafik/Project | 1 | ###
* @author Abdelhakim RAFIK
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 Abdelhakim RAFIK
* @date June 2021
###
###
Create article-tags table migration
###
module.exports =
up: (queryInterface, Sequelize) ->
queryInterface.createTable 'Article-tags',
id:
allowNull: false
autoIncrement: true
primaryKey: true
type: Sequelize.INTEGER
articleId:
allowNull: false
type: Sequelize.INTEGER
references:
model: 'Articles'
key: 'id'
tagId:
allowNull: false
type: Sequelize.INTEGER
references:
model: 'Tags'
key: 'id'
status:
allowNull: false
type: Sequelize.BOOLEAN
defaultValue: true
createdAt:
allowNull: false
type: Sequelize.DATE
updatedAt:
allowNull: false
type: Sequelize.DATE
down: (queryInterface, Sequelize) ->
queryInterface.dropTable 'Article-tags' | 109059 | ###
* @author <NAME>
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 <NAME>
* @date June 2021
###
###
Create article-tags table migration
###
module.exports =
up: (queryInterface, Sequelize) ->
queryInterface.createTable 'Article-tags',
id:
allowNull: false
autoIncrement: true
primaryKey: true
type: Sequelize.INTEGER
articleId:
allowNull: false
type: Sequelize.INTEGER
references:
model: 'Articles'
key: 'id'
tagId:
allowNull: false
type: Sequelize.INTEGER
references:
model: 'Tags'
key: '<KEY>'
status:
allowNull: false
type: Sequelize.BOOLEAN
defaultValue: true
createdAt:
allowNull: false
type: Sequelize.DATE
updatedAt:
allowNull: false
type: Sequelize.DATE
down: (queryInterface, Sequelize) ->
queryInterface.dropTable 'Article-tags' | true | ###
* @author PI:NAME:<NAME>END_PI
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 PI:NAME:<NAME>END_PI
* @date June 2021
###
###
Create article-tags table migration
###
module.exports =
up: (queryInterface, Sequelize) ->
queryInterface.createTable 'Article-tags',
id:
allowNull: false
autoIncrement: true
primaryKey: true
type: Sequelize.INTEGER
articleId:
allowNull: false
type: Sequelize.INTEGER
references:
model: 'Articles'
key: 'id'
tagId:
allowNull: false
type: Sequelize.INTEGER
references:
model: 'Tags'
key: 'PI:KEY:<KEY>END_PI'
status:
allowNull: false
type: Sequelize.BOOLEAN
defaultValue: true
createdAt:
allowNull: false
type: Sequelize.DATE
updatedAt:
allowNull: false
type: Sequelize.DATE
down: (queryInterface, Sequelize) ->
queryInterface.dropTable 'Article-tags' |
[
{
"context": "rs', ->\n\n player = new Player new Account(15, \"Myself\"), false\n expect(currentPlayers.contains playe",
"end": 363,
"score": 0.7559561729431152,
"start": 357,
"tag": "NAME",
"value": "Myself"
},
{
"context": "rs', ->\n\n player = new Player new Account(10, ... | test/dummy/spec/javascripts/CurrentPlayers_spec.js.coffee | railsc0d0r/global8ball_game | 0 | #= require game/Game
Account = global8ball.Account
Player = global8ball.Player
CurrentPlayers = global8ball.config.CurrentPlayers
describe 'CurrentPlayers', ->
currentPlayers = new CurrentPlayers [ { user_id: 5 }, { user_id: 15 } ]
it 'contains players when their user ID is one of the current players', ->
player = new Player new Account(15, "Myself"), false
expect(currentPlayers.contains player).toBeTruthy()
it 'does not contain a player whose user ID is not one of the current players', ->
player = new Player new Account(10, "Someone"), false
expect(currentPlayers.contains player).toBeFalsy()
| 183162 | #= require game/Game
Account = global8ball.Account
Player = global8ball.Player
CurrentPlayers = global8ball.config.CurrentPlayers
describe 'CurrentPlayers', ->
currentPlayers = new CurrentPlayers [ { user_id: 5 }, { user_id: 15 } ]
it 'contains players when their user ID is one of the current players', ->
player = new Player new Account(15, "<NAME>"), false
expect(currentPlayers.contains player).toBeTruthy()
it 'does not contain a player whose user ID is not one of the current players', ->
player = new Player new Account(10, "Some<NAME>"), false
expect(currentPlayers.contains player).toBeFalsy()
| true | #= require game/Game
Account = global8ball.Account
Player = global8ball.Player
CurrentPlayers = global8ball.config.CurrentPlayers
describe 'CurrentPlayers', ->
currentPlayers = new CurrentPlayers [ { user_id: 5 }, { user_id: 15 } ]
it 'contains players when their user ID is one of the current players', ->
player = new Player new Account(15, "PI:NAME:<NAME>END_PI"), false
expect(currentPlayers.contains player).toBeTruthy()
it 'does not contain a player whose user ID is not one of the current players', ->
player = new Player new Account(10, "SomePI:NAME:<NAME>END_PI"), false
expect(currentPlayers.contains player).toBeFalsy()
|
[
{
"context": "#\t> File Name: authorization.coffee\n#\t> Author: LY\n#\t> Mail: ly.franky@gmail.com\n#\t> Created Time: W",
"end": 50,
"score": 0.9989620447158813,
"start": 48,
"tag": "USERNAME",
"value": "LY"
},
{
"context": "ame: authorization.coffee\n#\t> Author: LY\n#\t> Mail: ly... | server/routes/helpers/authorization.coffee | Booker-Z/MIAC-website | 0 | # > File Name: authorization.coffee
# > Author: LY
# > Mail: ly.franky@gmail.com
# > Created Time: Wednesday, November 19, 2014 PM03:00:57 CST
authorization = exports
###
* to validate if the user has logged in
* if not, return fail with status 401
###
authorization.requireLogin = (req, res, next)->
if not req.session.user
res.status(401).json {result: 'fail', msg: 'Please log in first.'}
else
next()
###
* to validate if the user has already logged in
* if logged, return fail with status 409
###
authorization.checkUserConflict = (req, res, next)->
if req.session and req.session.user
res.status(409).json {result: 'fail', msg: 'User conflict.'}
else
next()
| 75864 | # > File Name: authorization.coffee
# > Author: LY
# > Mail: <EMAIL>
# > Created Time: Wednesday, November 19, 2014 PM03:00:57 CST
authorization = exports
###
* to validate if the user has logged in
* if not, return fail with status 401
###
authorization.requireLogin = (req, res, next)->
if not req.session.user
res.status(401).json {result: 'fail', msg: 'Please log in first.'}
else
next()
###
* to validate if the user has already logged in
* if logged, return fail with status 409
###
authorization.checkUserConflict = (req, res, next)->
if req.session and req.session.user
res.status(409).json {result: 'fail', msg: 'User conflict.'}
else
next()
| true | # > File Name: authorization.coffee
# > Author: LY
# > Mail: PI:EMAIL:<EMAIL>END_PI
# > Created Time: Wednesday, November 19, 2014 PM03:00:57 CST
authorization = exports
###
* to validate if the user has logged in
* if not, return fail with status 401
###
authorization.requireLogin = (req, res, next)->
if not req.session.user
res.status(401).json {result: 'fail', msg: 'Please log in first.'}
else
next()
###
* to validate if the user has already logged in
* if logged, return fail with status 409
###
authorization.checkUserConflict = (req, res, next)->
if req.session and req.session.user
res.status(409).json {result: 'fail', msg: 'User conflict.'}
else
next()
|
[
{
"context": " unit-tests\n\n echo '#---> BUILDING GO WORKERS (@farslan) <---#'\n $KONFIG_PROJECTROOT/go/build.sh\n\n ",
"end": 595,
"score": 0.9997041821479797,
"start": 586,
"tag": "USERNAME",
"value": "(@farslan"
},
{
"context": "/go/build.sh\n\n echo '#---> BUILDI... | config/generateRunFile.coffee | lionheart1022/koding | 0 | traverse = require 'traverse'
log = console.log
fs = require 'fs'
os = require 'os'
path = require 'path'
{ isAllowed } = require '../deployment/grouptoenvmapping'
generateDev = (KONFIG, options) ->
options.requirementCommands ?= []
installScript = """
pushd $KONFIG_PROJECTROOT
git submodule update --init
npm install --unsafe-perm
echo '#---> BUILDING CLIENT <---#'
make -C $KONFIG_PROJECTROOT/client unit-tests
echo '#---> BUILDING GO WORKERS (@farslan) <---#'
$KONFIG_PROJECTROOT/go/build.sh
echo '#---> BUILDING SOCIALAPI (@cihangir) <---#'
pushd $KONFIG_PROJECTROOT/go/src/socialapi
make configure
# make install
echo '#---> AUTHORIZING THIS COMPUTER WITH MATCHING KITE.KEY (@farslan) <---#'
KITE_KEY=$KONFIG_KITEHOME/kite.key
mkdir $HOME/.kite &>/dev/null
echo copying $KITE_KEY to $HOME/.kite/kite.key
cp -f $KITE_KEY $HOME/.kite/kite.key
echo
echo
echo 'ALL DONE. Enjoy! :)'
echo
echo
"""
run = """
#!/bin/bash
# ------ THIS FILE IS AUTO-GENERATED ON EACH BUILD ----- #
export KONFIG_PROJECTROOT=$(cd $(dirname $0); pwd)
ENV_SHELL_FILE=${ENV_SHELL_FILE:-$(dirname $0)/.env.sh}
if [ -f "$ENV_SHELL_FILE" ]; then
source $ENV_SHELL_FILE
else
echo "error: shell environment file does not exist"
exit 1
fi
function is_ready () {
exit 0
}
mkdir $KONFIG_PROJECTROOT/.logs &>/dev/null
SERVICES="mongo redis postgres rabbitmq imply"
NGINX_CONF="$KONFIG_PROJECTROOT/nginx.conf"
NGINX_PID="$KONFIG_PROJECTROOT/nginx.pid"
#{options.requirementCommands?.join "\n"}
trap ctrl_c INT
function ctrl_c () {
supervisorctl shutdown
exit 1;
}
function nginxstop () {
if [ -a $NGINX_PID ]; then
echo "stopping nginx"
nginx -c $NGINX_CONF -g "pid $NGINX_PID;" -s quit
fi
}
function nginxrun () {
nginxstop
echo "starting nginx"
nginx -c $NGINX_CONF -g "pid $NGINX_PID;"
}
function checkrunfile () {
if [ "$KONFIG_PROJECTROOT/run" -ot "$KONFIG_PROJECTROOT/config/main.$KONFIG_CONFIGNAME.coffee" ]; then
echo your run file is older than your config file. doing ./configure.
sleep 1
./configure
echo -e "\n\nPlease do ./run again\n"
exit 1;
fi
if [ "$KONFIG_PROJECTROOT/run" -ot "$KONFIG_PROJECTROOT/configure" ]; then
echo your run file is older than your configure file. doing ./configure.
sleep 1
./configure
echo -e "\n\nPlease do ./run again\n"
exit 1;
fi
}
function apply_custom_pg_migrations () {
# we can remove these after https://github.com/mattes/migrate/issues/13
export PGPASSWORD=$KONFIG_POSTGRES_PASSWORD
PSQL_COMMAND="psql -tA -h $KONFIG_POSTGRES_HOST $KONFIG_POSTGRES_DBNAME -U $KONFIG_POSTGRES_USERNAME"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_type_constant_enum\" ADD VALUE IF NOT EXISTS 'collaboration';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_participant_status_constant_enum\" ADD VALUE IF NOT EXISTS 'blocked';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_type_constant_enum\" ADD VALUE IF NOT EXISTS 'linkedtopic';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_type_constant_enum\" ADD VALUE IF NOT EXISTS 'bot';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_message_type_constant_enum\" ADD VALUE IF NOT EXISTS 'bot';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_message_type_constant_enum\" ADD VALUE IF NOT EXISTS 'system';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'bootstrap';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'startup';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'enterprise';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'team_base';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'team_free';"
}
function run () {
# Check if PG DB schema update required
go run $KONFIG_PROJECTROOT/go/src/socialapi/tests/pg-update.go $KONFIG_POSTGRES_HOST $KONFIG_POSTGRES_PORT
RESULT=$?
if [ $RESULT -ne 0 ]; then
exit 1
fi
# Update node modules
if ! scripts/check-node_modules.sh; then
npm install --silent
fi
# Check everything else
check
# Remove old watcher files (do we still need this?)
rm -rf $KONFIG_PROJECTROOT/go/bin/goldorf-main-*
rm -rf $KONFIG_PROJECTROOT/go/bin/watcher-*
# Run Go builder
$KONFIG_PROJECTROOT/go/build.sh
# Run Social Api builder
make -C $KONFIG_PROJECTROOT/go/src/socialapi configure
# Do PG Migration if necessary
migrate up
# Sanitize email addresses
node $KONFIG_PROJECTROOT/scripts/sanitize-email
supervisord && sleep 1
# Show the all logs of workers
tail -fq ./.logs/*.log
}
function docker_compose() {
if ! which docker-compose; then
echo 'error: docker-compose is not found'
echo '$ pip install docker-compose'
exit 1
fi
local ENTRYPOINT="/opt/koding/scripts/bootstrap-container $@"
docker-compose run --entrypoint $ENTRYPOINT backend
}
function printHelp (){
echo "Usage: "
echo ""
echo " run : to start koding"
echo " run docker-compose : to start koding in docker-compose environment"
echo " run exec : to exec arbitrary commands"
echo " run install : to compile/install client and "
echo " run buildclient : to see of specified worker logs only"
echo " run logs : to see all workers logs"
echo " run log [worker] : to see of specified worker logs only"
echo " run buildservices : to initialize and start services"
echo " run resetdb : to reset databases"
echo " run services : to stop and restart services"
echo " run worker : to list workers"
echo " run printconfig : to print koding config environment variables (output in json via --json flag)"
echo " run worker [worker] : to run a single worker"
echo " run migrate [command] : to apply/revert database changes (command: [create|up|down|version|reset|redo|to|goto])"
echo " run mongomigrate [command]: to apply/revert mongo database changes (command: [create|up|down])"
echo " run importusers : to import koding user data"
echo " run nodeservertests : to run tests for node.js web server"
echo " run socialworkertests : to run tests for social worker"
echo " run nodetestfiles : to run a single test or all test files in a directory"
echo " run sanitize-email : to sanitize email"
echo " run help : to show this list"
echo ""
}
function migrate () {
apply_custom_pg_migrations
params=(create up down version reset redo to goto)
param=$1
case "${params[@]}" in *"$param"*)
;;
*)
echo "Error: Command not found: $param"
echo "Usage: run migrate COMMAND [arg]"
echo ""
echo "Commands: "
echo " create [filename] : create new migration file in path"
echo " up : apply all available migrations"
echo " down : roll back all migrations"
echo " redo : roll back the most recently applied migration, then run it again"
echo " reset : run down and then up command"
echo " version : show the current migration version"
echo " to [n] : (+n) apply the next n / (-n) roll back the previous n migrations"
echo " goto [n] : go to specific migration"
echo ""
exit 1
;;
esac
if [ "$param" == "to" ]; then
param="migrate"
elif [ "$param" == "create" ] && [ -z "$2" ]; then
echo "Please choose a migration file name. (ex. add_created_at_column_account)"
echo "Usage: run migrate create [filename]"
echo ""
exit 1
fi
$GOBIN/migrate -url "postgres://$KONFIG_POSTGRES_HOST:$KONFIG_POSTGRES_PORT/$KONFIG_POSTGRES_DBNAME?user=social_superuser&password=social_superuser" -path "$KONFIG_PROJECTROOT/go/src/socialapi/db/sql/migrations" $param $2
if [ "$param" == "create" ]; then
echo "Please edit created script files and add them to your repository."
fi
}
function mongomigrate () {
params=(create up down)
param=$1
echo $1
case "${params[@]}" in *"$param"*)
;;
*)
echo "Error: Command not found: $param"
echo "Usage: run migrate COMMAND [arg]"
echo ""
echo "Commands: "
echo " create [filename] : create new migration file under ./workers/migrations (ids will increase by 5)"
echo " up : apply all available migrations"
echo " down [id] : roll back to id (if not given roll back all migrations)"
echo ""
exit 1
;;
esac
if [ "$param" == "create" ] && [ -z "$2" ]; then
echo "Please choose a migration file name. (ex. add_super_user)"
echo "Usage: ./run mongomigrate create [filename]"
echo ""
exit 1
fi
node $KONFIG_PROJECTROOT/node_modules/mongodb-migrate -runmm --config ../deployment/generated_files/mongomigration.json --dbPropName conn -c $KONFIG_PROJECTROOT/workers $1 $2
if [ "$param" == "create" ]; then
echo "Please edit created script files and add them to your repository."
fi
}
function check (){
check_service_dependencies
if [[ `uname` == 'Darwin' ]]; then
if [ -z "$DOCKER_HOST" ]; then
echo "You need to export DOCKER_HOST, run 'boot2docker up' and follow the instructions. (or run 'eval $(docker-machine env default)')"
exit 1
fi
fi
mongo $KONFIG_MONGO --eval "db.stats()" > /dev/null # do a simple harmless command of some sort
RESULT=$? # returns 0 if mongo eval succeeds
if [ $RESULT -ne 0 ]; then
echo ""
echo "Can't talk to mongodb at $KONFIG_MONGO, is it not running? exiting."
exit 1
fi
EXISTS=$(PGPASSWORD=$KONFIG_POSTGRES_PASSWORD psql -tA -h $KONFIG_POSTGRES_HOST social -U $KONFIG_POSTGRES_USERNAME -c "Select 1 from pg_tables where tablename = 'key' AND schemaname = 'kite';")
if [[ $EXISTS != '1' ]]; then
echo ""
echo "You don't have the new Kontrol Postgres. Please call ./run buildservices."
exit 1
fi
}
function check_psql () {
command -v psql >/dev/null 2>&1 || { echo >&2 "I require psql but it's not installed. (brew install postgresql) Aborting."; exit 1; }
}
function check_service_dependencies () {
echo "checking required services: nginx, docker, mongo, graphicsmagick..."
command -v go >/dev/null 2>&1 || { echo >&2 "I require go but it's not installed. Aborting."; exit 1; }
command -v docker >/dev/null 2>&1 || { echo >&2 "I require docker but it's not installed. Aborting."; exit 1; }
command -v nginx >/dev/null 2>&1 || { echo >&2 "I require nginx but it's not installed. (brew install nginx maybe?) Aborting."; exit 1; }
command -v node >/dev/null 2>&1 || { echo >&2 "I require node but it's not installed. Aborting."; exit 1; }
command -v npm >/dev/null 2>&1 || { echo >&2 "I require npm but it's not installed. Aborting."; exit 1; }
command -v gulp >/dev/null 2>&1 || { echo >&2 "I require gulp but it's not installed. (npm i gulp -g) Aborting."; exit 1; }
command -v coffee >/dev/null 2>&1 || { echo >&2 "I require coffee-script but it's not installed. (npm i coffee-script -g) Aborting."; exit 1; }
check_psql
if [[ `uname` == 'Darwin' ]]; then
brew info graphicsmagick >/dev/null 2>&1 || { echo >&2 "I require graphicsmagick but it's not installed. Aborting."; exit 1; }
command -v boot2docker >/dev/null 2>&1 || command -v docker-machine >/dev/null 2>&1 || { echo >&2 "I require boot2docker but it's not installed. Aborting."; exit 1; }
elif [[ `uname` == 'Linux' ]]; then
command -v gm >/dev/null 2>&1 || { echo >&2 "I require graphicsmagick but it's not installed. Aborting."; exit 1; }
fi
set -o errexit
scripts/check-node-version.sh
scripts/check-npm-version.sh
scripts/check-gulp-version.sh
scripts/check-go-version.sh
scripts/check-supervisor.sh
set +o errexit
}
function waitPostgresReady() {
retries=60
while ! pg_isready -h $KONFIG_POSTGRES_HOST -U $KONFIG_POSTGRES_USERNAME; do
sleep 1
let retries--
if [ $retries == 0 ]; then
echo "time out while waiting for pg_isready"
exit 1
fi
echo "."
done
}
function waitMongoReady() {
retries=60
while ! mongo $KONFIG_MONGO --eval "db.stats()" > /dev/null 2>&1; do
sleep 1
let retries--
if [ $retries == 0 ]; then
echo "time out while waiting for mongo is ready"
exit 1
fi
echo "mongo is not reachable, trying again "
done
}
function runMongoDocker () {
docker run -d -p 27017:27017 --name=mongo koding/mongo-auto:latest
waitMongoReady
}
function runPostgresqlDocker () {
docker run -d -p 5432:5432 --name=postgres koding/postgres
waitPostgresReady
}
function run_docker_wrapper () {
if [[ `uname` == 'Darwin' ]]; then
command -v boot2docker >/dev/null 2>&1 && boot2docker up
command -v docker-machine >/dev/null 2>&1 && docker-machine start default || echo 1
fi
}
function build_services () {
run_docker_wrapper
echo "Stopping services: $SERVICES"
docker stop $SERVICES
echo "Removing services: $SERVICES"
docker rm $SERVICES
# Build postgres
pushd $KONFIG_PROJECTROOT/go/src/socialapi/db/sql
mkdir -p kontrol
sed -i -e "s/USER kontrolapplication/USER $KONFIG_KONTROL_POSTGRES_USERNAME/" kontrol/001-schema.sql
sed -i -e "s/PASSWORD 'kontrolapplication'/PASSWORD '$KONFIG_KONTROL_POSTGRES_PASSWORD'/" kontrol/001-schema.sql
sed -i -e "s/GRANT kontrol TO kontrolapplication/GRANT kontrol TO $KONFIG_KONTROL_POSTGRES_USERNAME/" kontrol/001-schema.sql
docker build -t koding/postgres .
git checkout kontrol/001-schema.sql
popd
runMongoDocker
docker run -d -p 5672:5672 -p 15672:15672 --name=rabbitmq rabbitmq:3-management
docker run -d -p 6379:6379 --name=redis redis
runPostgresqlDocker
docker run -d -p 18081-18110:8081-8110 -p 18200:8200 -p 19095:9095 --name=imply imply/imply:1.2.1
echo "#---> CLEARING ALGOLIA INDEXES: @chris <---#"
pushd $KONFIG_PROJECTROOT
./scripts/clear-algolia-index.sh -i "accounts$KONFIG_SOCIALAPI_ALGOLIA_INDEXSUFFIX"
./scripts/clear-algolia-index.sh -i "topics$KONFIG_SOCIALAPI_ALGOLIA_INDEXSUFFIX"
./scripts/clear-algolia-index.sh -i "messages$KONFIG_SOCIALAPI_ALGOLIA_INDEXSUFFIX"
migrate up
}
function services () {
run_docker_wrapper
EXISTS=$(docker inspect --format="{{ .State.Running }}" $SERVICES 2> /dev/null)
if [ $? -eq 1 ]; then
echo ""
echo "Some of containers are missing, please do ./run buildservices"
exit 1
fi
echo "Stopping services: $SERVICES"
docker stop $SERVICES
echo "Starting services: $SERVICES"
docker start $SERVICES
nginxrun
}
function importusers () {
node $KONFIG_PROJECTROOT/scripts/user-importer -c dev
migrateusers
}
function migrateusers () {
go run $KONFIG_PROJECTROOT/go/src/socialapi/workers/cmd/migrator/main.go -c $KONFIG_SOCIALAPI_CONFIGFILEPATH
}
function removeDockerByName () {
docker ps -all --quiet --filter name=$1 | xargs docker rm -f && echo deleted $1 image
}
function restoredefaultmongodump () {
removeDockerByName mongo
runMongoDocker
mongomigrate up
}
function restoredefaultpostgresdump () {
removeDockerByName postgres
runPostgresqlDocker
migrate up
migrateusers
}
function updatePermissions () {
echo '#---> UPDATING MONGO DATABASE ACCORDING TO LATEST CHANGES IN CODE (UPDATE PERMISSIONS @gokmen) <---#'
node $KONFIG_PROJECTROOT/scripts/permission-updater -c dev --reset
}
if [ "$#" == "0" ]; then
checkrunfile
run $1
elif [ "$1" == "is_ready" ]; then
is_ready
elif [ "$1" == "docker-compose" ]; then
shift
docker_compose
elif [ "$1" == "exec" ]; then
shift
exec "$@"
elif [ "$1" == "install" ]; then
check_service_dependencies
#{installScript}
elif [ "$1" == "printconfig" ]; then
printconfig $@
elif [[ "$1" == "log" || "$1" == "logs" ]]; then
trap - INT
trap
if [ "$2" == "" ]; then
tail -fq ./.logs/*.log
else
tail -fq ./.logs/$2.log
fi
elif [ "$1" == "cleanup" ]; then
./cleanup $@
elif [ "$1" == "buildclient" ]; then
make -C $KONFIG_PROJECTROOT/client dist
elif [ "$1" == "services" ]; then
check_service_dependencies
services
elif [ "$1" == "updatepermissions" ]; then
updatePermissions
elif [ "$1" == "resetdb" ]; then
if [ "$2" == "--yes" ]; then
restoredefaultmongodump
restoredefaultpostgresdump
exit 0
fi
read -p "This will reset current databases, all data will be lost! (y/N)" -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]
then
exit 1
fi
restoredefaultmongodump
restoredefaultpostgresdump
elif [ "$1" == "buildservices" ]; then
check_service_dependencies
if [ "$2" != "force" ]; then
read -p "This will destroy existing images, do you want to continue? (y/N)" -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
exit 1
fi
fi
build_services
migrate up
elif [ "$1" == "help" ]; then
printHelp
elif [ "$1" == "importusers" ]; then
importusers
elif [ "$1" == "worker" ]; then
if [ "$2" == "" ]; then
echo Available workers:
echo "-------------------"
# Print the first column (worker names) of supervisorctl output, sorted.
# The old quoting ('${print $1} | sort') put the pipe inside the awk
# program and used invalid awk syntax, so the listing never worked.
supervisorctl status | awk '{print $1}' | sort
else
trap - INT
trap
exec supervisorctl start $2
fi
elif [ "$1" == "migrate" ]; then
check_psql
if [ -z "$2" ]; then
echo "Please choose a migrate command [create|up|down|version|reset|redo|to|goto]"
echo ""
else
pushd $GOPATH/src/socialapi
make install-migrate
migrate $2 $3
fi
elif [ "$1" == "vmwatchertests" ]; then
go test koding/vmwatcher -test.v=true
elif [ "$1" == "gokodingconfigtests" ]; then
go test -v --race koding/kites/config
elif [ "$1" == "janitortests" ]; then
pushd $KONFIG_PROJECTROOT/go/src/koding/workers/janitor
./test.sh
elif [ "$1" == "gatheringestortests" ]; then
go test koding/workers/gatheringestor -test.v=true
elif [ "$1" == "gomodeltests" ]; then
go test koding/db/mongodb/modelhelper -test.v=true
elif [ "$1" == "kontroltests" ]; then
go test koding/kites/kontrol/kontrol -v
elif [ "$1" == "socialworkertests" ]; then
$KONFIG_PROJECTROOT/scripts/node-testing/mocha-runner "$KONFIG_PROJECTROOT/workers/social"
elif [ "$1" == "nodeservertests" ]; then
$KONFIG_PROJECTROOT/scripts/node-testing/mocha-runner "$KONFIG_PROJECTROOT/servers/lib/server"
# To run specific test directory or a single test file
elif [ "$1" == "nodetestfiles" ]; then
$KONFIG_PROJECTROOT/scripts/node-testing/mocha-runner $2
elif [ "$1" == "sanitize-email" ]; then
node $KONFIG_PROJECTROOT/scripts/sanitize-email
elif [ "$1" == "apply_custom_pg_migrations" ]; then
apply_custom_pg_migrations
elif [ "$1" == "is_pgready" ]; then
waitPostgresReady
elif [ "$1" == "is_mongoready" ]; then
waitMongoReady
elif [ "$1" == "mongomigrate" ]; then
mongomigrate $2 $3
else
echo "Unknown command: $1"
printHelp
fi
# ------ THIS FILE IS AUTO-GENERATED BY ./configure ----- #\n
"""
return run
# Generates the minimal run script used in sandbox and production: it only
# loads the shell environment file and execs the requested command.
generateSandbox = generateRunFile = (KONFIG) ->
return """
#!/bin/bash
export HOME=/home/ec2-user
ENV_SHELL_FILE=${ENV_SHELL_FILE:-$(dirname $0)/.env.sh}
if [ -f "$ENV_SHELL_FILE" ]; then
source $ENV_SHELL_FILE
else
echo "error: shell environment file does not exist"
exit 1
fi
COMMAND=$1
shift
case "$COMMAND" in
exec) exec "$@";;
esac
"""
# dev/default share the full development script; sandbox/prod share the
# minimal exec wrapper above.
module.exports = { dev: generateDev, default: generateDev, sandbox: generateSandbox, prod: generateSandbox }
| 26203 | traverse = require 'traverse'
log = console.log
fs = require 'fs'
os = require 'os'
path = require 'path'
{ isAllowed } = require '../deployment/grouptoenvmapping'
# Builds the contents of the auto-generated development `./run` script.
# KONFIG holds the build configuration; options.requirementCommands is an
# optional list of extra shell lines interpolated into the script preamble.
generateDev = (KONFIG, options) ->
# Callers may omit requirementCommands entirely; default to an empty list.
options.requirementCommands ?= []
# Shell snippet spliced into the `install` branch of the dispatch below:
# builds the client, go workers and socialapi, then installs a kite.key.
installScript = """
pushd $KONFIG_PROJECTROOT
git submodule update --init
npm install --unsafe-perm
echo '#---> BUILDING CLIENT <---#'
make -C $KONFIG_PROJECTROOT/client unit-tests
echo '#---> BUILDING GO WORKERS (@farslan) <---#'
$KONFIG_PROJECTROOT/go/build.sh
echo '#---> BUILDING SOCIALAPI (@cihangir) <---#'
pushd $KONFIG_PROJECTROOT/go/src/socialapi
make configure
# make install
echo '#---> AUTHORIZING THIS COMPUTER WITH MATCHING KITE.KEY (@farslan) <---#'
KITE_KEY=$KONFIG_KITEHOME/kite.key
mkdir $HOME/.kite &>/dev/null
echo copying $KITE_KEY to $HOME/.kite/kite.key
cp -f $KITE_KEY $HOME/.kite/kite.key
echo
echo
echo 'ALL DONE. Enjoy! :)'
echo
echo
"""
# The generated `run` script itself; emitted verbatim (plus interpolations)
# by ./configure.
run = """
#!/bin/bash
# ------ THIS FILE IS AUTO-GENERATED ON EACH BUILD ----- #
export KONFIG_PROJECTROOT=$(cd $(dirname $0); pwd)
ENV_SHELL_FILE=${ENV_SHELL_FILE:-$(dirname $0)/.env.sh}
if [ -f "$ENV_SHELL_FILE" ]; then
source $ENV_SHELL_FILE
else
echo "error: shell environment file does not exist"
exit 1
fi
function is_ready () {
exit 0
}
mkdir $KONFIG_PROJECTROOT/.logs &>/dev/null
SERVICES="mongo redis postgres rabbitmq imply"
NGINX_CONF="$KONFIG_PROJECTROOT/nginx.conf"
NGINX_PID="$KONFIG_PROJECTROOT/nginx.pid"
#{options.requirementCommands?.join "\n"}
trap ctrl_c INT
function ctrl_c () {
supervisorctl shutdown
exit 1;
}
# Stops a previously started nginx instance, if its pid file exists.
function nginxstop () {
if [ -a $NGINX_PID ]; then
echo "stopping nginx"
nginx -c $NGINX_CONF -g "pid $NGINX_PID;" -s quit
fi
}
# (Re)starts nginx with the generated config, stopping any old instance first.
function nginxrun () {
nginxstop
echo "starting nginx"
nginx -c $NGINX_CONF -g "pid $NGINX_PID;"
}
# Regenerates ./run when it is older than the config file or the configure
# script, then asks the user to invoke ./run again.
function checkrunfile () {
if [ "$KONFIG_PROJECTROOT/run" -ot "$KONFIG_PROJECTROOT/config/main.$KONFIG_CONFIGNAME.coffee" ]; then
echo your run file is older than your config file. doing ./configure.
sleep 1
./configure
echo -e "\n\nPlease do ./run again\n"
exit 1;
fi
if [ "$KONFIG_PROJECTROOT/run" -ot "$KONFIG_PROJECTROOT/configure" ]; then
echo your run file is older than your configure file. doing ./configure.
sleep 1
./configure
echo -e "\n\nPlease do ./run again\n"
exit 1;
fi
}
# Applies Postgres enum additions that the migration tool cannot manage.
# Every statement uses ADD VALUE IF NOT EXISTS, so re-running is harmless.
function apply_custom_pg_migrations () {
# we can remove these after https://github.com/mattes/migrate/issues/13
export PGPASSWORD=$KONFIG_POSTGRES_PASSWORD
PSQL_COMMAND="psql -tA -h $KONFIG_POSTGRES_HOST $KONFIG_POSTGRES_DBNAME -U $KONFIG_POSTGRES_USERNAME"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_type_constant_enum\" ADD VALUE IF NOT EXISTS 'collaboration';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_participant_status_constant_enum\" ADD VALUE IF NOT EXISTS 'blocked';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_type_constant_enum\" ADD VALUE IF NOT EXISTS 'linkedtopic';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_type_constant_enum\" ADD VALUE IF NOT EXISTS 'bot';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_message_type_constant_enum\" ADD VALUE IF NOT EXISTS 'bot';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_message_type_constant_enum\" ADD VALUE IF NOT EXISTS 'system';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'bootstrap';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'startup';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'enterprise';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'team_base';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'team_free';"
}
# Default action: verify schema/deps/builds are current, run migrations,
# then start all workers under supervisord and tail their logs.
function run () {
# Check if PG DB schema update required
go run $KONFIG_PROJECTROOT/go/src/socialapi/tests/pg-update.go $KONFIG_POSTGRES_HOST $KONFIG_POSTGRES_PORT
RESULT=$?
if [ $RESULT -ne 0 ]; then
exit 1
fi
# Update node modules
if ! scripts/check-node_modules.sh; then
npm install --silent
fi
# Check everything else
check
# Remove old watcher files (do we still need this?)
rm -rf $KONFIG_PROJECTROOT/go/bin/goldorf-main-*
rm -rf $KONFIG_PROJECTROOT/go/bin/watcher-*
# Run Go builder
$KONFIG_PROJECTROOT/go/build.sh
# Run Social Api builder
make -C $KONFIG_PROJECTROOT/go/src/socialapi configure
# Do PG Migration if necessary
migrate up
# Sanitize email addresses
node $KONFIG_PROJECTROOT/scripts/sanitize-email
supervisord && sleep 1
# Show the all logs of workers
tail -fq ./.logs/*.log
}
# Runs the backend service inside docker-compose, bootstrapping the
# container with the arguments given to ./run docker-compose.
function docker_compose() {
if ! which docker-compose; then
echo 'error: docker-compose is not found'
echo '$ pip install docker-compose'
exit 1
fi
local ENTRYPOINT="/opt/koding/scripts/bootstrap-container $@"
# Quote the entrypoint so docker-compose receives it as a single value;
# unquoted, its embedded spaces split it into several words and shift the
# service name out of position.
docker-compose run --entrypoint "$ENTRYPOINT" backend
}
# Prints usage information for every supported ./run subcommand.
function printHelp (){
echo "Usage: "
echo ""
echo " run : to start koding"
echo " run docker-compose : to start koding in docker-compose environment"
echo " run exec : to exec arbitrary commands"
echo " run install : to install dependencies and compile the client"
echo " run buildclient : to build the client code"
echo " run logs : to see all workers logs"
echo " run log [worker] : to see of specified worker logs only"
echo " run buildservices : to initialize and start services"
echo " run resetdb : to reset databases"
echo " run services : to stop and restart services"
echo " run worker : to list workers"
echo " run printconfig : to print koding config environment variables (output in json via --json flag)"
echo " run worker [worker] : to run a single worker"
echo " run migrate [command] : to apply/revert database changes (command: [create|up|down|version|reset|redo|to|goto])"
echo " run mongomigrate [command]: to apply/revert mongo database changes (command: [create|up|down])"
echo " run importusers : to import koding user data"
echo " run nodeservertests : to run tests for node.js web server"
echo " run socialworkertests : to run tests for social worker"
echo " run nodetestfiles : to run a single test or all test files in a directory"
echo " run sanitize-email : to sanitize email"
echo " run help : to show this list"
echo ""
}
# Validates and runs SQL migrations with the mattes/migrate tool, applying
# the custom enum patches first. $1 is the subcommand, $2 its argument.
function migrate () {
apply_custom_pg_migrations
params=(create up down version reset redo to goto)
param=$1
case "${params[@]}" in *"$param"*)
;;
*)
echo "Error: Command not found: $param"
echo "Usage: run migrate COMMAND [arg]"
echo ""
echo "Commands: "
echo " create [filename] : create new migration file in path"
echo " up : apply all available migrations"
echo " down : roll back all migrations"
echo " redo : roll back the most recently applied migration, then run it again"
echo " reset : run down and then up command"
echo " version : show the current migration version"
echo " to [n] : (+n) apply the next n / (-n) roll back the previous n migrations"
echo " goto [n] : go to specific migration"
echo ""
exit 1
;;
esac
if [ "$param" == "to" ]; then
param="migrate"
elif [ "$param" == "create" ] && [ -z "$2" ]; then
echo "Please choose a migration file name. (ex. add_created_at_column_account)"
echo "Usage: run migrate create [filename]"
echo ""
exit 1
fi
# Dev superuser credentials; an anonymizer placeholder had replaced the
# password in the connection URL, breaking every migrate invocation.
$GOBIN/migrate -url "postgres://$KONFIG_POSTGRES_HOST:$KONFIG_POSTGRES_PORT/$KONFIG_POSTGRES_DBNAME?user=social_superuser&password=social_superuser" -path "$KONFIG_PROJECTROOT/go/src/socialapi/db/sql/migrations" $param $2
if [ "$param" == "create" ]; then
echo "Please edit created script files and add them to your repository."
fi
}
# Validates and runs mongo migrations via the node mongodb-migrate tool.
# $1 is the subcommand (create|up|down), $2 an optional filename or id.
function mongomigrate () {
params=(create up down)
param=$1
echo $1
case "${params[@]}" in *"$param"*)
;;
*)
echo "Error: Command not found: $param"
echo "Usage: run mongomigrate COMMAND [arg]"
echo ""
echo "Commands: "
echo " create [filename] : create new migration file under ./workers/migrations (ids will increase by 5)"
echo " up : apply all available migrations"
echo " down [id] : roll back to id (if not given roll back all migrations)"
echo ""
exit 1
;;
esac
if [ "$param" == "create" ] && [ -z "$2" ]; then
echo "Please choose a migration file name. (ex. add_super_user)"
echo "Usage: ./run mongomigrate create [filename]"
echo ""
exit 1
fi
node $KONFIG_PROJECTROOT/node_modules/mongodb-migrate -runmm --config ../deployment/generated_files/mongomigration.json --dbPropName conn -c $KONFIG_PROJECTROOT/workers $1 $2
if [ "$param" == "create" ]; then
echo "Please edit created script files and add them to your repository."
fi
}
# Verifies that service dependencies are installed and that mongo and the
# kontrol Postgres schema are reachable before starting anything.
function check (){
check_service_dependencies
if [[ `uname` == 'Darwin' ]]; then
if [ -z "$DOCKER_HOST" ]; then
echo "You need to export DOCKER_HOST, run 'boot2docker up' and follow the instructions. (or run 'eval $(docker-machine env default)')"
exit 1
fi
fi
mongo $KONFIG_MONGO --eval "db.stats()" > /dev/null # do a simple harmless command of some sort
RESULT=$? # returns 0 if mongo eval succeeds
if [ $RESULT -ne 0 ]; then
echo ""
echo "Can't talk to mongodb at $KONFIG_MONGO, is it not running? exiting."
exit 1
fi
EXISTS=$(PGPASSWORD=$KONFIG_POSTGRES_PASSWORD psql -tA -h $KONFIG_POSTGRES_HOST social -U $KONFIG_POSTGRES_USERNAME -c "Select 1 from pg_tables where tablename = 'key' AND schemaname = 'kite';")
if [[ $EXISTS != '1' ]]; then
echo ""
echo "You don't have the new Kontrol Postgres. Please call ./run buildservices."
exit 1
fi
}
# Fails fast when the psql client binary is missing.
function check_psql () {
command -v psql >/dev/null 2>&1 || { echo >&2 "I require psql but it's not installed. (brew install postgresql) Aborting."; exit 1; }
}
# Verifies every binary the dev environment needs is on PATH, then runs the
# version-check scripts; errexit is enabled only around those scripts so any
# failing version check aborts immediately.
function check_service_dependencies () {
echo "checking required services: nginx, docker, mongo, graphicsmagick..."
command -v go >/dev/null 2>&1 || { echo >&2 "I require go but it's not installed. Aborting."; exit 1; }
command -v docker >/dev/null 2>&1 || { echo >&2 "I require docker but it's not installed. Aborting."; exit 1; }
command -v nginx >/dev/null 2>&1 || { echo >&2 "I require nginx but it's not installed. (brew install nginx maybe?) Aborting."; exit 1; }
command -v node >/dev/null 2>&1 || { echo >&2 "I require node but it's not installed. Aborting."; exit 1; }
command -v npm >/dev/null 2>&1 || { echo >&2 "I require npm but it's not installed. Aborting."; exit 1; }
command -v gulp >/dev/null 2>&1 || { echo >&2 "I require gulp but it's not installed. (npm i gulp -g) Aborting."; exit 1; }
command -v coffee >/dev/null 2>&1 || { echo >&2 "I require coffee-script but it's not installed. (npm i coffee-script -g) Aborting."; exit 1; }
check_psql
if [[ `uname` == 'Darwin' ]]; then
brew info graphicsmagick >/dev/null 2>&1 || { echo >&2 "I require graphicsmagick but it's not installed. Aborting."; exit 1; }
command -v boot2docker >/dev/null 2>&1 || command -v docker-machine >/dev/null 2>&1 || { echo >&2 "I require boot2docker but it's not installed. Aborting."; exit 1; }
elif [[ `uname` == 'Linux' ]]; then
command -v gm >/dev/null 2>&1 || { echo >&2 "I require graphicsmagick but it's not installed. Aborting."; exit 1; }
fi
set -o errexit
scripts/check-node-version.sh
scripts/check-npm-version.sh
scripts/check-gulp-version.sh
scripts/check-go-version.sh
scripts/check-supervisor.sh
set +o errexit
}
# Polls pg_isready for up to 60 seconds until Postgres accepts connections.
function waitPostgresReady() {
retries=60
while ! pg_isready -h $KONFIG_POSTGRES_HOST -U $KONFIG_POSTGRES_USERNAME; do
sleep 1
let retries--
if [ $retries == 0 ]; then
echo "time out while waiting for pg_isready"
exit 1
fi
echo "."
done
}
# Polls mongo for up to 60 seconds until db.stats() succeeds.
function waitMongoReady() {
retries=60
while ! mongo $KONFIG_MONGO --eval "db.stats()" > /dev/null 2>&1; do
sleep 1
let retries--
if [ $retries == 0 ]; then
echo "time out while waiting for mongo is ready"
exit 1
fi
echo "mongo is not reachable, trying again "
done
}
# Starts the mongo container and blocks until it is reachable.
function runMongoDocker () {
docker run -d -p 27017:27017 --name=mongo koding/mongo-auto:latest
waitMongoReady
}
# Starts the postgres container and blocks until it is reachable.
function runPostgresqlDocker () {
docker run -d -p 5432:5432 --name=postgres koding/postgres
waitPostgresReady
}
# On OS X, makes sure the docker VM (boot2docker or docker-machine) is up.
function run_docker_wrapper () {
if [[ `uname` == 'Darwin' ]]; then
command -v boot2docker >/dev/null 2>&1 && boot2docker up
command -v docker-machine >/dev/null 2>&1 && docker-machine start default || echo 1
fi
}
# Destroys and rebuilds all backing service containers, customizes the
# kontrol schema credentials in the postgres image, and clears the Algolia
# search indexes before reapplying migrations.
function build_services () {
run_docker_wrapper
echo "Stopping services: $SERVICES"
docker stop $SERVICES
echo "Removing services: $SERVICES"
docker rm $SERVICES
# Build postgres
pushd $KONFIG_PROJECTROOT/go/src/socialapi/db/sql
mkdir -p kontrol
sed -i -e "s/USER kontrolapplication/USER $KONFIG_KONTROL_POSTGRES_USERNAME/" kontrol/001-schema.sql
sed -i -e "s/PASSWORD 'kontrolapplication'/PASSWORD '$KONFIG_KONTROL_POSTGRES_PASSWORD'/" kontrol/001-schema.sql
sed -i -e "s/GRANT kontrol TO kontrolapplication/GRANT kontrol TO $KONFIG_KONTROL_POSTGRES_USERNAME/" kontrol/001-schema.sql
docker build -t koding/postgres .
git checkout kontrol/001-schema.sql
popd
runMongoDocker
docker run -d -p 5672:5672 -p 15672:15672 --name=rabbitmq rabbitmq:3-management
docker run -d -p 6379:6379 --name=redis redis
runPostgresqlDocker
docker run -d -p 18081-18110:8081-8110 -p 18200:8200 -p 19095:9095 --name=imply imply/imply:1.2.1
echo "#---> CLEARING ALGOLIA INDEXES: @chris <---#"
pushd $KONFIG_PROJECTROOT
./scripts/clear-algolia-index.sh -i "accounts$KONFIG_SOCIALAPI_ALGOLIA_INDEXSUFFIX"
./scripts/clear-algolia-index.sh -i "topics$KONFIG_SOCIALAPI_ALGOLIA_INDEXSUFFIX"
./scripts/clear-algolia-index.sh -i "messages$KONFIG_SOCIALAPI_ALGOLIA_INDEXSUFFIX"
migrate up
}
# Restarts the existing service containers and brings nginx back up;
# requires that ./run buildservices created the containers beforehand.
function services () {
run_docker_wrapper
EXISTS=$(docker inspect --format="{{ .State.Running }}" $SERVICES 2> /dev/null)
if [ $? -eq 1 ]; then
echo ""
echo "Some of containers are missing, please do ./run buildservices"
exit 1
fi
echo "Stopping services: $SERVICES"
docker stop $SERVICES
echo "Starting services: $SERVICES"
docker start $SERVICES
nginxrun
}
# Imports sample user data into mongo, then syncs it to postgres.
function importusers () {
node $KONFIG_PROJECTROOT/scripts/user-importer -c dev
migrateusers
}
# Runs the go migrator that copies mongo accounts into the social database.
function migrateusers () {
go run $KONFIG_PROJECTROOT/go/src/socialapi/workers/cmd/migrator/main.go -c $KONFIG_SOCIALAPI_CONFIGFILEPATH
}
# Force-removes any container whose name matches $1, running or stopped.
# The long flag --all is required: the old "-all" spelling parsed as the
# combined short flags -a -l -l, restricting output to the most recently
# created container instead of all of them.
function removeDockerByName () {
docker ps --all --quiet --filter name=$1 | xargs docker rm -f && echo deleted $1 image
}
# Rebuilds the mongo container from scratch and reapplies mongo migrations.
function restoredefaultmongodump () {
removeDockerByName mongo
runMongoDocker
mongomigrate up
}
# Rebuilds the postgres container and reapplies SQL and user migrations.
function restoredefaultpostgresdump () {
removeDockerByName postgres
runPostgresqlDocker
migrate up
migrateusers
}
# Resets mongo permission documents according to the latest code.
function updatePermissions () {
echo '#---> UPDATING MONGO DATABASE ACCORDING TO LATEST CHANGES IN CODE (UPDATE PERMISSIONS @gokmen) <---#'
node $KONFIG_PROJECTROOT/scripts/permission-updater -c dev --reset
}
# Top-level command dispatch: with no arguments, refresh and start koding;
# otherwise route to the matching subcommand handler.
if [ "$#" == "0" ]; then
checkrunfile
run $1
elif [ "$1" == "is_ready" ]; then
is_ready
elif [ "$1" == "docker-compose" ]; then
shift
docker_compose
elif [ "$1" == "exec" ]; then
shift
exec "$@"
elif [ "$1" == "install" ]; then
check_service_dependencies
#{installScript}
elif [ "$1" == "printconfig" ]; then
printconfig $@
elif [[ "$1" == "log" || "$1" == "logs" ]]; then
trap - INT
trap
if [ "$2" == "" ]; then
tail -fq ./.logs/*.log
else
tail -fq ./.logs/$2.log
fi
elif [ "$1" == "cleanup" ]; then
./cleanup $@
elif [ "$1" == "buildclient" ]; then
make -C $KONFIG_PROJECTROOT/client dist
elif [ "$1" == "services" ]; then
check_service_dependencies
services
elif [ "$1" == "updatepermissions" ]; then
updatePermissions
elif [ "$1" == "resetdb" ]; then
if [ "$2" == "--yes" ]; then
restoredefaultmongodump
restoredefaultpostgresdump
exit 0
fi
read -p "This will reset current databases, all data will be lost! (y/N)" -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]
then
exit 1
fi
restoredefaultmongodump
restoredefaultpostgresdump
elif [ "$1" == "buildservices" ]; then
check_service_dependencies
if [ "$2" != "force" ]; then
read -p "This will destroy existing images, do you want to continue? (y/N)" -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
exit 1
fi
fi
build_services
migrate up
elif [ "$1" == "help" ]; then
printHelp
elif [ "$1" == "importusers" ]; then
importusers
elif [ "$1" == "worker" ]; then
if [ "$2" == "" ]; then
echo Available workers:
echo "-------------------"
# Print the first column (worker names) of supervisorctl output, sorted.
# The old quoting ('${print $1} | sort') put the pipe inside the awk
# program and used invalid awk syntax, so the listing never worked.
supervisorctl status | awk '{print $1}' | sort
else
trap - INT
trap
exec supervisorctl start $2
fi
# Remaining dispatch branches: SQL/mongo migrations and the test runners.
elif [ "$1" == "migrate" ]; then
check_psql
if [ -z "$2" ]; then
echo "Please choose a migrate command [create|up|down|version|reset|redo|to|goto]"
echo ""
else
pushd $GOPATH/src/socialapi
make install-migrate
migrate $2 $3
fi
elif [ "$1" == "vmwatchertests" ]; then
go test koding/vmwatcher -test.v=true
elif [ "$1" == "gokodingconfigtests" ]; then
go test -v --race koding/kites/config
elif [ "$1" == "janitortests" ]; then
pushd $KONFIG_PROJECTROOT/go/src/koding/workers/janitor
./test.sh
elif [ "$1" == "gatheringestortests" ]; then
go test koding/workers/gatheringestor -test.v=true
elif [ "$1" == "gomodeltests" ]; then
go test koding/db/mongodb/modelhelper -test.v=true
elif [ "$1" == "kontroltests" ]; then
go test koding/kites/kontrol/kontrol -v
elif [ "$1" == "socialworkertests" ]; then
$KONFIG_PROJECTROOT/scripts/node-testing/mocha-runner "$KONFIG_PROJECTROOT/workers/social"
elif [ "$1" == "nodeservertests" ]; then
$KONFIG_PROJECTROOT/scripts/node-testing/mocha-runner "$KONFIG_PROJECTROOT/servers/lib/server"
# To run specific test directory or a single test file
elif [ "$1" == "nodetestfiles" ]; then
$KONFIG_PROJECTROOT/scripts/node-testing/mocha-runner $2
elif [ "$1" == "sanitize-email" ]; then
node $KONFIG_PROJECTROOT/scripts/sanitize-email
elif [ "$1" == "apply_custom_pg_migrations" ]; then
apply_custom_pg_migrations
elif [ "$1" == "is_pgready" ]; then
waitPostgresReady
elif [ "$1" == "is_mongoready" ]; then
waitMongoReady
elif [ "$1" == "mongomigrate" ]; then
mongomigrate $2 $3
else
echo "Unknown command: $1"
printHelp
fi
# ------ THIS FILE IS AUTO-GENERATED BY ./configure ----- #\n
"""
return run
# Generates the minimal run script used in sandbox and production: it only
# loads the shell environment file and execs the requested command.
generateSandbox = generateRunFile = (KONFIG) ->
return """
#!/bin/bash
export HOME=/home/ec2-user
ENV_SHELL_FILE=${ENV_SHELL_FILE:-$(dirname $0)/.env.sh}
if [ -f "$ENV_SHELL_FILE" ]; then
source $ENV_SHELL_FILE
else
echo "error: shell environment file does not exist"
exit 1
fi
COMMAND=$1
shift
case "$COMMAND" in
exec) exec "$@";;
esac
"""
# dev/default share the full development script; sandbox/prod share the
# minimal exec wrapper above.
module.exports = { dev: generateDev, default: generateDev, sandbox: generateSandbox, prod: generateSandbox }
| true | traverse = require 'traverse'
log = console.log
fs = require 'fs'
os = require 'os'
path = require 'path'
{ isAllowed } = require '../deployment/grouptoenvmapping'
generateDev = (KONFIG, options) ->
options.requirementCommands ?= []
installScript = """
pushd $KONFIG_PROJECTROOT
git submodule update --init
npm install --unsafe-perm
echo '#---> BUILDING CLIENT <---#'
make -C $KONFIG_PROJECTROOT/client unit-tests
echo '#---> BUILDING GO WORKERS (@farslan) <---#'
$KONFIG_PROJECTROOT/go/build.sh
echo '#---> BUILDING SOCIALAPI (@cihangir) <---#'
pushd $KONFIG_PROJECTROOT/go/src/socialapi
make configure
# make install
echo '#---> AUTHORIZING THIS COMPUTER WITH MATCHING KITE.KEY (@farslan) <---#'
KITE_KEY=$KONFIG_KITEHOME/kite.key
mkdir $HOME/.kite &>/dev/null
echo copying $KITE_KEY to $HOME/.kite/kite.key
cp -f $KITE_KEY $HOME/.kite/kite.key
echo
echo
echo 'ALL DONE. Enjoy! :)'
echo
echo
"""
run = """
#!/bin/bash
# ------ THIS FILE IS AUTO-GENERATED ON EACH BUILD ----- #
export KONFIG_PROJECTROOT=$(cd $(dirname $0); pwd)
ENV_SHELL_FILE=${ENV_SHELL_FILE:-$(dirname $0)/.env.sh}
if [ -f "$ENV_SHELL_FILE" ]; then
source $ENV_SHELL_FILE
else
echo "error: shell environment file does not exist"
exit 1
fi
function is_ready () {
exit 0
}
mkdir $KONFIG_PROJECTROOT/.logs &>/dev/null
SERVICES="mongo redis postgres rabbitmq imply"
NGINX_CONF="$KONFIG_PROJECTROOT/nginx.conf"
NGINX_PID="$KONFIG_PROJECTROOT/nginx.pid"
#{options.requirementCommands?.join "\n"}
trap ctrl_c INT
function ctrl_c () {
supervisorctl shutdown
exit 1;
}
function nginxstop () {
if [ -a $NGINX_PID ]; then
echo "stopping nginx"
nginx -c $NGINX_CONF -g "pid $NGINX_PID;" -s quit
fi
}
function nginxrun () {
nginxstop
echo "starting nginx"
nginx -c $NGINX_CONF -g "pid $NGINX_PID;"
}
function checkrunfile () {
if [ "$KONFIG_PROJECTROOT/run" -ot "$KONFIG_PROJECTROOT/config/main.$KONFIG_CONFIGNAME.coffee" ]; then
echo your run file is older than your config file. doing ./configure.
sleep 1
./configure
echo -e "\n\nPlease do ./run again\n"
exit 1;
fi
if [ "$KONFIG_PROJECTROOT/run" -ot "$KONFIG_PROJECTROOT/configure" ]; then
echo your run file is older than your configure file. doing ./configure.
sleep 1
./configure
echo -e "\n\nPlease do ./run again\n"
exit 1;
fi
}
function apply_custom_pg_migrations () {
# we can remove these after https://github.com/mattes/migrate/issues/13
export PGPASSWORD=$KONFIG_POSTGRES_PASSWORD
PSQL_COMMAND="psql -tA -h $KONFIG_POSTGRES_HOST $KONFIG_POSTGRES_DBNAME -U $KONFIG_POSTGRES_USERNAME"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_type_constant_enum\" ADD VALUE IF NOT EXISTS 'collaboration';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_participant_status_constant_enum\" ADD VALUE IF NOT EXISTS 'blocked';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_type_constant_enum\" ADD VALUE IF NOT EXISTS 'linkedtopic';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_type_constant_enum\" ADD VALUE IF NOT EXISTS 'bot';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_message_type_constant_enum\" ADD VALUE IF NOT EXISTS 'bot';"
$PSQL_COMMAND -c "ALTER TYPE \"api\".\"channel_message_type_constant_enum\" ADD VALUE IF NOT EXISTS 'system';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'bootstrap';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'startup';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'enterprise';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'team_base';"
$PSQL_COMMAND -c "ALTER TYPE \"payment\".\"plan_title_enum\" ADD VALUE IF NOT EXISTS 'team_free';"
}
function run () {
# Check if PG DB schema update required
go run $KONFIG_PROJECTROOT/go/src/socialapi/tests/pg-update.go $KONFIG_POSTGRES_HOST $KONFIG_POSTGRES_PORT
RESULT=$?
if [ $RESULT -ne 0 ]; then
exit 1
fi
# Update node modules
if ! scripts/check-node_modules.sh; then
npm install --silent
fi
# Check everything else
check
# Remove old watcher files (do we still need this?)
rm -rf $KONFIG_PROJECTROOT/go/bin/goldorf-main-*
rm -rf $KONFIG_PROJECTROOT/go/bin/watcher-*
# Run Go builder
$KONFIG_PROJECTROOT/go/build.sh
# Run Social Api builder
make -C $KONFIG_PROJECTROOT/go/src/socialapi configure
# Do PG Migration if necessary
migrate up
# Sanitize email addresses
node $KONFIG_PROJECTROOT/scripts/sanitize-email
supervisord && sleep 1
# Show the all logs of workers
tail -fq ./.logs/*.log
}
function docker_compose() {
if ! which docker-compose; then
echo 'error: docker-compose is not found'
echo '$ pip install docker-compose'
exit 1
fi
local ENTRYPOINT="/opt/koding/scripts/bootstrap-container $@"
docker-compose run --entrypoint $ENTRYPOINT backend
}
# Prints usage information for every supported ./run subcommand.
function printHelp (){
echo "Usage: "
echo ""
echo " run : to start koding"
echo " run docker-compose : to start koding in docker-compose environment"
echo " run exec : to exec arbitrary commands"
echo " run install : to install dependencies and compile the client"
echo " run buildclient : to build the client code"
echo " run logs : to see all workers logs"
echo " run log [worker] : to see of specified worker logs only"
echo " run buildservices : to initialize and start services"
echo " run resetdb : to reset databases"
echo " run services : to stop and restart services"
echo " run worker : to list workers"
echo " run printconfig : to print koding config environment variables (output in json via --json flag)"
echo " run worker [worker] : to run a single worker"
echo " run migrate [command] : to apply/revert database changes (command: [create|up|down|version|reset|redo|to|goto])"
echo " run mongomigrate [command]: to apply/revert mongo database changes (command: [create|up|down])"
echo " run importusers : to import koding user data"
echo " run nodeservertests : to run tests for node.js web server"
echo " run socialworkertests : to run tests for social worker"
echo " run nodetestfiles : to run a single test or all test files in a directory"
echo " run sanitize-email : to sanitize email"
echo " run help : to show this list"
echo ""
}
function migrate () {
apply_custom_pg_migrations
params=(create up down version reset redo to goto)
param=$1
case "${params[@]}" in *"$param"*)
;;
*)
echo "Error: Command not found: $param"
echo "Usage: run migrate COMMAND [arg]"
echo ""
echo "Commands: "
echo " create [filename] : create new migration file in path"
echo " up : apply all available migrations"
echo " down : roll back all migrations"
echo " redo : roll back the most recently applied migration, then run it again"
echo " reset : run down and then up command"
echo " version : show the current migration version"
echo " to [n] : (+n) apply the next n / (-n) roll back the previous n migrations"
echo " goto [n] : go to specific migration"
echo ""
exit 1
;;
esac
if [ "$param" == "to" ]; then
param="migrate"
elif [ "$param" == "create" ] && [ -z "$2" ]; then
echo "Please choose a migration file name. (ex. add_created_at_column_account)"
echo "Usage: run migrate create [filename]"
echo ""
exit 1
fi
$GOBIN/migrate -url "postgres://$KONFIG_POSTGRES_HOST:$KONFIG_POSTGRES_PORT/$KONFIG_POSTGRES_DBNAME?user=social_superuser&password=PI:PASSWORD:<PASSWORD>END_PI" -path "$KONFIG_PROJECTROOT/go/src/socialapi/db/sql/migrations" $param $2
if [ "$param" == "create" ]; then
echo "Please edit created script files and add them to your repository."
fi
}
function mongomigrate () {
params=(create up down)
param=$1
echo $1
case "${params[@]}" in *"$param"*)
;;
*)
echo "Error: Command not found: $param"
echo "Usage: run migrate COMMAND [arg]"
echo ""
echo "Commands: "
echo " create [filename] : create new migration file under ./workers/migrations (ids will increase by 5)"
echo " up : apply all available migrations"
echo " down [id] : roll back to id (if not given roll back all migrations)"
echo ""
exit 1
;;
esac
if [ "$param" == "create" ] && [ -z "$2" ]; then
echo "Please choose a migration file name. (ex. add_super_user)"
echo "Usage: ./run mongomigrate create [filename]"
echo ""
exit 1
fi
node $KONFIG_PROJECTROOT/node_modules/mongodb-migrate -runmm --config ../deployment/generated_files/mongomigration.json --dbPropName conn -c $KONFIG_PROJECTROOT/workers $1 $2
if [ "$param" == "create" ]; then
echo "Please edit created script files and add them to your repository."
fi
}
function check (){
check_service_dependencies
if [[ `uname` == 'Darwin' ]]; then
if [ -z "$DOCKER_HOST" ]; then
echo "You need to export DOCKER_HOST, run 'boot2docker up' and follow the instructions. (or run 'eval $(docker-machine env default)')"
exit 1
fi
fi
mongo $KONFIG_MONGO --eval "db.stats()" > /dev/null # do a simple harmless command of some sort
RESULT=$? # returns 0 if mongo eval succeeds
if [ $RESULT -ne 0 ]; then
echo ""
echo "Can't talk to mongodb at $KONFIG_MONGO, is it not running? exiting."
exit 1
fi
EXISTS=$(PGPASSWORD=$KONFIG_POSTGRES_PASSWORD psql -tA -h $KONFIG_POSTGRES_HOST social -U $KONFIG_POSTGRES_USERNAME -c "Select 1 from pg_tables where tablename = 'key' AND schemaname = 'kite';")
if [[ $EXISTS != '1' ]]; then
echo ""
echo "You don't have the new Kontrol Postgres. Please call ./run buildservices."
exit 1
fi
}
function check_psql () {
command -v psql >/dev/null 2>&1 || { echo >&2 "I require psql but it's not installed. (brew install postgresql) Aborting."; exit 1; }
}
function check_service_dependencies () {
echo "checking required services: nginx, docker, mongo, graphicsmagick..."
command -v go >/dev/null 2>&1 || { echo >&2 "I require go but it's not installed. Aborting."; exit 1; }
command -v docker >/dev/null 2>&1 || { echo >&2 "I require docker but it's not installed. Aborting."; exit 1; }
command -v nginx >/dev/null 2>&1 || { echo >&2 "I require nginx but it's not installed. (brew install nginx maybe?) Aborting."; exit 1; }
command -v node >/dev/null 2>&1 || { echo >&2 "I require node but it's not installed. Aborting."; exit 1; }
command -v npm >/dev/null 2>&1 || { echo >&2 "I require npm but it's not installed. Aborting."; exit 1; }
command -v gulp >/dev/null 2>&1 || { echo >&2 "I require gulp but it's not installed. (npm i gulp -g) Aborting."; exit 1; }
command -v coffee >/dev/null 2>&1 || { echo >&2 "I require coffee-script but it's not installed. (npm i coffee-script -g) Aborting."; exit 1; }
check_psql
if [[ `uname` == 'Darwin' ]]; then
brew info graphicsmagick >/dev/null 2>&1 || { echo >&2 "I require graphicsmagick but it's not installed. Aborting."; exit 1; }
command -v boot2docker >/dev/null 2>&1 || command -v docker-machine >/dev/null 2>&1 || { echo >&2 "I require boot2docker but it's not installed. Aborting."; exit 1; }
elif [[ `uname` == 'Linux' ]]; then
command -v gm >/dev/null 2>&1 || { echo >&2 "I require graphicsmagick but it's not installed. Aborting."; exit 1; }
fi
set -o errexit
scripts/check-node-version.sh
scripts/check-npm-version.sh
scripts/check-gulp-version.sh
scripts/check-go-version.sh
scripts/check-supervisor.sh
set +o errexit
}
function waitPostgresReady() {
retries=60
while ! pg_isready -h $KONFIG_POSTGRES_HOST -U $KONFIG_POSTGRES_USERNAME; do
sleep 1
let retries--
if [ $retries == 0 ]; then
echo "time out while waiting for pg_isready"
exit 1
fi
echo "."
done
}
function waitMongoReady() {
retries=60
while ! mongo $KONFIG_MONGO --eval "db.stats()" > /dev/null 2>&1; do
sleep 1
let retries--
if [ $retries == 0 ]; then
echo "time out while waiting for mongo is ready"
exit 1
fi
echo "mongo is not reachable, trying again "
done
}
function runMongoDocker () {
docker run -d -p 27017:27017 --name=mongo koding/mongo-auto:latest
waitMongoReady
}
function runPostgresqlDocker () {
docker run -d -p 5432:5432 --name=postgres koding/postgres
waitPostgresReady
}
function run_docker_wrapper () {
if [[ `uname` == 'Darwin' ]]; then
command -v boot2docker >/dev/null 2>&1 && boot2docker up
command -v docker-machine >/dev/null 2>&1 && docker-machine start default || echo 1
fi
}
function build_services () {
run_docker_wrapper
echo "Stopping services: $SERVICES"
docker stop $SERVICES
echo "Removing services: $SERVICES"
docker rm $SERVICES
# Build postgres
pushd $KONFIG_PROJECTROOT/go/src/socialapi/db/sql
mkdir -p kontrol
sed -i -e "s/USER kontrolapplication/USER $KONFIG_KONTROL_POSTGRES_USERNAME/" kontrol/001-schema.sql
sed -i -e "s/PASSWORD 'kontrolapplication'/PASSWORD '$KONFIG_KONTROL_POSTGRES_PASSWORD'/" kontrol/001-schema.sql
sed -i -e "s/GRANT kontrol TO kontrolapplication/GRANT kontrol TO $KONFIG_KONTROL_POSTGRES_USERNAME/" kontrol/001-schema.sql
docker build -t koding/postgres .
git checkout kontrol/001-schema.sql
popd
runMongoDocker
docker run -d -p 5672:5672 -p 15672:15672 --name=rabbitmq rabbitmq:3-management
docker run -d -p 6379:6379 --name=redis redis
runPostgresqlDocker
docker run -d -p 18081-18110:8081-8110 -p 18200:8200 -p 19095:9095 --name=imply imply/imply:1.2.1
echo "#---> CLEARING ALGOLIA INDEXES: @chris <---#"
pushd $KONFIG_PROJECTROOT
./scripts/clear-algolia-index.sh -i "accounts$KONFIG_SOCIALAPI_ALGOLIA_INDEXSUFFIX"
./scripts/clear-algolia-index.sh -i "topics$KONFIG_SOCIALAPI_ALGOLIA_INDEXSUFFIX"
./scripts/clear-algolia-index.sh -i "messages$KONFIG_SOCIALAPI_ALGOLIA_INDEXSUFFIX"
migrate up
}
function services () {
run_docker_wrapper
EXISTS=$(docker inspect --format="{{ .State.Running }}" $SERVICES 2> /dev/null)
if [ $? -eq 1 ]; then
echo ""
echo "Some of containers are missing, please do ./run buildservices"
exit 1
fi
echo "Stopping services: $SERVICES"
docker stop $SERVICES
echo "Starting services: $SERVICES"
docker start $SERVICES
nginxrun
}
function importusers () {
node $KONFIG_PROJECTROOT/scripts/user-importer -c dev
migrateusers
}
function migrateusers () {
go run $KONFIG_PROJECTROOT/go/src/socialapi/workers/cmd/migrator/main.go -c $KONFIG_SOCIALAPI_CONFIGFILEPATH
}
function removeDockerByName () {
docker ps -all --quiet --filter name=$1 | xargs docker rm -f && echo deleted $1 image
}
function restoredefaultmongodump () {
removeDockerByName mongo
runMongoDocker
mongomigrate up
}
function restoredefaultpostgresdump () {
removeDockerByName postgres
runPostgresqlDocker
migrate up
migrateusers
}
function updatePermissions () {
echo '#---> UPDATING MONGO DATABASE ACCORDING TO LATEST CHANGES IN CODE (UPDATE PERMISSIONS @gokmen) <---#'
node $KONFIG_PROJECTROOT/scripts/permission-updater -c dev --reset
}
if [ "$#" == "0" ]; then
checkrunfile
run $1
elif [ "$1" == "is_ready" ]; then
is_ready
elif [ "$1" == "docker-compose" ]; then
shift
docker_compose
elif [ "$1" == "exec" ]; then
shift
exec "$@"
elif [ "$1" == "install" ]; then
check_service_dependencies
#{installScript}
elif [ "$1" == "printconfig" ]; then
printconfig $@
elif [[ "$1" == "log" || "$1" == "logs" ]]; then
trap - INT
trap
if [ "$2" == "" ]; then
tail -fq ./.logs/*.log
else
tail -fq ./.logs/$2.log
fi
elif [ "$1" == "cleanup" ]; then
./cleanup $@
elif [ "$1" == "buildclient" ]; then
make -C $KONFIG_PROJECTROOT/client dist
elif [ "$1" == "services" ]; then
check_service_dependencies
services
elif [ "$1" == "updatepermissions" ]; then
updatePermissions
elif [ "$1" == "resetdb" ]; then
if [ "$2" == "--yes" ]; then
restoredefaultmongodump
restoredefaultpostgresdump
exit 0
fi
read -p "This will reset current databases, all data will be lost! (y/N)" -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]
then
exit 1
fi
restoredefaultmongodump
restoredefaultpostgresdump
elif [ "$1" == "buildservices" ]; then
check_service_dependencies
if [ "$2" != "force" ]; then
read -p "This will destroy existing images, do you want to continue? (y/N)" -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
exit 1
fi
fi
build_services
migrate up
elif [ "$1" == "help" ]; then
printHelp
elif [ "$1" == "importusers" ]; then
importusers
elif [ "$1" == "worker" ]; then
if [ "$2" == "" ]; then
echo Available workers:
echo "-------------------"
supervisorctl status | awk '${print $1} | sort'
else
trap - INT
trap
exec supervisorctl start $2
fi
elif [ "$1" == "migrate" ]; then
check_psql
if [ -z "$2" ]; then
echo "Please choose a migrate command [create|up|down|version|reset|redo|to|goto]"
echo ""
else
pushd $GOPATH/src/socialapi
make install-migrate
migrate $2 $3
fi
elif [ "$1" == "vmwatchertests" ]; then
go test koding/vmwatcher -test.v=true
elif [ "$1" == "gokodingconfigtests" ]; then
go test -v --race koding/kites/config
elif [ "$1" == "janitortests" ]; then
pushd $KONFIG_PROJECTROOT/go/src/koding/workers/janitor
./test.sh
elif [ "$1" == "gatheringestortests" ]; then
go test koding/workers/gatheringestor -test.v=true
elif [ "$1" == "gomodeltests" ]; then
go test koding/db/mongodb/modelhelper -test.v=true
elif [ "$1" == "kontroltests" ]; then
go test koding/kites/kontrol/kontrol -v
elif [ "$1" == "socialworkertests" ]; then
$KONFIG_PROJECTROOT/scripts/node-testing/mocha-runner "$KONFIG_PROJECTROOT/workers/social"
elif [ "$1" == "nodeservertests" ]; then
$KONFIG_PROJECTROOT/scripts/node-testing/mocha-runner "$KONFIG_PROJECTROOT/servers/lib/server"
# To run specific test directory or a single test file
elif [ "$1" == "nodetestfiles" ]; then
$KONFIG_PROJECTROOT/scripts/node-testing/mocha-runner $2
elif [ "$1" == "sanitize-email" ]; then
node $KONFIG_PROJECTROOT/scripts/sanitize-email
elif [ "$1" == "apply_custom_pg_migrations" ]; then
apply_custom_pg_migrations
elif [ "$1" == "is_pgready" ]; then
waitPostgresReady
elif [ "$1" == "is_mongoready" ]; then
waitMongoReady
elif [ "$1" == "mongomigrate" ]; then
mongomigrate $2 $3
else
echo "Unknown command: $1"
printHelp
fi
# ------ THIS FILE IS AUTO-GENERATED BY ./configure ----- #\n
"""
return run
generateSandbox = generateRunFile = (KONFIG) ->
return """
#!/bin/bash
export HOME=/home/ec2-user
ENV_SHELL_FILE=${ENV_SHELL_FILE:-$(dirname $0)/.env.sh}
if [ -f "$ENV_SHELL_FILE" ]; then
source $ENV_SHELL_FILE
else
echo "error: shell environment file does not exist"
exit 1
fi
COMMAND=$1
shift
case "$COMMAND" in
exec) exec "$@";;
esac
"""
module.exports = { dev: generateDev, default: generateDev, sandbox: generateSandbox, prod: generateSandbox }
|
[
{
"context": " \"Scales\"\n\nslide.code title, empty_svg, \"\"\"\n// via A. Lex (dataviscourse.net)\nvar data = [0.3, 0.01, 0.8];\n",
"end": 12679,
"score": 0.995832622051239,
"start": 12673,
"tag": "NAME",
"value": "A. Lex"
},
{
"context": "ue\");\n\"\"\"\n\nslide.code title, empty_... | slides.coffee | dakoop/IntroD3 | 4 | empty_svg = ->
d3.select('div.output')
.append('svg')
rect1 = ->
svg = d3.select('div.output')
.append('svg')
svg.append("rect")
.attr("x", 150)
.attr("y", 100)
.attr("width", 60)
.attr("height", 300)
rect3 = ->
svg = d3.select('div.output')
.append('svg')
svg.append("rect")
.attr("x", 200)
.attr("y", 300)
.attr("width", 40)
.attr("height", 50)
svg.append("rect")
.attr("x", 100)
.attr("y", 20)
.attr("width", 30)
.attr("height", 50)
svg.append("rect")
.attr("x", 10)
.attr("y", 200)
.attr("width", 25)
.attr("height", 90)
# ----------------------------------------------------
slide.title "First, some JavaScript"
slide.code "Functions are objects", null, """
// In JS functions are first class citizens.
// This is a very powerful concept!
function sq1(x) {
return x * x
}
var sq2 = function(x) {
return x * x
}
console.log("sq1(4) ==", sq1(4)) // == 16
console.log("sq2(4) ==", sq2(4)) // == 16
sq1.foo = 8
sq2.bar = 3
console.log("Trippy:", sq1(sq1.foo + sq2.bar))
"""
slide.code "Closures bake in state", null, """
// Functions can be used to 'bake in' state
var formatter = function(prefix, fixed) {
if (prefix == null) { prefix = '' }
if (fixed == null) { fixed = 2 }
return function(number) {
return prefix + number.toFixed(fixed)
}
}
var currency = formatter('$', 2)
var roughly = formatter('~', 1)
var roughlyDefault = formatter('~') // no 2nd param!
var defaultFormat = formatter() // no params!
console.log("currency(31/3) ==", currency(31/3))
console.log("roughly(31/3) ==", roughly(31/3))
console.log("roughlyDefault(31/3) ==", roughlyDefault(31/3))
console.log("defaultFormat(31/3) ==", defaultFormat(31/3))
"""
slide.code "Configurable functions", null, """
// D3 has many helper methods
// d3.scaleLinear() returns a function that
// will map the given domain to the given
// range linearly.
// D3 v4 change: d3.scaleLinear was
// d3.scale.linear in v3
var w = 640, h = 320
// x is a function!
var x = d3.scaleLinear()
.domain([-1, 1])
.range([0, w])
// y is also a function!
var y = d3.scaleLinear()
.domain([0, 1])
.range([0, h])
console.log("x(0) ==", x(0)) // == w/2
console.log("y(3) ==", y(3)) // == 3*h
"""
slide.title "Core D3"
# -----------------------------------------------
slide.code_title title = ".select()"
slide.code title, rect1, """
var svg = d3.select("div.output svg")
var myRect = svg.select("rect")
myRect.attr("width", 100)
myRect.attr("height", 100)
myRect.style("fill", "steelblue")
"""
slide.code title, rect1, """
var svg = d3.select("div.output svg")
// Chain style
svg.select("rect")
.attr("width", 100)
.attr("height", 100)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".selectAll()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.select("rect")
.attr("width", 100)
.attr("height", 100)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.attr("width", 100)
.attr("height", 100)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
// Each D3 attr/style function passes two params:
// 1. d: a data item
// 2. i: the index of the data item
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.attr("x", 0)
.attr("y", function(d,i) { return i*90+50 })
.attr("width", function(d,i) {
return i*150+100;
})
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code "Arrow Functions", rect3, """
// Two ways to write anonymous functions
// 1. function(d,i) { return i*90+50; }
// 2. (d,i) => i*90+50
// Arrow functions are more succinct
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", (d,i) => i*150+100)
.attr("height", 20)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".data()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", (d,i) => d) // or d => d
.attr("height", 20)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".enter()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
selection.enter().append("rect")
.attr("x", 10) // let's just put it somewhere
.attr("y", 10)
.attr("width", 30)
.attr("height", 30)
.style("fill", "green")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
// update existing items
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
// add (enter) new
selection.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
title = ".merge()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
// Shorter
// Merge merges the enter selection
// with the update selection
selection.enter().append("rect")
.merge(selection)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
title = ".join()"
slide.code title, rect3, """
// Join does enter/append automatically
var svg = d3.select("div.output svg");
svg.selectAll("rect")
.data([127, 61, 256, 71]).join("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue");
"""
title = ".enter() // a common pattern"
slide.code title, empty_svg, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
title = ".enter() // using join"
slide.code title, empty_svg, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.join("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".exit()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
selection.exit()
.remove()
"""
title = ".join()"
slide.code title, rect3, """
// .join removes exit part automatically
var svg = d3.select("div.output svg");
svg.selectAll("rect")
.data([127, 61]).join("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue");
"""
# -----------------------------------------------
slide.code_title title = ".transition()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.transition()
.duration(3000) // 3 seconds
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
selection.enter().append("rect")
.attr("x", 200)
.attr("y", 200)
.attr("width", 10)
.attr("height", 10)
.style("fill", "red")
.merge(selection)
.transition()
.duration(3000)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.transition()
// delays relative to previous transition
.duration(3000)
.style("fill", "green")
.attr("width", d => d*1.5)
selection.exit()
.attr("opacity", 1)
.transition()
.duration(3000)
.attr("opacity", 0)
.remove()
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg");
const t = svg.transition()
.duration(3000);
function update_bars(s) {
s.transition(t)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.transition()
.duration(3000)
.style("fill", "green")
.attr("width", d => d*1.5)
}
var selection = svg.selectAll("rect")
.data([127, 61]) // add more [256, 71]
.join(enter => enter.append("rect")
.attr("x", 200)
.attr("y", 200)
.attr("width", 10)
.attr("height", 10)
.style("fill", "red")
.call(update_bars),
update => update.call(update_bars),
exit => exit.attr("opacity", 1)
.transition()
.duration(3000)
.attr("opacity", 0)
.remove());
"""
# -----------------------------------------------
slide.code_title title = ".data(..., join)"
init_svg = ->
svg = d3.select("div.output").append("svg")
svg.selectAll("rect")
.data([127, 61, 256])
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) -> i*90+50)
.attr("width", (d,i) -> d)
.attr("height", 20)
.style("fill", "steelblue")
slide.code title, init_svg, """
var svg = d3.select("div.output svg")
// Let's say we start here:
/*
svg.selectAll("rect")
.data([127, 61, 256])
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
*/
// And then we do this:
var selection = svg.selectAll("rect")
.data([61, 256, 71]) // <- incomplete?
selection.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.merge(selection)
.transition()
.duration(3000)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
selection.exit()
.remove()
"""
slide.code title, init_svg, """
// Start the same as before
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([61, 256, 71], d => d)
selection.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => (i+1)*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.style("opacity", 0)
.merge(selection)
.transition()
.duration(3000)
.attr("y", (d,i) => i*90+50)
.attr("height", 20)
.style("opacity", 1)
selection.exit()
.transition()
.duration(3000)
.attr("y", (d,i) => (i-1)*90+50)
.style("opacity", 0)
.remove()
"""
# -----------------------------------------------
slide.title title = "Nested Selections"
slide.code title, empty_svg, """
var myData = [
[15, 20],
[40, 10],
[30, 27]
]
var svg = d3.select("div.output svg")
// First selection (within svg)
var selA = svg.selectAll("g")
.data(myData)
.join("g")
.attr("transform", (d, i) => `translate(70,${i*100+50})`)
// backticks (``) denote template literal
// normal strings except that text inside ${...}
// is evaluated (can be js expression)!
// Second selection (within first selection)
var selB = selA.selectAll('circle')
.data(d => d)
.join("circle")
.attr("cx", (d, i) => i*80)
.attr("r", (d,i) => d)
"""
# -----------------------------------------------
slide.title title = "Scales"
slide.code title, empty_svg, """
// via A. Lex (dataviscourse.net)
var data = [0.3, 0.01, 0.8];
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue");
"""
slide.code title, empty_svg, """
// via A. Lex (dataviscourse.net)
var data = [0.3, 0.01, 0.8];
// the scale function
var s = function (input) {
return input * 500;
}
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => s(d))
.attr("height", 20)
.style("fill", "steelblue");
"""
slide.code title, empty_svg, """
// via A. Lex (dataviscourse.net)
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
// var xScale = d3.scaleLog() // check domain...
.domain([0,d3.max(data)])
.range([0,300]);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
"""
# -----------------------------------------------
slide.title title = "Axes"
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var xAxis = d3.axisBottom(); // axisTop()
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
svg.append("g").call(xAxis);
"""
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var xAxis = d3.axisBottom();
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
svg.append("g")
.attr("transform", "translate(0, 300)")
.call(xAxis);
"""
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var xAxis = d3.axisBottom();
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 10)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
svg.append("g")
.attr("transform", "translate(10, 300)")
.call(xAxis);
"""
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var yScale = d3.scaleBand()
.domain([...data.keys()])
.range([0,300])
.padding(0.7)
var xAxis = d3.axisBottom();
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 10)
.attr("y", (d,i) => yScale(i))
.attr("width", d => xScale(d))
.attr("height", yScale.bandwidth())
.style("fill", "steelblue");
svg.append("g")
.attr("transform", "translate(10, 300)")
.call(xAxis);
"""
# -----------------------------------------------
slide.title "Useful Examples"
slide.code "Shuffle", empty_svg, """
var cards = [
"J\\u2665", "J\\u2666", "J\\u2663", "J\\u2660",
"K\\u2665", "K\\u2666", "K\\u2663", "K\\u2660",
"Q\\u2665", "Q\\u2666", "Q\\u2663", "Q\\u2660",
"A\\u2665", "A\\u2666", "A\\u2663", "A\\u2660"]
cards.sort(function() {return Math.random()-.5})
var svg = d3.select("div.output svg")
var selection = svg.selectAll("text")
.data(cards, String)
selection
.transition()
.duration(1000)
.attr("y", (d,i) => i*35+40)
selection.enter().append("text")
.attr("x", 30)
.attr("y", (d,i) => i*35+40)
.style("fill", function(d) {
return "\\u2665\\u2666".indexOf(d[1]) < 0 ?
"black" : "red";
})
.style("font", "20px monospace")
.text(String)
"""
slide.code "Shuffle (v2)", empty_svg, """
var cards = [
"J\\u2665", "J\\u2666", "J\\u2663", "J\\u2660",
"K\\u2665", "K\\u2666", "K\\u2663", "K\\u2660",
"Q\\u2665", "Q\\u2666", "Q\\u2663", "Q\\u2660",
"A\\u2665", "A\\u2666", "A\\u2663", "A\\u2660"]
cards.sort(function() {return Math.random()-.5})
var svg = d3.select("div.output svg")
var selection = svg.selectAll("text")
.data(cards, String)
selection
.transition().duration(500)
.attr("x", (d,i) => (i%8)*30+30)
.transition().duration(500)
.attr("y", (d,i) => i*35+40)
.transition().duration(500)
.attr("x", 30)
selection.enter().append("text")
.attr("x", 30)
.attr("y", (d,i) => i*35+40)
.style("fill", function(d) {
return "\\u2665\\u2666".indexOf(d[1]) < 0 ?
"black" : "red";
})
.style("font", "20px monospace")
.text(String)
"""
# -----------------------------------------------
slide.code "Drawing lines", empty_svg, """
var svg = d3.select("div.output svg")
svg.append("path")
.style("fill", "none")
.style("stroke", "black")
.style("stroke-width", 2)
.attr("d", "M 10 10 L 200 200 "+
"L 200 400 L 300 100 L 400 150")
"""
slide.code "Drawing lines", empty_svg, """
var points = [
{ x: 10, y: 10 },
{ x: 200, y: 200 },
{ x: 200, y: 400 },
{ x: 300, y: 100 },
{ x: 400, y: 150 }
]
var lineFn = d3.line()
.x(d => d.x)
.y(d => d.y)
//.curve(d3.curveCardinal)
var svg = d3.select("div.output svg")
svg.append("path")
.style("fill", "none")
.style("stroke", "black")
.style("stroke-width", 2)
.attr("d", lineFn(points))
"""
slide.code "Drawing lines", empty_svg, """
var pointsSin = d3.range(21).map(function(i) {
return {x: i, y: Math.sin(i/3) }
})
var pointsCos = d3.range(21).map(function(i) {
return {x: i, y: Math.cos(i/3) }
})
var w = 480
var h = 300
var x = d3.scaleLinear()
.domain([0, 20]).range([0, w])
var y = d3.scaleLinear()
.domain([-1, 1]).range([h, 0])
var lineFn = d3.line()
.x(d => x(d.x))
.y(d => y(d.y))
var svg = d3.select("div.output svg")
svg.selectAll("path")
.data([pointsSin, pointsCos])
.enter().append("path")
.style("fill", "none")
.style("stroke", "black")
.style("stroke-width", 2)
.attr("d", lineFn)
"""
| 196086 | empty_svg = ->
d3.select('div.output')
.append('svg')
rect1 = ->
svg = d3.select('div.output')
.append('svg')
svg.append("rect")
.attr("x", 150)
.attr("y", 100)
.attr("width", 60)
.attr("height", 300)
rect3 = ->
svg = d3.select('div.output')
.append('svg')
svg.append("rect")
.attr("x", 200)
.attr("y", 300)
.attr("width", 40)
.attr("height", 50)
svg.append("rect")
.attr("x", 100)
.attr("y", 20)
.attr("width", 30)
.attr("height", 50)
svg.append("rect")
.attr("x", 10)
.attr("y", 200)
.attr("width", 25)
.attr("height", 90)
# ----------------------------------------------------
slide.title "First, some JavaScript"
slide.code "Functions are objects", null, """
// In JS functions are first class citizens.
// This is a very powerful concept!
function sq1(x) {
return x * x
}
var sq2 = function(x) {
return x * x
}
console.log("sq1(4) ==", sq1(4)) // == 16
console.log("sq2(4) ==", sq2(4)) // == 16
sq1.foo = 8
sq2.bar = 3
console.log("Trippy:", sq1(sq1.foo + sq2.bar))
"""
slide.code "Closures bake in state", null, """
// Functions can be used to 'bake in' state
var formatter = function(prefix, fixed) {
if (prefix == null) { prefix = '' }
if (fixed == null) { fixed = 2 }
return function(number) {
return prefix + number.toFixed(fixed)
}
}
var currency = formatter('$', 2)
var roughly = formatter('~', 1)
var roughlyDefault = formatter('~') // no 2nd param!
var defaultFormat = formatter() // no params!
console.log("currency(31/3) ==", currency(31/3))
console.log("roughly(31/3) ==", roughly(31/3))
console.log("roughlyDefault(31/3) ==", roughlyDefault(31/3))
console.log("defaultFormat(31/3) ==", defaultFormat(31/3))
"""
slide.code "Configurable functions", null, """
// D3 has many helper methods
// d3.scaleLinear() returns a function that
// will map the given domain to the given
// range linearly.
// D3 v4 change: d3.scaleLinear was
// d3.scale.linear in v3
var w = 640, h = 320
// x is a function!
var x = d3.scaleLinear()
.domain([-1, 1])
.range([0, w])
// y is also a function!
var y = d3.scaleLinear()
.domain([0, 1])
.range([0, h])
console.log("x(0) ==", x(0)) // == w/2
console.log("y(3) ==", y(3)) // == 3*h
"""
slide.title "Core D3"
# -----------------------------------------------
slide.code_title title = ".select()"
slide.code title, rect1, """
var svg = d3.select("div.output svg")
var myRect = svg.select("rect")
myRect.attr("width", 100)
myRect.attr("height", 100)
myRect.style("fill", "steelblue")
"""
slide.code title, rect1, """
var svg = d3.select("div.output svg")
// Chain style
svg.select("rect")
.attr("width", 100)
.attr("height", 100)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".selectAll()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.select("rect")
.attr("width", 100)
.attr("height", 100)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.attr("width", 100)
.attr("height", 100)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
// Each D3 attr/style function passes two params:
// 1. d: a data item
// 2. i: the index of the data item
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.attr("x", 0)
.attr("y", function(d,i) { return i*90+50 })
.attr("width", function(d,i) {
return i*150+100;
})
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code "Arrow Functions", rect3, """
// Two ways to write anonymous functions
// 1. function(d,i) { return i*90+50; }
// 2. (d,i) => i*90+50
// Arrow functions are more succinct
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", (d,i) => i*150+100)
.attr("height", 20)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".data()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", (d,i) => d) // or d => d
.attr("height", 20)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".enter()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
selection.enter().append("rect")
.attr("x", 10) // let's just put it somewhere
.attr("y", 10)
.attr("width", 30)
.attr("height", 30)
.style("fill", "green")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
// update existing items
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
// add (enter) new
selection.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
title = ".merge()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
// Shorter
// Merge merges the enter selection
// with the update selection
selection.enter().append("rect")
.merge(selection)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
title = ".join()"
slide.code title, rect3, """
// Join does enter/append automatically
var svg = d3.select("div.output svg");
svg.selectAll("rect")
.data([127, 61, 256, 71]).join("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue");
"""
title = ".enter() // a common pattern"
slide.code title, empty_svg, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
title = ".enter() // using join"
slide.code title, empty_svg, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.join("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".exit()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
selection.exit()
.remove()
"""
title = ".join()"
slide.code title, rect3, """
// .join removes exit part automatically
var svg = d3.select("div.output svg");
svg.selectAll("rect")
.data([127, 61]).join("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue");
"""
# -----------------------------------------------
slide.code_title title = ".transition()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.transition()
.duration(3000) // 3 seconds
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
selection.enter().append("rect")
.attr("x", 200)
.attr("y", 200)
.attr("width", 10)
.attr("height", 10)
.style("fill", "red")
.merge(selection)
.transition()
.duration(3000)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.transition()
// delays relative to previous transition
.duration(3000)
.style("fill", "green")
.attr("width", d => d*1.5)
selection.exit()
.attr("opacity", 1)
.transition()
.duration(3000)
.attr("opacity", 0)
.remove()
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg");
const t = svg.transition()
.duration(3000);
function update_bars(s) {
s.transition(t)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.transition()
.duration(3000)
.style("fill", "green")
.attr("width", d => d*1.5)
}
var selection = svg.selectAll("rect")
.data([127, 61]) // add more [256, 71]
.join(enter => enter.append("rect")
.attr("x", 200)
.attr("y", 200)
.attr("width", 10)
.attr("height", 10)
.style("fill", "red")
.call(update_bars),
update => update.call(update_bars),
exit => exit.attr("opacity", 1)
.transition()
.duration(3000)
.attr("opacity", 0)
.remove());
"""
# -----------------------------------------------
slide.code_title title = ".data(..., join)"
init_svg = ->
svg = d3.select("div.output").append("svg")
svg.selectAll("rect")
.data([127, 61, 256])
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) -> i*90+50)
.attr("width", (d,i) -> d)
.attr("height", 20)
.style("fill", "steelblue")
slide.code title, init_svg, """
var svg = d3.select("div.output svg")
// Let's say we start here:
/*
svg.selectAll("rect")
.data([127, 61, 256])
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
*/
// And then we do this:
var selection = svg.selectAll("rect")
.data([61, 256, 71]) // <- incomplete?
selection.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.merge(selection)
.transition()
.duration(3000)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
selection.exit()
.remove()
"""
slide.code title, init_svg, """
// Start the same as before
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([61, 256, 71], d => d)
selection.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => (i+1)*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.style("opacity", 0)
.merge(selection)
.transition()
.duration(3000)
.attr("y", (d,i) => i*90+50)
.attr("height", 20)
.style("opacity", 1)
selection.exit()
.transition()
.duration(3000)
.attr("y", (d,i) => (i-1)*90+50)
.style("opacity", 0)
.remove()
"""
# -----------------------------------------------
slide.title title = "Nested Selections"
slide.code title, empty_svg, """
var myData = [
[15, 20],
[40, 10],
[30, 27]
]
var svg = d3.select("div.output svg")
// First selection (within svg)
var selA = svg.selectAll("g")
.data(myData)
.join("g")
.attr("transform", (d, i) => `translate(70,${i*100+50})`)
// backticks (``) denote template literal
// normal strings except that text inside ${...}
// is evaluated (can be js expression)!
// Second selection (within first selection)
var selB = selA.selectAll('circle')
.data(d => d)
.join("circle")
.attr("cx", (d, i) => i*80)
.attr("r", (d,i) => d)
"""
# -----------------------------------------------
slide.title title = "Scales"
slide.code title, empty_svg, """
// via <NAME> (dataviscourse.net)
var data = [0.3, 0.01, 0.8];
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue");
"""
slide.code title, empty_svg, """
// via <NAME> (dataviscourse.net)
var data = [0.3, 0.01, 0.8];
// the scale function
var s = function (input) {
return input * 500;
}
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => s(d))
.attr("height", 20)
.style("fill", "steelblue");
"""
slide.code title, empty_svg, """
// via <NAME> (dataviscourse.net)
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
// var xScale = d3.scaleLog() // check domain...
.domain([0,d3.max(data)])
.range([0,300]);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
"""
# -----------------------------------------------
slide.title title = "Axes"
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var xAxis = d3.axisBottom(); // axisTop()
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
svg.append("g").call(xAxis);
"""
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var xAxis = d3.axisBottom();
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
svg.append("g")
.attr("transform", "translate(0, 300)")
.call(xAxis);
"""
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var xAxis = d3.axisBottom();
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 10)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
svg.append("g")
.attr("transform", "translate(10, 300)")
.call(xAxis);
"""
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var yScale = d3.scaleBand()
.domain([...data.keys()])
.range([0,300])
.padding(0.7)
var xAxis = d3.axisBottom();
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 10)
.attr("y", (d,i) => yScale(i))
.attr("width", d => xScale(d))
.attr("height", yScale.bandwidth())
.style("fill", "steelblue");
svg.append("g")
.attr("transform", "translate(10, 300)")
.call(xAxis);
"""
# -----------------------------------------------
slide.title "Useful Examples"
slide.code "Shuffle", empty_svg, """
var cards = [
"J\\u2665", "J\\u2666", "J\\u2663", "J\\u2660",
"K\\u2665", "K\\u2666", "K\\u2663", "K\\u2660",
"Q\\u2665", "Q\\u2666", "Q\\u2663", "Q\\u2660",
"A\\u2665", "A\\u2666", "A\\u2663", "A\\u2660"]
cards.sort(function() {return Math.random()-.5})
var svg = d3.select("div.output svg")
var selection = svg.selectAll("text")
.data(cards, String)
selection
.transition()
.duration(1000)
.attr("y", (d,i) => i*35+40)
selection.enter().append("text")
.attr("x", 30)
.attr("y", (d,i) => i*35+40)
.style("fill", function(d) {
return "\\u2665\\u2666".indexOf(d[1]) < 0 ?
"black" : "red";
})
.style("font", "20px monospace")
.text(String)
"""
slide.code "Shuffle (v2)", empty_svg, """
var cards = [
"J\\u2665", "J\\u2666", "J\\u2663", "J\\u2660",
"K\\u2665", "K\\u2666", "K\\u2663", "K\\u2660",
"Q\\u2665", "Q\\u2666", "Q\\u2663", "Q\\u2660",
"A\\u2665", "A\\u2666", "A\\u2663", "A\\u2660"]
cards.sort(function() {return Math.random()-.5})
var svg = d3.select("div.output svg")
var selection = svg.selectAll("text")
.data(cards, String)
selection
.transition().duration(500)
.attr("x", (d,i) => (i%8)*30+30)
.transition().duration(500)
.attr("y", (d,i) => i*35+40)
.transition().duration(500)
.attr("x", 30)
selection.enter().append("text")
.attr("x", 30)
.attr("y", (d,i) => i*35+40)
.style("fill", function(d) {
return "\\u2665\\u2666".indexOf(d[1]) < 0 ?
"black" : "red";
})
.style("font", "20px monospace")
.text(String)
"""
# -----------------------------------------------
slide.code "Drawing lines", empty_svg, """
var svg = d3.select("div.output svg")
svg.append("path")
.style("fill", "none")
.style("stroke", "black")
.style("stroke-width", 2)
.attr("d", "M 10 10 L 200 200 "+
"L 200 400 L 300 100 L 400 150")
"""
slide.code "Drawing lines", empty_svg, """
var points = [
{ x: 10, y: 10 },
{ x: 200, y: 200 },
{ x: 200, y: 400 },
{ x: 300, y: 100 },
{ x: 400, y: 150 }
]
var lineFn = d3.line()
.x(d => d.x)
.y(d => d.y)
//.curve(d3.curveCardinal)
var svg = d3.select("div.output svg")
svg.append("path")
.style("fill", "none")
.style("stroke", "black")
.style("stroke-width", 2)
.attr("d", lineFn(points))
"""
slide.code "Drawing lines", empty_svg, """
var pointsSin = d3.range(21).map(function(i) {
return {x: i, y: Math.sin(i/3) }
})
var pointsCos = d3.range(21).map(function(i) {
return {x: i, y: Math.cos(i/3) }
})
var w = 480
var h = 300
var x = d3.scaleLinear()
.domain([0, 20]).range([0, w])
var y = d3.scaleLinear()
.domain([-1, 1]).range([h, 0])
var lineFn = d3.line()
.x(d => x(d.x))
.y(d => y(d.y))
var svg = d3.select("div.output svg")
svg.selectAll("path")
.data([pointsSin, pointsCos])
.enter().append("path")
.style("fill", "none")
.style("stroke", "black")
.style("stroke-width", 2)
.attr("d", lineFn)
"""
| true | empty_svg = ->
d3.select('div.output')
.append('svg')
rect1 = ->
svg = d3.select('div.output')
.append('svg')
svg.append("rect")
.attr("x", 150)
.attr("y", 100)
.attr("width", 60)
.attr("height", 300)
rect3 = ->
svg = d3.select('div.output')
.append('svg')
svg.append("rect")
.attr("x", 200)
.attr("y", 300)
.attr("width", 40)
.attr("height", 50)
svg.append("rect")
.attr("x", 100)
.attr("y", 20)
.attr("width", 30)
.attr("height", 50)
svg.append("rect")
.attr("x", 10)
.attr("y", 200)
.attr("width", 25)
.attr("height", 90)
# ----------------------------------------------------
slide.title "First, some JavaScript"
slide.code "Functions are objects", null, """
// In JS functions are first class citizens.
// This is a very powerful concept!
function sq1(x) {
return x * x
}
var sq2 = function(x) {
return x * x
}
console.log("sq1(4) ==", sq1(4)) // == 16
console.log("sq2(4) ==", sq2(4)) // == 16
sq1.foo = 8
sq2.bar = 3
console.log("Trippy:", sq1(sq1.foo + sq2.bar))
"""
slide.code "Closures bake in state", null, """
// Functions can be used to 'bake in' state
var formatter = function(prefix, fixed) {
if (prefix == null) { prefix = '' }
if (fixed == null) { fixed = 2 }
return function(number) {
return prefix + number.toFixed(fixed)
}
}
var currency = formatter('$', 2)
var roughly = formatter('~', 1)
var roughlyDefault = formatter('~') // no 2nd param!
var defaultFormat = formatter() // no params!
console.log("currency(31/3) ==", currency(31/3))
console.log("roughly(31/3) ==", roughly(31/3))
console.log("roughlyDefault(31/3) ==", roughlyDefault(31/3))
console.log("defaultFormat(31/3) ==", defaultFormat(31/3))
"""
slide.code "Configurable functions", null, """
// D3 has many helper methods
// d3.scaleLinear() returns a function that
// will map the given domain to the given
// range linearly.
// D3 v4 change: d3.scaleLinear was
// d3.scale.linear in v3
var w = 640, h = 320
// x is a function!
var x = d3.scaleLinear()
.domain([-1, 1])
.range([0, w])
// y is also a function!
var y = d3.scaleLinear()
.domain([0, 1])
.range([0, h])
console.log("x(0) ==", x(0)) // == w/2
console.log("y(3) ==", y(3)) // == 3*h
"""
slide.title "Core D3"
# -----------------------------------------------
slide.code_title title = ".select()"
slide.code title, rect1, """
var svg = d3.select("div.output svg")
var myRect = svg.select("rect")
myRect.attr("width", 100)
myRect.attr("height", 100)
myRect.style("fill", "steelblue")
"""
slide.code title, rect1, """
var svg = d3.select("div.output svg")
// Chain style
svg.select("rect")
.attr("width", 100)
.attr("height", 100)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".selectAll()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.select("rect")
.attr("width", 100)
.attr("height", 100)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.attr("width", 100)
.attr("height", 100)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
// Each D3 attr/style function passes two params:
// 1. d: a data item
// 2. i: the index of the data item
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.attr("x", 0)
.attr("y", function(d,i) { return i*90+50 })
.attr("width", function(d,i) {
return i*150+100;
})
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code "Arrow Functions", rect3, """
// Two ways to write anonymous functions
// 1. function(d,i) { return i*90+50; }
// 2. (d,i) => i*90+50
// Arrow functions are more succinct
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", (d,i) => i*150+100)
.attr("height", 20)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".data()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", (d,i) => d) // or d => d
.attr("height", 20)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".enter()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
selection.enter().append("rect")
.attr("x", 10) // let's just put it somewhere
.attr("y", 10)
.attr("width", 30)
.attr("height", 30)
.style("fill", "green")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
// update existing items
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
// add (enter) new
selection.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
title = ".merge()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
// Shorter
// Merge merges the enter selection
// with the update selection
selection.enter().append("rect")
.merge(selection)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
title = ".join()"
slide.code title, rect3, """
// Join does enter/append automatically
var svg = d3.select("div.output svg");
svg.selectAll("rect")
.data([127, 61, 256, 71]).join("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue");
"""
title = ".enter() // a common pattern"
slide.code title, empty_svg, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
title = ".enter() // using join"
slide.code title, empty_svg, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.join("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
# -----------------------------------------------
slide.code_title title = ".exit()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61])
selection
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
selection.exit()
.remove()
"""
title = ".join()"
slide.code title, rect3, """
// .join removes exit part automatically
var svg = d3.select("div.output svg");
svg.selectAll("rect")
.data([127, 61]).join("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue");
"""
# -----------------------------------------------
slide.code_title title = ".transition()"
slide.code title, rect3, """
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data([127, 61, 256])
.transition()
.duration(3000) // 3 seconds
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([127, 61, 256, 71])
selection.enter().append("rect")
.attr("x", 200)
.attr("y", 200)
.attr("width", 10)
.attr("height", 10)
.style("fill", "red")
.merge(selection)
.transition()
.duration(3000)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.transition()
// delays relative to previous transition
.duration(3000)
.style("fill", "green")
.attr("width", d => d*1.5)
selection.exit()
.attr("opacity", 1)
.transition()
.duration(3000)
.attr("opacity", 0)
.remove()
"""
slide.code title, rect3, """
var svg = d3.select("div.output svg");
const t = svg.transition()
.duration(3000);
function update_bars(s) {
s.transition(t)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.transition()
.duration(3000)
.style("fill", "green")
.attr("width", d => d*1.5)
}
var selection = svg.selectAll("rect")
.data([127, 61]) // add more [256, 71]
.join(enter => enter.append("rect")
.attr("x", 200)
.attr("y", 200)
.attr("width", 10)
.attr("height", 10)
.style("fill", "red")
.call(update_bars),
update => update.call(update_bars),
exit => exit.attr("opacity", 1)
.transition()
.duration(3000)
.attr("opacity", 0)
.remove());
"""
# -----------------------------------------------
slide.code_title title = ".data(..., join)"
init_svg = ->
svg = d3.select("div.output").append("svg")
svg.selectAll("rect")
.data([127, 61, 256])
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) -> i*90+50)
.attr("width", (d,i) -> d)
.attr("height", 20)
.style("fill", "steelblue")
slide.code title, init_svg, """
var svg = d3.select("div.output svg")
// Let's say we start here:
/*
svg.selectAll("rect")
.data([127, 61, 256])
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
*/
// And then we do this:
var selection = svg.selectAll("rect")
.data([61, 256, 71]) // <- incomplete?
selection.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.merge(selection)
.transition()
.duration(3000)
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
selection.exit()
.remove()
"""
slide.code title, init_svg, """
// Start the same as before
var svg = d3.select("div.output svg")
var selection = svg.selectAll("rect")
.data([61, 256, 71], d => d)
selection.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => (i+1)*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue")
.style("opacity", 0)
.merge(selection)
.transition()
.duration(3000)
.attr("y", (d,i) => i*90+50)
.attr("height", 20)
.style("opacity", 1)
selection.exit()
.transition()
.duration(3000)
.attr("y", (d,i) => (i-1)*90+50)
.style("opacity", 0)
.remove()
"""
# -----------------------------------------------
slide.title title = "Nested Selections"
slide.code title, empty_svg, """
var myData = [
[15, 20],
[40, 10],
[30, 27]
]
var svg = d3.select("div.output svg")
// First selection (within svg)
var selA = svg.selectAll("g")
.data(myData)
.join("g")
.attr("transform", (d, i) => `translate(70,${i*100+50})`)
// backticks (``) denote template literal
// normal strings except that text inside ${...}
// is evaluated (can be js expression)!
// Second selection (within first selection)
var selB = selA.selectAll('circle')
.data(d => d)
.join("circle")
.attr("cx", (d, i) => i*80)
.attr("r", (d,i) => d)
"""
# -----------------------------------------------
slide.title title = "Scales"
slide.code title, empty_svg, """
// via PI:NAME:<NAME>END_PI (dataviscourse.net)
var data = [0.3, 0.01, 0.8];
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => d)
.attr("height", 20)
.style("fill", "steelblue");
"""
slide.code title, empty_svg, """
// via PI:NAME:<NAME>END_PI (dataviscourse.net)
var data = [0.3, 0.01, 0.8];
// the scale function
var s = function (input) {
return input * 500;
}
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => s(d))
.attr("height", 20)
.style("fill", "steelblue");
"""
slide.code title, empty_svg, """
// via PI:NAME:<NAME>END_PI (dataviscourse.net)
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
// var xScale = d3.scaleLog() // check domain...
.domain([0,d3.max(data)])
.range([0,300]);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
"""
# -----------------------------------------------
slide.title title = "Axes"
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var xAxis = d3.axisBottom(); // axisTop()
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
svg.append("g").call(xAxis);
"""
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var xAxis = d3.axisBottom();
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 0)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
svg.append("g")
.attr("transform", "translate(0, 300)")
.call(xAxis);
"""
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var xAxis = d3.axisBottom();
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 10)
.attr("y", (d,i) => i*90+50)
.attr("width", d => xScale(d))
.attr("height", 20)
.style("fill", "steelblue");
svg.append("g")
.attr("transform", "translate(10, 300)")
.call(xAxis);
"""
slide.code title, empty_svg, """
var data = [0.3, 0.01, 0.8];
// the scale function
var xScale = d3.scaleLinear()
.domain([0,d3.max(data)])
.range([0,300]);
var yScale = d3.scaleBand()
.domain([...data.keys()])
.range([0,300])
.padding(0.7)
var xAxis = d3.axisBottom();
xAxis.scale(xScale);
var svg = d3.select("div.output svg")
svg.selectAll("rect")
.data(data)
.enter().append("rect")
.attr("x", 10)
.attr("y", (d,i) => yScale(i))
.attr("width", d => xScale(d))
.attr("height", yScale.bandwidth())
.style("fill", "steelblue");
svg.append("g")
.attr("transform", "translate(10, 300)")
.call(xAxis);
"""
# -----------------------------------------------
slide.title "Useful Examples"
slide.code "Shuffle", empty_svg, """
var cards = [
"J\\u2665", "J\\u2666", "J\\u2663", "J\\u2660",
"K\\u2665", "K\\u2666", "K\\u2663", "K\\u2660",
"Q\\u2665", "Q\\u2666", "Q\\u2663", "Q\\u2660",
"A\\u2665", "A\\u2666", "A\\u2663", "A\\u2660"]
cards.sort(function() {return Math.random()-.5})
var svg = d3.select("div.output svg")
var selection = svg.selectAll("text")
.data(cards, String)
selection
.transition()
.duration(1000)
.attr("y", (d,i) => i*35+40)
selection.enter().append("text")
.attr("x", 30)
.attr("y", (d,i) => i*35+40)
.style("fill", function(d) {
return "\\u2665\\u2666".indexOf(d[1]) < 0 ?
"black" : "red";
})
.style("font", "20px monospace")
.text(String)
"""
slide.code "Shuffle (v2)", empty_svg, """
var cards = [
"J\\u2665", "J\\u2666", "J\\u2663", "J\\u2660",
"K\\u2665", "K\\u2666", "K\\u2663", "K\\u2660",
"Q\\u2665", "Q\\u2666", "Q\\u2663", "Q\\u2660",
"A\\u2665", "A\\u2666", "A\\u2663", "A\\u2660"]
cards.sort(function() {return Math.random()-.5})
var svg = d3.select("div.output svg")
var selection = svg.selectAll("text")
.data(cards, String)
selection
.transition().duration(500)
.attr("x", (d,i) => (i%8)*30+30)
.transition().duration(500)
.attr("y", (d,i) => i*35+40)
.transition().duration(500)
.attr("x", 30)
selection.enter().append("text")
.attr("x", 30)
.attr("y", (d,i) => i*35+40)
.style("fill", function(d) {
return "\\u2665\\u2666".indexOf(d[1]) < 0 ?
"black" : "red";
})
.style("font", "20px monospace")
.text(String)
"""
# -----------------------------------------------
slide.code "Drawing lines", empty_svg, """
var svg = d3.select("div.output svg")
svg.append("path")
.style("fill", "none")
.style("stroke", "black")
.style("stroke-width", 2)
.attr("d", "M 10 10 L 200 200 "+
"L 200 400 L 300 100 L 400 150")
"""
slide.code "Drawing lines", empty_svg, """
var points = [
{ x: 10, y: 10 },
{ x: 200, y: 200 },
{ x: 200, y: 400 },
{ x: 300, y: 100 },
{ x: 400, y: 150 }
]
var lineFn = d3.line()
.x(d => d.x)
.y(d => d.y)
//.curve(d3.curveCardinal)
var svg = d3.select("div.output svg")
svg.append("path")
.style("fill", "none")
.style("stroke", "black")
.style("stroke-width", 2)
.attr("d", lineFn(points))
"""
slide.code "Drawing lines", empty_svg, """
var pointsSin = d3.range(21).map(function(i) {
return {x: i, y: Math.sin(i/3) }
})
var pointsCos = d3.range(21).map(function(i) {
return {x: i, y: Math.cos(i/3) }
})
var w = 480
var h = 300
var x = d3.scaleLinear()
.domain([0, 20]).range([0, w])
var y = d3.scaleLinear()
.domain([-1, 1]).range([h, 0])
var lineFn = d3.line()
.x(d => x(d.x))
.y(d => y(d.y))
var svg = d3.select("div.output svg")
svg.selectAll("path")
.data([pointsSin, pointsCos])
.enter().append("path")
.style("fill", "none")
.style("stroke", "black")
.style("stroke-width", 2)
.attr("d", lineFn)
"""
|
[
{
"context": " Returns who was oncall last week.\n#\n# Author:\n# Juan Pablo Ortiz <pablasso@gmail.com>\n\nclass OnCallRoster\n constr",
"end": 552,
"score": 0.9998903870582581,
"start": 536,
"tag": "NAME",
"value": "Juan Pablo Ortiz"
},
{
"context": "call last week.\n#\n# Author:... | src/oncall.coffee | pablasso/hubot-oncall | 3 | # Description:
# This project tells you who is oncall on the current week and automatically rotates given a defined roster.
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot oncall set <person1>,..,<personN> - Sets people for the oncall roster.
# hubot oncall list - List the current roster available for oncall
# hubot oncall now - Returns who is oncall this week
# hubot oncall next - Returns who will be oncall next week
# hubot oncall last - Returns who was oncall last week.
#
# Author:
# Juan Pablo Ortiz <pablasso@gmail.com>
class OnCallRoster
constructor: () ->
@people = []
# 0 means current week, -1 means last week, 1 next week, and so on.
getWeekNumber: (offset) ->
offset = 7 * offset
today = new Date
date = new Date(today.getFullYear(), today.getMonth(), today.getDate() + offset)
onejan = new Date(date.getFullYear(), 0, 1)
Math.floor((((date - onejan) / 86400000) + onejan.getDay() + 1) / 7)
getDayOfWeek: (weekNumber, dayNumber) ->
year = (new Date).getFullYear()
j10 = new Date(year, 0, 10, 12, 0, 0)
j4 = new Date(year, 0, 4, 12, 0, 0)
mon1 = j4.getTime() - j10.getDay() * 86400000
new Date(mon1 + ((weekNumber - 1) * 7 + dayNumber) * 86400000)
getOnCall: (week) ->
person: @people[week % @people.length]
startDate: @getPrettyDate(@getDayOfWeek(week, 0))
endDate: @getPrettyDate(@getDayOfWeek(week, 7))
getPeopleCount: () ->
@people.length
getPeopleList: () ->
@people.join(',')
setPeopleList: (list) ->
@people = list.split(',')
getPrettyDate: (date) ->
"#{date.getMonth() + 1}/#{date.getDate()}"
module.exports = (robot) ->
onCallRoster = new OnCallRoster()
robot.respond /oncall set (.*)/i, (msg) ->
onCallRoster.setPeopleList(msg.match[1])
msg.send("All set. I don't persist data though so please save the list yourself in case I reboot.")
robot.respond /oncall list/, (msg) ->
if onCallRoster.getPeopleCount() == 0
msg.send("There's no people oncall. You can set them using the command <oncall set> with a comma separated list of people.")
return
msg.send(onCallRoster.getPeopleList())
robot.respond /oncall (now|next|last)/, (msg) ->
if onCallRoster.getPeopleCount() == 0
msg.send("There's no people oncall. You can set them using the command <oncall set> with a comma separated list of people.")
return
if msg.match[1] == 'now'
week_offset = 0
week_verb = 'is'
else if msg.match[1] == 'next'
week_offset = 1
week_verb = 'will be'
else if msg.match[1] == 'last'
week_offset = -1
week_verb = 'was'
week = onCallRoster.getWeekNumber(week_offset)
oncall = onCallRoster.getOnCall(week)
message = "#{oncall.person} #{week_verb} on call from #{oncall.startDate} to #{oncall.endDate}"
msg.reply(message)
| 201209 | # Description:
# This project tells you who is oncall on the current week and automatically rotates given a defined roster.
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot oncall set <person1>,..,<personN> - Sets people for the oncall roster.
# hubot oncall list - List the current roster available for oncall
# hubot oncall now - Returns who is oncall this week
# hubot oncall next - Returns who will be oncall next week
# hubot oncall last - Returns who was oncall last week.
#
# Author:
# <NAME> <<EMAIL>>
class OnCallRoster
constructor: () ->
@people = []
# 0 means current week, -1 means last week, 1 next week, and so on.
getWeekNumber: (offset) ->
offset = 7 * offset
today = new Date
date = new Date(today.getFullYear(), today.getMonth(), today.getDate() + offset)
onejan = new Date(date.getFullYear(), 0, 1)
Math.floor((((date - onejan) / 86400000) + onejan.getDay() + 1) / 7)
getDayOfWeek: (weekNumber, dayNumber) ->
year = (new Date).getFullYear()
j10 = new Date(year, 0, 10, 12, 0, 0)
j4 = new Date(year, 0, 4, 12, 0, 0)
mon1 = j4.getTime() - j10.getDay() * 86400000
new Date(mon1 + ((weekNumber - 1) * 7 + dayNumber) * 86400000)
getOnCall: (week) ->
person: @people[week % @people.length]
startDate: @getPrettyDate(@getDayOfWeek(week, 0))
endDate: @getPrettyDate(@getDayOfWeek(week, 7))
getPeopleCount: () ->
@people.length
getPeopleList: () ->
@people.join(',')
setPeopleList: (list) ->
@people = list.split(',')
getPrettyDate: (date) ->
"#{date.getMonth() + 1}/#{date.getDate()}"
module.exports = (robot) ->
onCallRoster = new OnCallRoster()
robot.respond /oncall set (.*)/i, (msg) ->
onCallRoster.setPeopleList(msg.match[1])
msg.send("All set. I don't persist data though so please save the list yourself in case I reboot.")
robot.respond /oncall list/, (msg) ->
if onCallRoster.getPeopleCount() == 0
msg.send("There's no people oncall. You can set them using the command <oncall set> with a comma separated list of people.")
return
msg.send(onCallRoster.getPeopleList())
robot.respond /oncall (now|next|last)/, (msg) ->
if onCallRoster.getPeopleCount() == 0
msg.send("There's no people oncall. You can set them using the command <oncall set> with a comma separated list of people.")
return
if msg.match[1] == 'now'
week_offset = 0
week_verb = 'is'
else if msg.match[1] == 'next'
week_offset = 1
week_verb = 'will be'
else if msg.match[1] == 'last'
week_offset = -1
week_verb = 'was'
week = onCallRoster.getWeekNumber(week_offset)
oncall = onCallRoster.getOnCall(week)
message = "#{oncall.person} #{week_verb} on call from #{oncall.startDate} to #{oncall.endDate}"
msg.reply(message)
| true | # Description:
# This project tells you who is oncall on the current week and automatically rotates given a defined roster.
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot oncall set <person1>,..,<personN> - Sets people for the oncall roster.
# hubot oncall list - List the current roster available for oncall
# hubot oncall now - Returns who is oncall this week
# hubot oncall next - Returns who will be oncall next week
# hubot oncall last - Returns who was oncall last week.
#
# Author:
# PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
class OnCallRoster
constructor: () ->
@people = []
# 0 means current week, -1 means last week, 1 next week, and so on.
getWeekNumber: (offset) ->
offset = 7 * offset
today = new Date
date = new Date(today.getFullYear(), today.getMonth(), today.getDate() + offset)
onejan = new Date(date.getFullYear(), 0, 1)
Math.floor((((date - onejan) / 86400000) + onejan.getDay() + 1) / 7)
getDayOfWeek: (weekNumber, dayNumber) ->
year = (new Date).getFullYear()
j10 = new Date(year, 0, 10, 12, 0, 0)
j4 = new Date(year, 0, 4, 12, 0, 0)
mon1 = j4.getTime() - j10.getDay() * 86400000
new Date(mon1 + ((weekNumber - 1) * 7 + dayNumber) * 86400000)
getOnCall: (week) ->
person: @people[week % @people.length]
startDate: @getPrettyDate(@getDayOfWeek(week, 0))
endDate: @getPrettyDate(@getDayOfWeek(week, 7))
getPeopleCount: () ->
@people.length
getPeopleList: () ->
@people.join(',')
setPeopleList: (list) ->
@people = list.split(',')
getPrettyDate: (date) ->
"#{date.getMonth() + 1}/#{date.getDate()}"
module.exports = (robot) ->
onCallRoster = new OnCallRoster()
robot.respond /oncall set (.*)/i, (msg) ->
onCallRoster.setPeopleList(msg.match[1])
msg.send("All set. I don't persist data though so please save the list yourself in case I reboot.")
robot.respond /oncall list/, (msg) ->
if onCallRoster.getPeopleCount() == 0
msg.send("There's no people oncall. You can set them using the command <oncall set> with a comma separated list of people.")
return
msg.send(onCallRoster.getPeopleList())
robot.respond /oncall (now|next|last)/, (msg) ->
if onCallRoster.getPeopleCount() == 0
msg.send("There's no people oncall. You can set them using the command <oncall set> with a comma separated list of people.")
return
if msg.match[1] == 'now'
week_offset = 0
week_verb = 'is'
else if msg.match[1] == 'next'
week_offset = 1
week_verb = 'will be'
else if msg.match[1] == 'last'
week_offset = -1
week_verb = 'was'
week = onCallRoster.getWeekNumber(week_offset)
oncall = onCallRoster.getOnCall(week)
message = "#{oncall.person} #{week_verb} on call from #{oncall.startDate} to #{oncall.endDate}"
msg.reply(message)
|
[
{
"context": "tion for Backbone.Marionette\n#\n# Copyright (C)2012 Derick Bailey, Muted Solutions, LLC\n# Distributed Under MIT Lic",
"end": 108,
"score": 0.999780535697937,
"start": 95,
"tag": "NAME",
"value": "Derick Bailey"
},
{
"context": "nd Full License Available at:\n# http://... | client/01-main/routing.coffee | zhangcheng/bbclonemail-meteor | 1 | # Backbone.BBCloneMail
# A reference application for Backbone.Marionette
#
# Copyright (C)2012 Derick Bailey, Muted Solutions, LLC
# Distributed Under MIT License
#
# Documentation and Full License Available at:
# http://github.com/derickbailey/backbone.bbclonemail
# http://github.com/derickbailey/backbone.marionette
# Routing
# -------
BBCloneMail.module "Routing", (Routing, BBCloneMail, Backbone, Marionette, $, _) ->
# Public API
# ----------
# The `showRoute` method is a private method used to update the
# url's hash fragment route. It accepts a base route and an
# unlimited number of optional parameters for the route:
# `showRoute("foo", "bar", "baz", "etc");`.
Routing.showRoute = ->
route = getRoutePath(arguments)
Backbone.history.navigate route, false
# Helper Methods
# --------------
# Creates a proper route based on the `routeParts`
# that are passed to it.
getRoutePath = (routeParts) ->
base = routeParts[0]
length = routeParts.length
route = base
if length > 1
i = 1
while i < length
arg = routeParts[i]
route = route + "/" + arg if arg
i++
route
| 29833 | # Backbone.BBCloneMail
# A reference application for Backbone.Marionette
#
# Copyright (C)2012 <NAME>, Muted Solutions, LLC
# Distributed Under MIT License
#
# Documentation and Full License Available at:
# http://github.com/derickbailey/backbone.bbclonemail
# http://github.com/derickbailey/backbone.marionette
# Routing
# -------
BBCloneMail.module "Routing", (Routing, BBCloneMail, Backbone, Marionette, $, _) ->
# Public API
# ----------
# The `showRoute` method is a private method used to update the
# url's hash fragment route. It accepts a base route and an
# unlimited number of optional parameters for the route:
# `showRoute("foo", "bar", "baz", "etc");`.
Routing.showRoute = ->
route = getRoutePath(arguments)
Backbone.history.navigate route, false
# Helper Methods
# --------------
# Creates a proper route based on the `routeParts`
# that are passed to it.
getRoutePath = (routeParts) ->
base = routeParts[0]
length = routeParts.length
route = base
if length > 1
i = 1
while i < length
arg = routeParts[i]
route = route + "/" + arg if arg
i++
route
| true | # Backbone.BBCloneMail
# A reference application for Backbone.Marionette
#
# Copyright (C)2012 PI:NAME:<NAME>END_PI, Muted Solutions, LLC
# Distributed Under MIT License
#
# Documentation and Full License Available at:
# http://github.com/derickbailey/backbone.bbclonemail
# http://github.com/derickbailey/backbone.marionette
# Routing
# -------
BBCloneMail.module "Routing", (Routing, BBCloneMail, Backbone, Marionette, $, _) ->
# Public API
# ----------
# The `showRoute` method is a private method used to update the
# url's hash fragment route. It accepts a base route and an
# unlimited number of optional parameters for the route:
# `showRoute("foo", "bar", "baz", "etc");`.
Routing.showRoute = ->
route = getRoutePath(arguments)
Backbone.history.navigate route, false
# Helper Methods
# --------------
# Creates a proper route based on the `routeParts`
# that are passed to it.
getRoutePath = (routeParts) ->
base = routeParts[0]
length = routeParts.length
route = base
if length > 1
i = 1
while i < length
arg = routeParts[i]
route = route + "/" + arg if arg
i++
route
|
[
{
"context": "thorized OAuth2 client.\n###\n\nalice_calendar_id = '1ig43db9hq5sr6mta5kco0ij3s@group.calendar.google.com'\nbob_calendar_id = 'c625hc036oqls670bp8l62qa7c@gr",
"end": 2736,
"score": 0.9971498847007751,
"start": 2684,
"tag": "EMAIL",
"value": "1ig43db9hq5sr6mta5kco0ij3s@group.cal... | calendar.coffee | karlpokus/uppsala-hackathon-2016 | 0 | fs = require('fs')
readline = require('readline')
google = require('googleapis')
googleAuth = require('google-auth-library')
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/calendar-nodejs-quickstart.json
SCOPES = [ 'https://www.googleapis.com/auth/calendar' ]
TOKEN_DIR = '.credentials/'
TOKEN_PATH = TOKEN_DIR + 'calendar-nodejs-quickstart.json'
# Load client secrets from a local file.
###*
# Create an OAuth2 client with the given credentials, and then execute the
# given callback function.
#
# @param {Object} credentials The authorization client credentials.
# @param {function} callback The callback to call with the authorized client.
###
authorize = (credentials, callback) ->
clientSecret = credentials.installed.client_secret
clientId = credentials.installed.client_id
redirectUrl = credentials.installed.redirect_uris[0]
auth = new googleAuth
oauth2Client = new (auth.OAuth2)(clientId, clientSecret, redirectUrl)
# Check if we have previously stored a token.
fs.readFile TOKEN_PATH, (err, token) ->
if err
getNewToken oauth2Client, callback
else
oauth2Client.credentials = JSON.parse(token)
callback oauth2Client
return
return
###*
# Get and store new token after prompting for user authorization, and then
# execute the given callback with the authorized OAuth2 client.
#
# @param {google.auth.OAuth2} oauth2Client The OAuth2 client to get token for.
# @param {getEventsCallback} callback The callback to call with the authorized
# client.
###
getNewToken = (oauth2Client, callback) ->
authUrl = oauth2Client.generateAuthUrl(
access_type: 'offline'
scope: SCOPES)
console.log 'Authorize this app by visiting this url: ', authUrl
rl = readline.createInterface(
input: process.stdin
output: process.stdout)
rl.question 'Enter the code from that page here: ', (code) ->
rl.close()
oauth2Client.getToken code, (err, token) ->
if err
console.log 'Error while trying to retrieve access token', err
return
oauth2Client.credentials = token
storeToken token
callback oauth2Client
return
return
return
###*
# Store token to disk be used in later program executions.
#
# @param {Object} token The token to store to disk.
###
storeToken = (token) ->
try
fs.mkdirSync TOKEN_DIR
catch err
if err.code != 'EEXIST'
throw err
fs.writeFile TOKEN_PATH, JSON.stringify(token)
console.log 'Token stored to ' + TOKEN_PATH
return
###*
# Lists the next 10 events on the user's primary calendar.
#
# @param {google.auth.OAuth2} auth An authorized OAuth2 client.
###
alice_calendar_id = '1ig43db9hq5sr6mta5kco0ij3s@group.calendar.google.com'
bob_calendar_id = 'c625hc036oqls670bp8l62qa7c@group.calendar.google.com'
calendar = google.calendar('v3')
client_secret_content = null
listEvents = (auth) ->
calendar.events.list {
auth: auth
calendarId: bob_calendar_id
timeMin: (new Date).toISOString()
maxResults: 10
singleEvents: true
orderBy: 'startTime'
}, (err, response) ->
if err
console.log 'The API returned an error: ' + err
return
events = response.items
if events.length == 0
console.log 'No upcoming events found.'
else
console.log 'Upcoming 10 events:'
i = 0
while i < events.length
event = events[i]
start = event.start.dateTime or event.start.date
p event.start.timeZone
end = event.end.dateTime or event.end.date
console.log '%s -> %s - %s', start, end, event.summary
i++
return
return
client_secret_content = fs.readFileSync('client_secret.json').toString()
run = (callback) ->
authorize JSON.parse(client_secret_content), callback
event =
'summary': 'Business dinner'
'start':
'dateTime': '2016-10-24T17:00:00+01:00'
'timeZone': 'Europe/Stockholm'
'end':
'dateTime': '2016-10-24T19:00:00+01:00'
'timeZone': 'Europe/Stockholm'
exports.create = () ->
run (auth) ->
calendar.events.insert {
auth: auth
calendarId: alice_calendar_id
resource: event
}, (err, event) ->
if err
console.log 'There was an error contacting the Calendar service: ' + err
return
console.log 'Event created: %s', event.htmlLink
calendar.events.insert {
auth: auth
calendarId: bob_calendar_id
resource: event
}, (err, event) ->
if err
console.log 'There was an error contacting the Calendar service: ' + err
return
console.log 'Event created: %s', event.htmlLink
| 110154 | fs = require('fs')
readline = require('readline')
google = require('googleapis')
googleAuth = require('google-auth-library')
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/calendar-nodejs-quickstart.json
SCOPES = [ 'https://www.googleapis.com/auth/calendar' ]
TOKEN_DIR = '.credentials/'
TOKEN_PATH = TOKEN_DIR + 'calendar-nodejs-quickstart.json'
# Load client secrets from a local file.
###*
# Create an OAuth2 client with the given credentials, and then execute the
# given callback function.
#
# @param {Object} credentials The authorization client credentials.
# @param {function} callback The callback to call with the authorized client.
###
authorize = (credentials, callback) ->
clientSecret = credentials.installed.client_secret
clientId = credentials.installed.client_id
redirectUrl = credentials.installed.redirect_uris[0]
auth = new googleAuth
oauth2Client = new (auth.OAuth2)(clientId, clientSecret, redirectUrl)
# Check if we have previously stored a token.
fs.readFile TOKEN_PATH, (err, token) ->
if err
getNewToken oauth2Client, callback
else
oauth2Client.credentials = JSON.parse(token)
callback oauth2Client
return
return
###*
# Get and store new token after prompting for user authorization, and then
# execute the given callback with the authorized OAuth2 client.
#
# @param {google.auth.OAuth2} oauth2Client The OAuth2 client to get token for.
# @param {getEventsCallback} callback The callback to call with the authorized
# client.
###
getNewToken = (oauth2Client, callback) ->
authUrl = oauth2Client.generateAuthUrl(
access_type: 'offline'
scope: SCOPES)
console.log 'Authorize this app by visiting this url: ', authUrl
rl = readline.createInterface(
input: process.stdin
output: process.stdout)
rl.question 'Enter the code from that page here: ', (code) ->
rl.close()
oauth2Client.getToken code, (err, token) ->
if err
console.log 'Error while trying to retrieve access token', err
return
oauth2Client.credentials = token
storeToken token
callback oauth2Client
return
return
return
###*
# Store token to disk be used in later program executions.
#
# @param {Object} token The token to store to disk.
###
storeToken = (token) ->
try
fs.mkdirSync TOKEN_DIR
catch err
if err.code != 'EEXIST'
throw err
fs.writeFile TOKEN_PATH, JSON.stringify(token)
console.log 'Token stored to ' + TOKEN_PATH
return
###*
# Lists the next 10 events on the user's primary calendar.
#
# @param {google.auth.OAuth2} auth An authorized OAuth2 client.
###
alice_calendar_id = '<EMAIL>'
bob_calendar_id = '<EMAIL>'
calendar = google.calendar('v3')
client_secret_content = null
listEvents = (auth) ->
calendar.events.list {
auth: auth
calendarId: bob_calendar_id
timeMin: (new Date).toISOString()
maxResults: 10
singleEvents: true
orderBy: 'startTime'
}, (err, response) ->
if err
console.log 'The API returned an error: ' + err
return
events = response.items
if events.length == 0
console.log 'No upcoming events found.'
else
console.log 'Upcoming 10 events:'
i = 0
while i < events.length
event = events[i]
start = event.start.dateTime or event.start.date
p event.start.timeZone
end = event.end.dateTime or event.end.date
console.log '%s -> %s - %s', start, end, event.summary
i++
return
return
client_secret_content = fs.readFileSync('client_secret.json').toString()
run = (callback) ->
authorize JSON.parse(client_secret_content), callback
event =
'summary': 'Business dinner'
'start':
'dateTime': '2016-10-24T17:00:00+01:00'
'timeZone': 'Europe/Stockholm'
'end':
'dateTime': '2016-10-24T19:00:00+01:00'
'timeZone': 'Europe/Stockholm'
exports.create = () ->
run (auth) ->
calendar.events.insert {
auth: auth
calendarId: alice_calendar_id
resource: event
}, (err, event) ->
if err
console.log 'There was an error contacting the Calendar service: ' + err
return
console.log 'Event created: %s', event.htmlLink
calendar.events.insert {
auth: auth
calendarId: bob_calendar_id
resource: event
}, (err, event) ->
if err
console.log 'There was an error contacting the Calendar service: ' + err
return
console.log 'Event created: %s', event.htmlLink
| true | fs = require('fs')
readline = require('readline')
google = require('googleapis')
googleAuth = require('google-auth-library')
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/calendar-nodejs-quickstart.json
SCOPES = [ 'https://www.googleapis.com/auth/calendar' ]
TOKEN_DIR = '.credentials/'
TOKEN_PATH = TOKEN_DIR + 'calendar-nodejs-quickstart.json'
# Load client secrets from a local file.
###*
# Create an OAuth2 client with the given credentials, and then execute the
# given callback function.
#
# @param {Object} credentials The authorization client credentials.
# @param {function} callback The callback to call with the authorized client.
###
authorize = (credentials, callback) ->
clientSecret = credentials.installed.client_secret
clientId = credentials.installed.client_id
redirectUrl = credentials.installed.redirect_uris[0]
auth = new googleAuth
oauth2Client = new (auth.OAuth2)(clientId, clientSecret, redirectUrl)
# Check if we have previously stored a token.
fs.readFile TOKEN_PATH, (err, token) ->
if err
getNewToken oauth2Client, callback
else
oauth2Client.credentials = JSON.parse(token)
callback oauth2Client
return
return
###*
# Get and store new token after prompting for user authorization, and then
# execute the given callback with the authorized OAuth2 client.
#
# @param {google.auth.OAuth2} oauth2Client The OAuth2 client to get token for.
# @param {getEventsCallback} callback The callback to call with the authorized
# client.
###
getNewToken = (oauth2Client, callback) ->
authUrl = oauth2Client.generateAuthUrl(
access_type: 'offline'
scope: SCOPES)
console.log 'Authorize this app by visiting this url: ', authUrl
rl = readline.createInterface(
input: process.stdin
output: process.stdout)
rl.question 'Enter the code from that page here: ', (code) ->
rl.close()
oauth2Client.getToken code, (err, token) ->
if err
console.log 'Error while trying to retrieve access token', err
return
oauth2Client.credentials = token
storeToken token
callback oauth2Client
return
return
return
###*
# Store token to disk be used in later program executions.
#
# @param {Object} token The token to store to disk.
###
storeToken = (token) ->
try
fs.mkdirSync TOKEN_DIR
catch err
if err.code != 'EEXIST'
throw err
fs.writeFile TOKEN_PATH, JSON.stringify(token)
console.log 'Token stored to ' + TOKEN_PATH
return
###*
# Lists the next 10 events on the user's primary calendar.
#
# @param {google.auth.OAuth2} auth An authorized OAuth2 client.
###
alice_calendar_id = 'PI:EMAIL:<EMAIL>END_PI'
bob_calendar_id = 'PI:EMAIL:<EMAIL>END_PI'
calendar = google.calendar('v3')
client_secret_content = null
listEvents = (auth) ->
calendar.events.list {
auth: auth
calendarId: bob_calendar_id
timeMin: (new Date).toISOString()
maxResults: 10
singleEvents: true
orderBy: 'startTime'
}, (err, response) ->
if err
console.log 'The API returned an error: ' + err
return
events = response.items
if events.length == 0
console.log 'No upcoming events found.'
else
console.log 'Upcoming 10 events:'
i = 0
while i < events.length
event = events[i]
start = event.start.dateTime or event.start.date
p event.start.timeZone
end = event.end.dateTime or event.end.date
console.log '%s -> %s - %s', start, end, event.summary
i++
return
return
client_secret_content = fs.readFileSync('client_secret.json').toString()
run = (callback) ->
authorize JSON.parse(client_secret_content), callback
event =
'summary': 'Business dinner'
'start':
'dateTime': '2016-10-24T17:00:00+01:00'
'timeZone': 'Europe/Stockholm'
'end':
'dateTime': '2016-10-24T19:00:00+01:00'
'timeZone': 'Europe/Stockholm'
exports.create = () ->
run (auth) ->
calendar.events.insert {
auth: auth
calendarId: alice_calendar_id
resource: event
}, (err, event) ->
if err
console.log 'There was an error contacting the Calendar service: ' + err
return
console.log 'Event created: %s', event.htmlLink
calendar.events.insert {
auth: auth
calendarId: bob_calendar_id
resource: event
}, (err, event) ->
if err
console.log 'There was an error contacting the Calendar service: ' + err
return
console.log 'Event created: %s', event.htmlLink
|
[
{
"context": "###\nCopyright (c) 2014 Ramesh Nair (hiddentao.com)\n\nPermission is hereby granted, fr",
"end": 34,
"score": 0.9998844265937805,
"start": 23,
"tag": "NAME",
"value": "Ramesh Nair"
}
] | test/select.test.coffee | martianboy/squel | 0 | ###
Copyright (c) 2014 Ramesh Nair (hiddentao.com)
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
###
squel = require "../dist/squel-basic"
{_, testCreator, assert, expect, should} = require './testbase'
test = testCreator()
test['SELECT builder'] =
beforeEach: ->
@func = squel.select
@inst = @func()
'instanceof QueryBuilder': ->
assert.instanceOf @inst, squel.cls.QueryBuilder
'constructor':
'override options': ->
@inst = squel.select
usingValuePlaceholders: true
dummy: true
expectedOptions = _.assign {}, squel.cls.DefaultQueryBuilderOptions,
usingValuePlaceholders: true
dummy: true
for block in @inst.blocks
assert.same _.pick(block.options, _.keys(expectedOptions)), expectedOptions
'override blocks': ->
block = new squel.cls.StringBlock('SELECT')
@inst = @func {}, [block]
assert.same [block], @inst.blocks
'build query':
'no need to call from() first': ->
@inst.toString()
'>> function(1)':
beforeEach: -> @inst.function('1')
toString: ->
assert.same @inst.toString(), 'SELECT 1'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT 1', values: [] }
'>> function(MAX(?,?), 3, 5)':
beforeEach: -> @inst.function('MAX(?, ?)', 3, 5)
toString: ->
assert.same @inst.toString(), 'SELECT MAX(3, 5)'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT MAX(?, ?)', values: [3, 5] }
'>> from(table).from(table2, alias2)':
beforeEach: -> @inst.from('table').from('table2', 'alias2')
toString: ->
assert.same @inst.toString(), 'SELECT * FROM table, table2 `alias2`'
'>> field(squel.select().field("MAX(score)").FROM("scores"), fa1)':
beforeEach: -> @inst.field(squel.select().field("MAX(score)").from("scores"), 'fa1')
toString: ->
assert.same @inst.toString(), 'SELECT (SELECT MAX(score) FROM scores) AS "fa1" FROM table, table2 `alias2`'
'>> field(squel.case().when(score > ?, 1).then(1), fa1)':
beforeEach: -> @inst.field(squel.case().when("score > ?", 1).then(1), 'fa1')
toString: ->
assert.same @inst.toString(), 'SELECT CASE WHEN (score > 1) THEN 1 ELSE NULL END AS "fa1" FROM table, table2 `alias2`'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT CASE WHEN (score > ?) THEN 1 ELSE NULL END AS "fa1" FROM table, table2 `alias2`', values: [1] }
'>> field( squel.str(SUM(?), squel.case().when(score > ?, 1).then(1) ), fa1)':
beforeEach: -> @inst.field( squel.str('SUM(?)', squel.case().when("score > ?", 1).then(1)), 'fa1')
toString: ->
assert.same @inst.toString(), 'SELECT (SUM((CASE WHEN (score > 1) THEN 1 ELSE NULL END))) AS "fa1" FROM table, table2 `alias2`'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT (SUM(CASE WHEN (score > ?) THEN 1 ELSE NULL END)) AS "fa1" FROM table, table2 `alias2`', values: [1] }
      # Plain fields: the first aliased, the second bare. The nested contexts
      # below inherit this SELECT list via the framework's cumulative
      # beforeEach hooks.
      '>> field(field1, fa1) >> field(field2)':
        beforeEach: -> @inst.field('field1', 'fa1').field('field2')
        toString: ->
          assert.same @inst.toString(), 'SELECT field1 AS "fa1", field2 FROM table, table2 `alias2`'
        # DISTINCT is emitted straight after SELECT.
        '>> distinct()':
          beforeEach: -> @inst.distinct()
          toString: ->
            assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2`'
          # Repeated group() calls accumulate into one GROUP BY clause.
          '>> group(field) >> group(field2)':
            beforeEach: -> @inst.group('field').group('field2')
            toString: ->
              assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` GROUP BY field, field2'
            # A query builder as the WHERE value is inlined as a parenthesised
            # sub-query; note toParam() keeps no placeholder for it here.
            '>> where(a = ?, squel.select().field("MAX(score)").from("scores"))':
              beforeEach: ->
                @subQuery = squel.select().field("MAX(score)").from("scores")
                @inst.where('a = ?', @subQuery)
              toString: ->
                assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT MAX(score) FROM scores)) GROUP BY field, field2'
              toParam: ->
                assert.same @inst.toParam(), {
                  text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT MAX(score) FROM scores)) GROUP BY field, field2'
                  values: []
                }
            # Nested squel.expr() trees render with parentheses; values are
            # inlined by toString() and parameterised by toParam().
            '>> where(squel.expr().and(a = ?, 1).and( expr().or(b = ?, 2).or(c = ?, 3) ))':
              beforeEach: -> @inst.where(squel.expr().and("a = ?", 1).and(squel.expr().or("b = ?", 2).or("c = ?", 3)))
              toString: ->
                assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = 1 AND (b = 2 OR c = 3)) GROUP BY field, field2'
              toParam: ->
                assert.same @inst.toParam(), {
                  text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = ? AND (b = ? OR c = ?)) GROUP BY field, field2'
                  values: [1, 2, 3]
                }
            # A QueryBuilder inside an expression: inlined in toString(); its
            # own placeholder values surface first in toParam().
            '>> where(squel.expr().and(a = ?, QueryBuilder).and( expr().or(b = ?, 2).or(c = ?, 3) ))':
              beforeEach: ->
                subQuery = squel.select().field('field1').from('table1').where('field2 = ?', 10)
                @inst.where(squel.expr().and("a = ?", subQuery).and(squel.expr().or("b = ?", 2).or("c = ?", 3)))
              toString: ->
                assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT field1 FROM table1 WHERE (field2 = 10)) AND (b = 2 OR c = 3)) GROUP BY field, field2'
              toParam: ->
                assert.same @inst.toParam(), {
                  text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT field1 FROM table1 WHERE (field2 = ?)) AND (b = ? OR c = ?)) GROUP BY field, field2'
                  values: [10, 2, 3]
                }
            # Same shape for HAVING: rendered after GROUP BY.
            '>> having(squel.expr().and(a = ?, QueryBuilder).and( expr().or(b = ?, 2).or(c = ?, 3) ))':
              beforeEach: ->
                subQuery = squel.select().field('field1').from('table1').having('field2 = ?', 10)
                @inst.having(squel.expr().and("a = ?", subQuery).and(squel.expr().or("b = ?", 2).or("c = ?", 3)))
              toString: ->
                assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` GROUP BY field, field2 HAVING (a = (SELECT field1 FROM table1 HAVING (field2 = 10)) AND (b = 2 OR c = 3))'
              toParam: ->
                assert.same @inst.toParam(), {
                  text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` GROUP BY field, field2 HAVING (a = (SELECT field1 FROM table1 HAVING (field2 = ?)) AND (b = ? OR c = ?))'
                  values: [10, 2, 3]
                }
            # null is inlined as the literal NULL by toString() but kept as a
            # bound value by toParam().
            '>> where(a = ?, null)':
              beforeEach: -> @inst.where('a = ?', null)
              toString: ->
                assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = NULL) GROUP BY field, field2'
              toParam: ->
                assert.same @inst.toParam(), {
                  text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = ?) GROUP BY field, field2'
                  values: [null]
                }
            # Everything below builds on this simple WHERE (a = 1).
            '>> where(a = ?, 1)':
              beforeEach: -> @inst.where('a = ?', 1)
              toString: ->
                assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = 1) GROUP BY field, field2'
              toParam: ->
                assert.same @inst.toParam(), {
                  text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = ?) GROUP BY field, field2'
                  values: [1]
                }
              # join() with no condition defaults to INNER JOIN.
              '>> join(other_table)':
                beforeEach: -> @inst.join('other_table')
                toString: ->
                  assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2'
                # Default direction when none is given is ASC.
                '>> order(a)':
                  beforeEach: -> @inst.order('a')
                  toString: ->
                    assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC'
                # Passing null suppresses the direction keyword entirely.
                '>> order(a, null)':
                  beforeEach: -> @inst.order('a', null)
                  toString: ->
                    assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a'
                # A string direction is emitted verbatim (custom modifiers).
                '>> order(a, \'asc nulls last\')':
                  beforeEach: -> @inst.order('a', 'asc nulls last')
                  toString: ->
                    assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a asc nulls last'
                # Boolean true means ASC.
                '>> order(a, true)':
                  beforeEach: -> @inst.order('a', true)
                  toString: ->
                    assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC'
                  # LIMIT/OFFSET values become placeholders in toParam().
                  '>> limit(2)':
                    beforeEach: -> @inst.limit(2)
                    toString: ->
                      assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 2'
                    toParam: ->
                      assert.same @inst.toParam(), {
                        text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ?',
                        values: [1, 2]
                      }
                    # A later limit() call overrides the earlier one; 0 is a
                    # valid limit, not "no limit".
                    '>> limit(0)':
                      beforeEach: -> @inst.limit(0)
                      toString: ->
                        assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 0'
                      toParam: ->
                        assert.same @inst.toParam(), {
                          text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ?',
                          values: [1, 0]
                        }
                    # OFFSET is appended after LIMIT.
                    '>> offset(3)':
                      beforeEach: -> @inst.offset(3)
                      toString: ->
                        assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 2 OFFSET 3'
                      toParam: ->
                        assert.same @inst.toParam(), {
                          text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ? OFFSET ?',
                          values: [1, 2, 3]
                        }
                      # As with limit, offset(0) overrides and is emitted.
                      '>> offset(0)':
                        beforeEach: -> @inst.offset(0)
                        toString: ->
                          assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 2 OFFSET 0'
                        toParam: ->
                          assert.same @inst.toParam(), {
                            text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ? OFFSET ?',
                            values: [1, 2, 0]
                          }
'>> order(DIST(?,?), true, 2, 3)':
beforeEach: -> @inst.order('DIST(?, ?)', true, 2, false)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY DIST(2, FALSE) ASC'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY DIST(?, ?) ASC'
values: [1, 2, false]
}
                # NOTE(review): duplicate of the '>> order(a)' key earlier in
                # this same context object — in a CoffeeScript object literal
                # the later key wins, so only one of the two (identical) tests
                # is actually registered.
                '>> order(a)':
                  beforeEach: -> @inst.order('a')
                  toString: ->
                    assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC'
                '>> order(b, null)':
                  beforeEach: -> @inst.order('b', null)
                  toString: ->
                    assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY b'
              # ON condition built from an expression containing sub-queries;
              # sub-query params appear in toParam() in rendering order
              # (JOIN clause before WHERE clause).
              '>> join(other_table, condition = expr())':
                beforeEach: ->
                  subQuery = squel.select().field('abc').from('table1').where('adf = ?', 'today1')
                  subQuery2 = squel.select().field('xyz').from('table2').where('adf = ?', 'today2')
                  expr = squel.expr().and('field1 = ?', subQuery)
                  @inst.join('other_table', null, expr)
                  @inst.where('def IN ?', subQuery2)
                toString: ->
                  assert.same @inst.toString(), "SELECT DISTINCT field1 AS \"fa1\", field2 FROM table, table2 `alias2` INNER JOIN other_table ON (field1 = (SELECT abc FROM table1 WHERE (adf = 'today1'))) WHERE (a = 1) AND (def IN (SELECT xyz FROM table2 WHERE (adf = 'today2'))) GROUP BY field, field2"
                toParam: ->
                  assert.same @inst.toParam(), { text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table ON (field1 = (SELECT abc FROM table1 WHERE (adf = ?))) WHERE (a = ?) AND (def IN (SELECT xyz FROM table2 WHERE (adf = ?))) GROUP BY field, field2', values: ["today1",1,"today2"] }
    # Query builders can be nested anywhere a table name is accepted.
    'nested queries':
      'basic': ->
        inner1 = squel.select().from('students')
        inner2 = squel.select().from('scores')
        @inst.from(inner1).from(inner2, 'scores')
        assert.same @inst.toString(), "SELECT * FROM (SELECT * FROM students), (SELECT * FROM scores) `scores`"
      'deep nesting': ->
        inner1 = squel.select().from('students')
        inner2 = squel.select().from(inner1)
        @inst.from(inner2)
        assert.same @inst.toString(), "SELECT * FROM (SELECT * FROM (SELECT * FROM students))"
      'nesting in JOINs': ->
        inner1 = squel.select().from('students')
        inner2 = squel.select().from(inner1)
        @inst.from('schools').join(inner2, 'meh', 'meh.ID = ID')
        assert.same @inst.toString(), "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students)) `meh` ON (meh.ID = ID)"
      # Params from nested builders keep left-to-right rendering order, and
      # numberedParameters swaps ? for $1, $2, ...
      'nesting in JOINs with params': ->
        inner1 = squel.select().from('students').where('age = ?', 6)
        inner2 = squel.select().from(inner1)
        @inst.from('schools').where('school_type = ?', 'junior').join(inner2, 'meh', 'meh.ID = ID')
        assert.same @inst.toString(), "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students WHERE (age = 6))) `meh` ON (meh.ID = ID) WHERE (school_type = 'junior')"
        assert.same @inst.toParam(), { "text": "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students WHERE (age = ?))) `meh` ON (meh.ID = ID) WHERE (school_type = ?)", "values": [6,'junior'] }
        assert.same @inst.toParam({ "numberedParameters": true}), { "text": "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students WHERE (age = $1))) `meh` ON (meh.ID = ID) WHERE (school_type = $2)", "values": [6,'junior'] }
'Complex table name, e.g. LATERAL (#230)':
beforeEach: ->
@inst = squel.select().from('foo').from(squel.str('LATERAL(?)', squel.select().from('bar').where('bar.id = ?', 2)), 'ss')
'toString': ->
assert.same @inst.toString(), 'SELECT * FROM foo, (LATERAL((SELECT * FROM bar WHERE (bar.id = 2)))) `ss`',
'toParam': ->
assert.same @inst.toParam(), {
text: 'SELECT * FROM foo, (LATERAL((SELECT * FROM bar WHERE (bar.id = ?)))) `ss`'
values: [2]
}
    # clone() must deep-copy: later mutations of the original (or of shared
    # expression objects) must not leak into the clone, and vice versa.
    'cloning':
      'basic': ->
        newinst = @inst.from('students').limit(10).clone()
        newinst.limit(20)
        assert.same 'SELECT * FROM students LIMIT 10', @inst.toString()
        assert.same 'SELECT * FROM students LIMIT 20', newinst.toString()
      'with expressions (ticket #120)': ->
        expr = squel.expr().and('a = 1')
        newinst = @inst.from('table').left_join('table_2', 't', expr)
          .clone()
          .where('c = 1')
        # Mutating the original expression affects only the original query.
        expr.and('b = 2')
        assert.same 'SELECT * FROM table LEFT JOIN table_2 `t` ON (a = 1 AND b = 2)', @inst.toString()
        assert.same 'SELECT * FROM table LEFT JOIN table_2 `t` ON (a = 1) WHERE (c = 1)', newinst.toString()
      'with sub-queries (ticket #120)': ->
        newinst = @inst.from(squel.select().from('students')).limit(30)
          .clone()
          .where('c = 1')
          .limit(35)
        assert.same 'SELECT * FROM (SELECT * FROM students) LIMIT 30', @inst.toString()
        assert.same 'SELECT * FROM (SELECT * FROM students) WHERE (c = 1) LIMIT 35', newinst.toString()
      'with complex expressions': ->
        expr = squel.expr().and(
          squel.expr().or('b = 2').or(
            squel.expr().and('c = 3').and('d = 4')
          )
        ).and('a = 1')
        newinst = @inst.from('table').left_join('table_2', 't', expr)
          .clone()
          .where('c = 1')
        expr.and('e = 5')
        assert.same @inst.toString(), 'SELECT * FROM table LEFT JOIN table_2 `t` ON ((b = 2 OR (c = 3 AND d = 4)) AND a = 1 AND e = 5)'
        assert.same newinst.toString(), 'SELECT * FROM table LEFT JOIN table_2 `t` ON ((b = 2 OR (c = 3 AND d = 4)) AND a = 1) WHERE (c = 1)'
    # The separator option replaces the single space between clause blocks.
    'can specify block separator': ->
      assert.same( squel.select({separator: '\n'})
        .field('thing')
        .from('table')
        .toString(), """
          SELECT
          thing
          FROM table
        """
      )
  # #242: autoQuoteTableNames wraps plain table names (and aliases) in the
  # quote character, but leaves nested query builders unquoted.
  '#242 - auto-quote table names':
    beforeEach: ->
      @inst = squel
        .select({ autoQuoteTableNames: true })
        .field('name')
        .where('age > ?', 15)
    'using string':
      beforeEach: ->
        @inst.from('students', 's')
      toString: ->
        assert.same @inst.toString(), """
          SELECT name FROM `students` `s` WHERE (age > 15)
        """
      toParam: ->
        assert.same @inst.toParam(), {
          "text": "SELECT name FROM `students` `s` WHERE (age > ?)"
          "values": [15]
        }
    'using query builder':
      beforeEach: ->
        @inst.from(squel.select().from('students'), 's')
      toString: ->
        assert.same @inst.toString(), """
          SELECT name FROM (SELECT * FROM students) `s` WHERE (age > 15)
        """
      toParam: ->
        assert.same @inst.toParam(), {
          "text": "SELECT name FROM (SELECT * FROM students) `s` WHERE (age > ?)"
          "values": [15]
        }
  # UNION / UNION ALL: each unioned query is appended in parentheses and its
  # bound values concatenate after the parent query's.
  'UNION JOINs':
    'Two Queries NO Params':
      beforeEach: ->
        @qry1 = squel.select().field('name').from('students').where('age > 15')
        @qry2 = squel.select().field('name').from('students').where('age < 6')
        @qry1.union(@qry2)
      toString: ->
        assert.same @qry1.toString(), """
          SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6))
        """
      toParam: ->
        assert.same @qry1.toParam(), {
          "text": "SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6))"
          "values": [
          ]
        }
    'Two Queries with Params':
      beforeEach: ->
        @qry1 = squel.select().field('name').from('students').where('age > ?', 15)
        @qry2 = squel.select().field('name').from('students').where('age < ?', 6)
        @qry1.union(@qry2)
      toString: ->
        assert.same @qry1.toString(), """
          SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6))
        """
      toParam: ->
        assert.same @qry1.toParam(), {
          "text": "SELECT name FROM students WHERE (age > ?) UNION (SELECT name FROM students WHERE (age < ?))"
          "values": [
            15
            6
          ]
        }
    # Mixed parameterised and literal WHEREs: only the ?s produce values.
    'Three Queries':
      beforeEach: ->
        @qry1 = squel.select().field('name').from('students').where('age > ?', 15)
        @qry2 = squel.select().field('name').from('students').where('age < 6')
        @qry3 = squel.select().field('name').from('students').where('age = ?', 8)
        @qry1.union(@qry2)
        @qry1.union(@qry3)
      toParam: ->
        assert.same @qry1.toParam(), {
          "text": "SELECT name FROM students WHERE (age > ?) UNION (SELECT name FROM students WHERE (age < 6)) UNION (SELECT name FROM students WHERE (age = ?))"
          "values": [
            15
            8
          ]
        }
      # numberedParametersStartAt shifts the $n numbering.
      'toParam(2)': ->
        assert.same @qry1.toParam({ "numberedParameters": true, "numberedParametersStartAt": 2}), {
          "text": "SELECT name FROM students WHERE (age > $2) UNION (SELECT name FROM students WHERE (age < 6)) UNION (SELECT name FROM students WHERE (age = $3))"
          "values": [
            15
            8
          ]
        }
    # union_all() nests an already-unioned chain; numbering spans the lot.
    'Multi-Parameter Query':
      beforeEach: ->
        @qry1 = squel.select().field('name').from('students').where('age > ?', 15)
        @qry2 = squel.select().field('name').from('students').where('age < ?', 6)
        @qry3 = squel.select().field('name').from('students').where('age = ?', 8)
        @qry4 = squel.select().field('name').from('students').where('age IN [?, ?]', 2, 10)
        @qry1.union(@qry2)
        @qry1.union(@qry3)
        @qry4.union_all(@qry1)
      toString: ->
        assert.same @qry4.toString(), """
          SELECT name FROM students WHERE (age IN [2, 10]) UNION ALL (SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6)) UNION (SELECT name FROM students WHERE (age = 8)))
        """
      toParam: ->
        assert.same @qry4.toParam({ "numberedParameters": true}), {
          "text": "SELECT name FROM students WHERE (age IN [$1, $2]) UNION ALL (SELECT name FROM students WHERE (age > $3) UNION (SELECT name FROM students WHERE (age < $4)) UNION (SELECT name FROM students WHERE (age = $5)))"
          "values": [
            2
            10
            15
            6
            8
          ]
        }
  # squel.str() embeds a sub-query inside a raw SQL fragment (here EXISTS)
  # used as a whole WHERE condition.
  'Where builder expression':
    beforeEach: ->
      @inst = squel.select().from('table').where('a = ?', 5)
        .where(squel.str('EXISTS(?)', squel.select().from('blah').where('b > ?', 6)))
    toString: ->
      assert.same @inst.toString(), """
        SELECT * FROM table WHERE (a = 5) AND (EXISTS((SELECT * FROM blah WHERE (b > 6))))
      """
    toParam: ->
      assert.same @inst.toParam(), {
        text: "SELECT * FROM table WHERE (a = ?) AND (EXISTS((SELECT * FROM blah WHERE (b > ?))))",
        values: [5, 6]
      }
  # A squel.str() fragment can also serve as a JOIN's ON condition.
  'Join on builder expression':
    beforeEach: ->
      @inst = squel.select().from('table').join('table2', 't2',
        squel.str('EXISTS(?)', squel.select().from('blah').where('b > ?', 6))
      )
    toString: ->
      assert.same @inst.toString(), """
        SELECT * FROM table INNER JOIN table2 `t2` ON (EXISTS((SELECT * FROM blah WHERE (b > 6))))
      """
    toParam: ->
      assert.same @inst.toParam(), {
        text: "SELECT * FROM table INNER JOIN table2 `t2` ON (EXISTS((SELECT * FROM blah WHERE (b > ?))))",
        values: [6]
      }
  # #301: squel.rstr() is a raw string — unlike squel.str() it is NOT wrapped
  # in extra parentheses when nested as a FROM target.
  '#301 - FROM rstr() with nesting':
    beforeEach: ->
      @inst = squel.select().from(squel.rstr("generate_series(?,?,?)",1,10,2), "tblfn(odds)")
    toString: ->
      assert.same @inst.toString(), """
        SELECT * FROM generate_series(1,10,2) `tblfn(odds)`
      """
    toParam: ->
      assert.same @inst.toParam(), {
        text: "SELECT * FROM generate_series(?,?,?) `tblfn(odds)`",
        values:[1,10,2]
      }
# Register this file's test suite with the shared runner, keyed by filename.
# No-op when loaded outside a CommonJS environment (module undefined).
if module?
  module.exports[require('path').basename(__filename)] = test
| 183810 | ###
Copyright (c) 2014 <NAME> (hiddentao.com)
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
###
squel = require "../dist/squel-basic"
{_, testCreator, assert, expect, should} = require './testbase'
test = testCreator()
test['SELECT builder'] =
beforeEach: ->
@func = squel.select
@inst = @func()
'instanceof QueryBuilder': ->
assert.instanceOf @inst, squel.cls.QueryBuilder
'constructor':
'override options': ->
@inst = squel.select
usingValuePlaceholders: true
dummy: true
expectedOptions = _.assign {}, squel.cls.DefaultQueryBuilderOptions,
usingValuePlaceholders: true
dummy: true
for block in @inst.blocks
assert.same _.pick(block.options, _.keys(expectedOptions)), expectedOptions
'override blocks': ->
block = new squel.cls.StringBlock('SELECT')
@inst = @func {}, [block]
assert.same [block], @inst.blocks
'build query':
'no need to call from() first': ->
@inst.toString()
'>> function(1)':
beforeEach: -> @inst.function('1')
toString: ->
assert.same @inst.toString(), 'SELECT 1'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT 1', values: [] }
'>> function(MAX(?,?), 3, 5)':
beforeEach: -> @inst.function('MAX(?, ?)', 3, 5)
toString: ->
assert.same @inst.toString(), 'SELECT MAX(3, 5)'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT MAX(?, ?)', values: [3, 5] }
'>> from(table).from(table2, alias2)':
beforeEach: -> @inst.from('table').from('table2', 'alias2')
toString: ->
assert.same @inst.toString(), 'SELECT * FROM table, table2 `alias2`'
'>> field(squel.select().field("MAX(score)").FROM("scores"), fa1)':
beforeEach: -> @inst.field(squel.select().field("MAX(score)").from("scores"), 'fa1')
toString: ->
assert.same @inst.toString(), 'SELECT (SELECT MAX(score) FROM scores) AS "fa1" FROM table, table2 `alias2`'
'>> field(squel.case().when(score > ?, 1).then(1), fa1)':
beforeEach: -> @inst.field(squel.case().when("score > ?", 1).then(1), 'fa1')
toString: ->
assert.same @inst.toString(), 'SELECT CASE WHEN (score > 1) THEN 1 ELSE NULL END AS "fa1" FROM table, table2 `alias2`'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT CASE WHEN (score > ?) THEN 1 ELSE NULL END AS "fa1" FROM table, table2 `alias2`', values: [1] }
'>> field( squel.str(SUM(?), squel.case().when(score > ?, 1).then(1) ), fa1)':
beforeEach: -> @inst.field( squel.str('SUM(?)', squel.case().when("score > ?", 1).then(1)), 'fa1')
toString: ->
assert.same @inst.toString(), 'SELECT (SUM((CASE WHEN (score > 1) THEN 1 ELSE NULL END))) AS "fa1" FROM table, table2 `alias2`'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT (SUM(CASE WHEN (score > ?) THEN 1 ELSE NULL END)) AS "fa1" FROM table, table2 `alias2`', values: [1] }
'>> field(field1, fa1) >> field(field2)':
beforeEach: -> @inst.field('field1', 'fa1').field('field2')
toString: ->
assert.same @inst.toString(), 'SELECT field1 AS "fa1", field2 FROM table, table2 `alias2`'
'>> distinct()':
beforeEach: -> @inst.distinct()
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2`'
'>> group(field) >> group(field2)':
beforeEach: -> @inst.group('field').group('field2')
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` GROUP BY field, field2'
'>> where(a = ?, squel.select().field("MAX(score)").from("scores"))':
beforeEach: ->
@subQuery = squel.select().field("MAX(score)").from("scores")
@inst.where('a = ?', @subQuery)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT MAX(score) FROM scores)) GROUP BY field, field2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT MAX(score) FROM scores)) GROUP BY field, field2'
values: []
}
'>> where(squel.expr().and(a = ?, 1).and( expr().or(b = ?, 2).or(c = ?, 3) ))':
beforeEach: -> @inst.where(squel.expr().and("a = ?", 1).and(squel.expr().or("b = ?", 2).or("c = ?", 3)))
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = 1 AND (b = 2 OR c = 3)) GROUP BY field, field2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = ? AND (b = ? OR c = ?)) GROUP BY field, field2'
values: [1, 2, 3]
}
'>> where(squel.expr().and(a = ?, QueryBuilder).and( expr().or(b = ?, 2).or(c = ?, 3) ))':
beforeEach: ->
subQuery = squel.select().field('field1').from('table1').where('field2 = ?', 10)
@inst.where(squel.expr().and("a = ?", subQuery).and(squel.expr().or("b = ?", 2).or("c = ?", 3)))
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT field1 FROM table1 WHERE (field2 = 10)) AND (b = 2 OR c = 3)) GROUP BY field, field2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT field1 FROM table1 WHERE (field2 = ?)) AND (b = ? OR c = ?)) GROUP BY field, field2'
values: [10, 2, 3]
}
'>> having(squel.expr().and(a = ?, QueryBuilder).and( expr().or(b = ?, 2).or(c = ?, 3) ))':
beforeEach: ->
subQuery = squel.select().field('field1').from('table1').having('field2 = ?', 10)
@inst.having(squel.expr().and("a = ?", subQuery).and(squel.expr().or("b = ?", 2).or("c = ?", 3)))
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` GROUP BY field, field2 HAVING (a = (SELECT field1 FROM table1 HAVING (field2 = 10)) AND (b = 2 OR c = 3))'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` GROUP BY field, field2 HAVING (a = (SELECT field1 FROM table1 HAVING (field2 = ?)) AND (b = ? OR c = ?))'
values: [10, 2, 3]
}
'>> where(a = ?, null)':
beforeEach: -> @inst.where('a = ?', null)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = NULL) GROUP BY field, field2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = ?) GROUP BY field, field2'
values: [null]
}
'>> where(a = ?, 1)':
beforeEach: -> @inst.where('a = ?', 1)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = 1) GROUP BY field, field2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = ?) GROUP BY field, field2'
values: [1]
}
'>> join(other_table)':
beforeEach: -> @inst.join('other_table')
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2'
'>> order(a)':
beforeEach: -> @inst.order('a')
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC'
'>> order(a, null)':
beforeEach: -> @inst.order('a', null)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a'
'>> order(a, \'asc nulls last\')':
beforeEach: -> @inst.order('a', 'asc nulls last')
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a asc nulls last'
'>> order(a, true)':
beforeEach: -> @inst.order('a', true)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC'
'>> limit(2)':
beforeEach: -> @inst.limit(2)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ?',
values: [1, 2]
}
'>> limit(0)':
beforeEach: -> @inst.limit(0)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 0'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ?',
values: [1, 0]
}
'>> offset(3)':
beforeEach: -> @inst.offset(3)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 2 OFFSET 3'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ? OFFSET ?',
values: [1, 2, 3]
}
'>> offset(0)':
beforeEach: -> @inst.offset(0)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 2 OFFSET 0'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ? OFFSET ?',
values: [1, 2, 0]
}
'>> order(DIST(?,?), true, 2, 3)':
beforeEach: -> @inst.order('DIST(?, ?)', true, 2, false)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY DIST(2, FALSE) ASC'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY DIST(?, ?) ASC'
values: [1, 2, false]
}
'>> order(a)':
beforeEach: -> @inst.order('a')
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC'
'>> order(b, null)':
beforeEach: -> @inst.order('b', null)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY b'
'>> join(other_table, condition = expr())':
beforeEach: ->
subQuery = squel.select().field('abc').from('table1').where('adf = ?', 'today1')
subQuery2 = squel.select().field('xyz').from('table2').where('adf = ?', 'today2')
expr = squel.expr().and('field1 = ?', subQuery)
@inst.join('other_table', null, expr)
@inst.where('def IN ?', subQuery2)
toString: ->
assert.same @inst.toString(), "SELECT DISTINCT field1 AS \"fa1\", field2 FROM table, table2 `alias2` INNER JOIN other_table ON (field1 = (SELECT abc FROM table1 WHERE (adf = 'today1'))) WHERE (a = 1) AND (def IN (SELECT xyz FROM table2 WHERE (adf = 'today2'))) GROUP BY field, field2"
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table ON (field1 = (SELECT abc FROM table1 WHERE (adf = ?))) WHERE (a = ?) AND (def IN (SELECT xyz FROM table2 WHERE (adf = ?))) GROUP BY field, field2', values: ["today1",1,"today2"] }
'nested queries':
'basic': ->
inner1 = squel.select().from('students')
inner2 = squel.select().from('scores')
@inst.from(inner1).from(inner2, 'scores')
assert.same @inst.toString(), "SELECT * FROM (SELECT * FROM students), (SELECT * FROM scores) `scores`"
'deep nesting': ->
inner1 = squel.select().from('students')
inner2 = squel.select().from(inner1)
@inst.from(inner2)
assert.same @inst.toString(), "SELECT * FROM (SELECT * FROM (SELECT * FROM students))"
'nesting in JOINs': ->
inner1 = squel.select().from('students')
inner2 = squel.select().from(inner1)
@inst.from('schools').join(inner2, 'meh', 'meh.ID = ID')
assert.same @inst.toString(), "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students)) `meh` ON (meh.ID = ID)"
'nesting in JOINs with params': ->
inner1 = squel.select().from('students').where('age = ?', 6)
inner2 = squel.select().from(inner1)
@inst.from('schools').where('school_type = ?', 'junior').join(inner2, 'meh', 'meh.ID = ID')
assert.same @inst.toString(), "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students WHERE (age = 6))) `meh` ON (meh.ID = ID) WHERE (school_type = 'junior')"
assert.same @inst.toParam(), { "text": "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students WHERE (age = ?))) `meh` ON (meh.ID = ID) WHERE (school_type = ?)", "values": [6,'junior'] }
assert.same @inst.toParam({ "numberedParameters": true}), { "text": "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students WHERE (age = $1))) `meh` ON (meh.ID = ID) WHERE (school_type = $2)", "values": [6,'junior'] }
'Complex table name, e.g. LATERAL (#230)':
beforeEach: ->
@inst = squel.select().from('foo').from(squel.str('LATERAL(?)', squel.select().from('bar').where('bar.id = ?', 2)), 'ss')
'toString': ->
assert.same @inst.toString(), 'SELECT * FROM foo, (LATERAL((SELECT * FROM bar WHERE (bar.id = 2)))) `ss`',
'toParam': ->
assert.same @inst.toParam(), {
text: 'SELECT * FROM foo, (LATERAL((SELECT * FROM bar WHERE (bar.id = ?)))) `ss`'
values: [2]
}
'cloning':
'basic': ->
newinst = @inst.from('students').limit(10).clone()
newinst.limit(20)
assert.same 'SELECT * FROM students LIMIT 10', @inst.toString()
assert.same 'SELECT * FROM students LIMIT 20', newinst.toString()
'with expressions (ticket #120)': ->
expr = squel.expr().and('a = 1')
newinst = @inst.from('table').left_join('table_2', 't', expr)
.clone()
.where('c = 1')
expr.and('b = 2')
assert.same 'SELECT * FROM table LEFT JOIN table_2 `t` ON (a = 1 AND b = 2)', @inst.toString()
assert.same 'SELECT * FROM table LEFT JOIN table_2 `t` ON (a = 1) WHERE (c = 1)', newinst.toString()
'with sub-queries (ticket #120)': ->
newinst = @inst.from(squel.select().from('students')).limit(30)
.clone()
.where('c = 1')
.limit(35)
assert.same 'SELECT * FROM (SELECT * FROM students) LIMIT 30', @inst.toString()
assert.same 'SELECT * FROM (SELECT * FROM students) WHERE (c = 1) LIMIT 35', newinst.toString()
'with complex expressions': ->
expr = squel.expr().and(
squel.expr().or('b = 2').or(
squel.expr().and('c = 3').and('d = 4')
)
).and('a = 1')
newinst = @inst.from('table').left_join('table_2', 't', expr)
.clone()
.where('c = 1')
expr.and('e = 5')
assert.same @inst.toString(), 'SELECT * FROM table LEFT JOIN table_2 `t` ON ((b = 2 OR (c = 3 AND d = 4)) AND a = 1 AND e = 5)'
assert.same newinst.toString(), 'SELECT * FROM table LEFT JOIN table_2 `t` ON ((b = 2 OR (c = 3 AND d = 4)) AND a = 1) WHERE (c = 1)'
'can specify block separator': ->
assert.same( squel.select({separator: '\n'})
.field('thing')
.from('table')
.toString(), """
SELECT
thing
FROM table
"""
)
'#242 - auto-quote table names':
beforeEach: ->
@inst = squel
.select({ autoQuoteTableNames: true })
.field('name')
.where('age > ?', 15)
'using string':
beforeEach: ->
@inst.from('students', 's')
toString: ->
assert.same @inst.toString(), """
SELECT name FROM `students` `s` WHERE (age > 15)
"""
toParam: ->
assert.same @inst.toParam(), {
"text": "SELECT name FROM `students` `s` WHERE (age > ?)"
"values": [15]
}
'using query builder':
beforeEach: ->
@inst.from(squel.select().from('students'), 's')
toString: ->
assert.same @inst.toString(), """
SELECT name FROM (SELECT * FROM students) `s` WHERE (age > 15)
"""
toParam: ->
assert.same @inst.toParam(), {
"text": "SELECT name FROM (SELECT * FROM students) `s` WHERE (age > ?)"
"values": [15]
}
'UNION JOINs':
'Two Queries NO Params':
beforeEach: ->
@qry1 = squel.select().field('name').from('students').where('age > 15')
@qry2 = squel.select().field('name').from('students').where('age < 6')
@qry1.union(@qry2)
toString: ->
assert.same @qry1.toString(), """
SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6))
"""
toParam: ->
assert.same @qry1.toParam(), {
"text": "SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6))"
"values": [
]
}
'Two Queries with Params':
beforeEach: ->
@qry1 = squel.select().field('name').from('students').where('age > ?', 15)
@qry2 = squel.select().field('name').from('students').where('age < ?', 6)
@qry1.union(@qry2)
toString: ->
assert.same @qry1.toString(), """
SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6))
"""
toParam: ->
assert.same @qry1.toParam(), {
"text": "SELECT name FROM students WHERE (age > ?) UNION (SELECT name FROM students WHERE (age < ?))"
"values": [
15
6
]
}
'Three Queries':
beforeEach: ->
@qry1 = squel.select().field('name').from('students').where('age > ?', 15)
@qry2 = squel.select().field('name').from('students').where('age < 6')
@qry3 = squel.select().field('name').from('students').where('age = ?', 8)
@qry1.union(@qry2)
@qry1.union(@qry3)
toParam: ->
assert.same @qry1.toParam(), {
"text": "SELECT name FROM students WHERE (age > ?) UNION (SELECT name FROM students WHERE (age < 6)) UNION (SELECT name FROM students WHERE (age = ?))"
"values": [
15
8
]
}
'toParam(2)': ->
assert.same @qry1.toParam({ "numberedParameters": true, "numberedParametersStartAt": 2}), {
"text": "SELECT name FROM students WHERE (age > $2) UNION (SELECT name FROM students WHERE (age < 6)) UNION (SELECT name FROM students WHERE (age = $3))"
"values": [
15
8
]
}
'Multi-Parameter Query':
beforeEach: ->
@qry1 = squel.select().field('name').from('students').where('age > ?', 15)
@qry2 = squel.select().field('name').from('students').where('age < ?', 6)
@qry3 = squel.select().field('name').from('students').where('age = ?', 8)
@qry4 = squel.select().field('name').from('students').where('age IN [?, ?]', 2, 10)
@qry1.union(@qry2)
@qry1.union(@qry3)
@qry4.union_all(@qry1)
toString: ->
assert.same @qry4.toString(), """
SELECT name FROM students WHERE (age IN [2, 10]) UNION ALL (SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6)) UNION (SELECT name FROM students WHERE (age = 8)))
"""
toParam: ->
assert.same @qry4.toParam({ "numberedParameters": true}), {
"text": "SELECT name FROM students WHERE (age IN [$1, $2]) UNION ALL (SELECT name FROM students WHERE (age > $3) UNION (SELECT name FROM students WHERE (age < $4)) UNION (SELECT name FROM students WHERE (age = $5)))"
"values": [
2
10
15
6
8
]
}
'Where builder expression':
beforeEach: ->
@inst = squel.select().from('table').where('a = ?', 5)
.where(squel.str('EXISTS(?)', squel.select().from('blah').where('b > ?', 6)))
toString: ->
assert.same @inst.toString(), """
SELECT * FROM table WHERE (a = 5) AND (EXISTS((SELECT * FROM blah WHERE (b > 6))))
"""
toParam: ->
assert.same @inst.toParam(), {
text: "SELECT * FROM table WHERE (a = ?) AND (EXISTS((SELECT * FROM blah WHERE (b > ?))))",
values: [5, 6]
}
'Join on builder expression':
beforeEach: ->
@inst = squel.select().from('table').join('table2', 't2',
squel.str('EXISTS(?)', squel.select().from('blah').where('b > ?', 6))
)
toString: ->
assert.same @inst.toString(), """
SELECT * FROM table INNER JOIN table2 `t2` ON (EXISTS((SELECT * FROM blah WHERE (b > 6))))
"""
toParam: ->
assert.same @inst.toParam(), {
text: "SELECT * FROM table INNER JOIN table2 `t2` ON (EXISTS((SELECT * FROM blah WHERE (b > ?))))",
values: [6]
}
'#301 - FROM rstr() with nesting':
beforeEach: ->
@inst = squel.select().from(squel.rstr("generate_series(?,?,?)",1,10,2), "tblfn(odds)")
toString: ->
assert.same @inst.toString(), """
SELECT * FROM generate_series(1,10,2) `tblfn(odds)`
"""
toParam: ->
assert.same @inst.toParam(), {
text: "SELECT * FROM generate_series(?,?,?) `tblfn(odds)`",
values:[1,10,2]
}
module?.exports[require('path').basename(__filename)] = test
| true | ###
Copyright (c) 2014 PI:NAME:<NAME>END_PI (hiddentao.com)
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
###
squel = require "../dist/squel-basic"
{_, testCreator, assert, expect, should} = require './testbase'
test = testCreator()
test['SELECT builder'] =
beforeEach: ->
@func = squel.select
@inst = @func()
'instanceof QueryBuilder': ->
assert.instanceOf @inst, squel.cls.QueryBuilder
'constructor':
'override options': ->
@inst = squel.select
usingValuePlaceholders: true
dummy: true
expectedOptions = _.assign {}, squel.cls.DefaultQueryBuilderOptions,
usingValuePlaceholders: true
dummy: true
for block in @inst.blocks
assert.same _.pick(block.options, _.keys(expectedOptions)), expectedOptions
'override blocks': ->
block = new squel.cls.StringBlock('SELECT')
@inst = @func {}, [block]
assert.same [block], @inst.blocks
'build query':
'no need to call from() first': ->
@inst.toString()
'>> function(1)':
beforeEach: -> @inst.function('1')
toString: ->
assert.same @inst.toString(), 'SELECT 1'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT 1', values: [] }
'>> function(MAX(?,?), 3, 5)':
beforeEach: -> @inst.function('MAX(?, ?)', 3, 5)
toString: ->
assert.same @inst.toString(), 'SELECT MAX(3, 5)'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT MAX(?, ?)', values: [3, 5] }
'>> from(table).from(table2, alias2)':
beforeEach: -> @inst.from('table').from('table2', 'alias2')
toString: ->
assert.same @inst.toString(), 'SELECT * FROM table, table2 `alias2`'
'>> field(squel.select().field("MAX(score)").FROM("scores"), fa1)':
beforeEach: -> @inst.field(squel.select().field("MAX(score)").from("scores"), 'fa1')
toString: ->
assert.same @inst.toString(), 'SELECT (SELECT MAX(score) FROM scores) AS "fa1" FROM table, table2 `alias2`'
'>> field(squel.case().when(score > ?, 1).then(1), fa1)':
beforeEach: -> @inst.field(squel.case().when("score > ?", 1).then(1), 'fa1')
toString: ->
assert.same @inst.toString(), 'SELECT CASE WHEN (score > 1) THEN 1 ELSE NULL END AS "fa1" FROM table, table2 `alias2`'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT CASE WHEN (score > ?) THEN 1 ELSE NULL END AS "fa1" FROM table, table2 `alias2`', values: [1] }
'>> field( squel.str(SUM(?), squel.case().when(score > ?, 1).then(1) ), fa1)':
beforeEach: -> @inst.field( squel.str('SUM(?)', squel.case().when("score > ?", 1).then(1)), 'fa1')
toString: ->
assert.same @inst.toString(), 'SELECT (SUM((CASE WHEN (score > 1) THEN 1 ELSE NULL END))) AS "fa1" FROM table, table2 `alias2`'
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT (SUM(CASE WHEN (score > ?) THEN 1 ELSE NULL END)) AS "fa1" FROM table, table2 `alias2`', values: [1] }
'>> field(field1, fa1) >> field(field2)':
beforeEach: -> @inst.field('field1', 'fa1').field('field2')
toString: ->
assert.same @inst.toString(), 'SELECT field1 AS "fa1", field2 FROM table, table2 `alias2`'
'>> distinct()':
beforeEach: -> @inst.distinct()
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2`'
'>> group(field) >> group(field2)':
beforeEach: -> @inst.group('field').group('field2')
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` GROUP BY field, field2'
'>> where(a = ?, squel.select().field("MAX(score)").from("scores"))':
beforeEach: ->
@subQuery = squel.select().field("MAX(score)").from("scores")
@inst.where('a = ?', @subQuery)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT MAX(score) FROM scores)) GROUP BY field, field2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT MAX(score) FROM scores)) GROUP BY field, field2'
values: []
}
'>> where(squel.expr().and(a = ?, 1).and( expr().or(b = ?, 2).or(c = ?, 3) ))':
beforeEach: -> @inst.where(squel.expr().and("a = ?", 1).and(squel.expr().or("b = ?", 2).or("c = ?", 3)))
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = 1 AND (b = 2 OR c = 3)) GROUP BY field, field2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = ? AND (b = ? OR c = ?)) GROUP BY field, field2'
values: [1, 2, 3]
}
'>> where(squel.expr().and(a = ?, QueryBuilder).and( expr().or(b = ?, 2).or(c = ?, 3) ))':
beforeEach: ->
subQuery = squel.select().field('field1').from('table1').where('field2 = ?', 10)
@inst.where(squel.expr().and("a = ?", subQuery).and(squel.expr().or("b = ?", 2).or("c = ?", 3)))
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT field1 FROM table1 WHERE (field2 = 10)) AND (b = 2 OR c = 3)) GROUP BY field, field2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = (SELECT field1 FROM table1 WHERE (field2 = ?)) AND (b = ? OR c = ?)) GROUP BY field, field2'
values: [10, 2, 3]
}
'>> having(squel.expr().and(a = ?, QueryBuilder).and( expr().or(b = ?, 2).or(c = ?, 3) ))':
beforeEach: ->
subQuery = squel.select().field('field1').from('table1').having('field2 = ?', 10)
@inst.having(squel.expr().and("a = ?", subQuery).and(squel.expr().or("b = ?", 2).or("c = ?", 3)))
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` GROUP BY field, field2 HAVING (a = (SELECT field1 FROM table1 HAVING (field2 = 10)) AND (b = 2 OR c = 3))'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` GROUP BY field, field2 HAVING (a = (SELECT field1 FROM table1 HAVING (field2 = ?)) AND (b = ? OR c = ?))'
values: [10, 2, 3]
}
'>> where(a = ?, null)':
beforeEach: -> @inst.where('a = ?', null)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = NULL) GROUP BY field, field2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = ?) GROUP BY field, field2'
values: [null]
}
'>> where(a = ?, 1)':
beforeEach: -> @inst.where('a = ?', 1)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = 1) GROUP BY field, field2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` WHERE (a = ?) GROUP BY field, field2'
values: [1]
}
'>> join(other_table)':
beforeEach: -> @inst.join('other_table')
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2'
'>> order(a)':
beforeEach: -> @inst.order('a')
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC'
'>> order(a, null)':
beforeEach: -> @inst.order('a', null)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a'
'>> order(a, \'asc nulls last\')':
beforeEach: -> @inst.order('a', 'asc nulls last')
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a asc nulls last'
'>> order(a, true)':
beforeEach: -> @inst.order('a', true)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC'
'>> limit(2)':
beforeEach: -> @inst.limit(2)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 2'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ?',
values: [1, 2]
}
'>> limit(0)':
beforeEach: -> @inst.limit(0)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 0'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ?',
values: [1, 0]
}
'>> offset(3)':
beforeEach: -> @inst.offset(3)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 2 OFFSET 3'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ? OFFSET ?',
values: [1, 2, 3]
}
'>> offset(0)':
beforeEach: -> @inst.offset(0)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC LIMIT 2 OFFSET 0'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY a ASC LIMIT ? OFFSET ?',
values: [1, 2, 0]
}
'>> order(DIST(?,?), true, 2, 3)':
beforeEach: -> @inst.order('DIST(?, ?)', true, 2, false)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY DIST(2, FALSE) ASC'
toParam: ->
assert.same @inst.toParam(), {
text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = ?) GROUP BY field, field2 ORDER BY DIST(?, ?) ASC'
values: [1, 2, false]
}
'>> order(a)':
beforeEach: -> @inst.order('a')
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY a ASC'
'>> order(b, null)':
beforeEach: -> @inst.order('b', null)
toString: ->
assert.same @inst.toString(), 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table WHERE (a = 1) GROUP BY field, field2 ORDER BY b'
'>> join(other_table, condition = expr())':
beforeEach: ->
subQuery = squel.select().field('abc').from('table1').where('adf = ?', 'today1')
subQuery2 = squel.select().field('xyz').from('table2').where('adf = ?', 'today2')
expr = squel.expr().and('field1 = ?', subQuery)
@inst.join('other_table', null, expr)
@inst.where('def IN ?', subQuery2)
toString: ->
assert.same @inst.toString(), "SELECT DISTINCT field1 AS \"fa1\", field2 FROM table, table2 `alias2` INNER JOIN other_table ON (field1 = (SELECT abc FROM table1 WHERE (adf = 'today1'))) WHERE (a = 1) AND (def IN (SELECT xyz FROM table2 WHERE (adf = 'today2'))) GROUP BY field, field2"
toParam: ->
assert.same @inst.toParam(), { text: 'SELECT DISTINCT field1 AS "fa1", field2 FROM table, table2 `alias2` INNER JOIN other_table ON (field1 = (SELECT abc FROM table1 WHERE (adf = ?))) WHERE (a = ?) AND (def IN (SELECT xyz FROM table2 WHERE (adf = ?))) GROUP BY field, field2', values: ["today1",1,"today2"] }
'nested queries':
'basic': ->
inner1 = squel.select().from('students')
inner2 = squel.select().from('scores')
@inst.from(inner1).from(inner2, 'scores')
assert.same @inst.toString(), "SELECT * FROM (SELECT * FROM students), (SELECT * FROM scores) `scores`"
'deep nesting': ->
inner1 = squel.select().from('students')
inner2 = squel.select().from(inner1)
@inst.from(inner2)
assert.same @inst.toString(), "SELECT * FROM (SELECT * FROM (SELECT * FROM students))"
'nesting in JOINs': ->
inner1 = squel.select().from('students')
inner2 = squel.select().from(inner1)
@inst.from('schools').join(inner2, 'meh', 'meh.ID = ID')
assert.same @inst.toString(), "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students)) `meh` ON (meh.ID = ID)"
'nesting in JOINs with params': ->
inner1 = squel.select().from('students').where('age = ?', 6)
inner2 = squel.select().from(inner1)
@inst.from('schools').where('school_type = ?', 'junior').join(inner2, 'meh', 'meh.ID = ID')
assert.same @inst.toString(), "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students WHERE (age = 6))) `meh` ON (meh.ID = ID) WHERE (school_type = 'junior')"
assert.same @inst.toParam(), { "text": "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students WHERE (age = ?))) `meh` ON (meh.ID = ID) WHERE (school_type = ?)", "values": [6,'junior'] }
assert.same @inst.toParam({ "numberedParameters": true}), { "text": "SELECT * FROM schools INNER JOIN (SELECT * FROM (SELECT * FROM students WHERE (age = $1))) `meh` ON (meh.ID = ID) WHERE (school_type = $2)", "values": [6,'junior'] }
'Complex table name, e.g. LATERAL (#230)':
beforeEach: ->
@inst = squel.select().from('foo').from(squel.str('LATERAL(?)', squel.select().from('bar').where('bar.id = ?', 2)), 'ss')
'toString': ->
assert.same @inst.toString(), 'SELECT * FROM foo, (LATERAL((SELECT * FROM bar WHERE (bar.id = 2)))) `ss`',
'toParam': ->
assert.same @inst.toParam(), {
text: 'SELECT * FROM foo, (LATERAL((SELECT * FROM bar WHERE (bar.id = ?)))) `ss`'
values: [2]
}
'cloning':
'basic': ->
newinst = @inst.from('students').limit(10).clone()
newinst.limit(20)
assert.same 'SELECT * FROM students LIMIT 10', @inst.toString()
assert.same 'SELECT * FROM students LIMIT 20', newinst.toString()
'with expressions (ticket #120)': ->
expr = squel.expr().and('a = 1')
newinst = @inst.from('table').left_join('table_2', 't', expr)
.clone()
.where('c = 1')
expr.and('b = 2')
assert.same 'SELECT * FROM table LEFT JOIN table_2 `t` ON (a = 1 AND b = 2)', @inst.toString()
assert.same 'SELECT * FROM table LEFT JOIN table_2 `t` ON (a = 1) WHERE (c = 1)', newinst.toString()
'with sub-queries (ticket #120)': ->
newinst = @inst.from(squel.select().from('students')).limit(30)
.clone()
.where('c = 1')
.limit(35)
assert.same 'SELECT * FROM (SELECT * FROM students) LIMIT 30', @inst.toString()
assert.same 'SELECT * FROM (SELECT * FROM students) WHERE (c = 1) LIMIT 35', newinst.toString()
'with complex expressions': ->
expr = squel.expr().and(
squel.expr().or('b = 2').or(
squel.expr().and('c = 3').and('d = 4')
)
).and('a = 1')
newinst = @inst.from('table').left_join('table_2', 't', expr)
.clone()
.where('c = 1')
expr.and('e = 5')
assert.same @inst.toString(), 'SELECT * FROM table LEFT JOIN table_2 `t` ON ((b = 2 OR (c = 3 AND d = 4)) AND a = 1 AND e = 5)'
assert.same newinst.toString(), 'SELECT * FROM table LEFT JOIN table_2 `t` ON ((b = 2 OR (c = 3 AND d = 4)) AND a = 1) WHERE (c = 1)'
'can specify block separator': ->
assert.same( squel.select({separator: '\n'})
.field('thing')
.from('table')
.toString(), """
SELECT
thing
FROM table
"""
)
'#242 - auto-quote table names':
beforeEach: ->
@inst = squel
.select({ autoQuoteTableNames: true })
.field('name')
.where('age > ?', 15)
'using string':
beforeEach: ->
@inst.from('students', 's')
toString: ->
assert.same @inst.toString(), """
SELECT name FROM `students` `s` WHERE (age > 15)
"""
toParam: ->
assert.same @inst.toParam(), {
"text": "SELECT name FROM `students` `s` WHERE (age > ?)"
"values": [15]
}
'using query builder':
beforeEach: ->
@inst.from(squel.select().from('students'), 's')
toString: ->
assert.same @inst.toString(), """
SELECT name FROM (SELECT * FROM students) `s` WHERE (age > 15)
"""
toParam: ->
assert.same @inst.toParam(), {
"text": "SELECT name FROM (SELECT * FROM students) `s` WHERE (age > ?)"
"values": [15]
}
'UNION JOINs':
'Two Queries NO Params':
beforeEach: ->
@qry1 = squel.select().field('name').from('students').where('age > 15')
@qry2 = squel.select().field('name').from('students').where('age < 6')
@qry1.union(@qry2)
toString: ->
assert.same @qry1.toString(), """
SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6))
"""
toParam: ->
assert.same @qry1.toParam(), {
"text": "SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6))"
"values": [
]
}
'Two Queries with Params':
beforeEach: ->
@qry1 = squel.select().field('name').from('students').where('age > ?', 15)
@qry2 = squel.select().field('name').from('students').where('age < ?', 6)
@qry1.union(@qry2)
toString: ->
assert.same @qry1.toString(), """
SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6))
"""
toParam: ->
assert.same @qry1.toParam(), {
"text": "SELECT name FROM students WHERE (age > ?) UNION (SELECT name FROM students WHERE (age < ?))"
"values": [
15
6
]
}
'Three Queries':
beforeEach: ->
@qry1 = squel.select().field('name').from('students').where('age > ?', 15)
@qry2 = squel.select().field('name').from('students').where('age < 6')
@qry3 = squel.select().field('name').from('students').where('age = ?', 8)
@qry1.union(@qry2)
@qry1.union(@qry3)
toParam: ->
assert.same @qry1.toParam(), {
"text": "SELECT name FROM students WHERE (age > ?) UNION (SELECT name FROM students WHERE (age < 6)) UNION (SELECT name FROM students WHERE (age = ?))"
"values": [
15
8
]
}
'toParam(2)': ->
assert.same @qry1.toParam({ "numberedParameters": true, "numberedParametersStartAt": 2}), {
"text": "SELECT name FROM students WHERE (age > $2) UNION (SELECT name FROM students WHERE (age < 6)) UNION (SELECT name FROM students WHERE (age = $3))"
"values": [
15
8
]
}
'Multi-Parameter Query':
beforeEach: ->
@qry1 = squel.select().field('name').from('students').where('age > ?', 15)
@qry2 = squel.select().field('name').from('students').where('age < ?', 6)
@qry3 = squel.select().field('name').from('students').where('age = ?', 8)
@qry4 = squel.select().field('name').from('students').where('age IN [?, ?]', 2, 10)
@qry1.union(@qry2)
@qry1.union(@qry3)
@qry4.union_all(@qry1)
toString: ->
assert.same @qry4.toString(), """
SELECT name FROM students WHERE (age IN [2, 10]) UNION ALL (SELECT name FROM students WHERE (age > 15) UNION (SELECT name FROM students WHERE (age < 6)) UNION (SELECT name FROM students WHERE (age = 8)))
"""
toParam: ->
assert.same @qry4.toParam({ "numberedParameters": true}), {
"text": "SELECT name FROM students WHERE (age IN [$1, $2]) UNION ALL (SELECT name FROM students WHERE (age > $3) UNION (SELECT name FROM students WHERE (age < $4)) UNION (SELECT name FROM students WHERE (age = $5)))"
"values": [
2
10
15
6
8
]
}
'Where builder expression':
beforeEach: ->
@inst = squel.select().from('table').where('a = ?', 5)
.where(squel.str('EXISTS(?)', squel.select().from('blah').where('b > ?', 6)))
toString: ->
assert.same @inst.toString(), """
SELECT * FROM table WHERE (a = 5) AND (EXISTS((SELECT * FROM blah WHERE (b > 6))))
"""
toParam: ->
assert.same @inst.toParam(), {
text: "SELECT * FROM table WHERE (a = ?) AND (EXISTS((SELECT * FROM blah WHERE (b > ?))))",
values: [5, 6]
}
'Join on builder expression':
beforeEach: ->
@inst = squel.select().from('table').join('table2', 't2',
squel.str('EXISTS(?)', squel.select().from('blah').where('b > ?', 6))
)
toString: ->
assert.same @inst.toString(), """
SELECT * FROM table INNER JOIN table2 `t2` ON (EXISTS((SELECT * FROM blah WHERE (b > 6))))
"""
toParam: ->
assert.same @inst.toParam(), {
text: "SELECT * FROM table INNER JOIN table2 `t2` ON (EXISTS((SELECT * FROM blah WHERE (b > ?))))",
values: [6]
}
'#301 - FROM rstr() with nesting':
beforeEach: ->
@inst = squel.select().from(squel.rstr("generate_series(?,?,?)",1,10,2), "tblfn(odds)")
toString: ->
assert.same @inst.toString(), """
SELECT * FROM generate_series(1,10,2) `tblfn(odds)`
"""
toParam: ->
assert.same @inst.toParam(), {
text: "SELECT * FROM generate_series(?,?,?) `tblfn(odds)`",
values:[1,10,2]
}
module?.exports[require('path').basename(__filename)] = test
|
[
{
"context": " edition view\n#\n# Nodize CMS\n# https://github.com/hypee/nodize\n#\n# Copyright 2012-2013, Hypee\n# http://hy",
"end": 71,
"score": 0.9994576573371887,
"start": 66,
"tag": "USERNAME",
"value": "hypee"
},
{
"context": "//github.com/hypee/nodize\n#\n# Copyright 2012-201... | modules/backend/inline_views/view_theme.coffee | nodize/nodizecms | 32 | # Themes / views edition view
#
# Nodize CMS
# https://github.com/hypee/nodize
#
# Copyright 2012-2013, Hypee
# http://hypee.com
#
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT
#
@include = ->
#
# Displaying lang settings page
#
@view backend_themes: ->
html ->
div '#sidecolumn.close', ->
div '.info', ->
dl '.small.compact', ->
dt ->
label @ion_lang.ionize_label_current_theme
dd __nodizeSettings.get("theme")
div '#options', ->
# Themes
h3 '.toggler', -> @ion_lang.ionize_title_themes
div '.element', ->
form '#themesForm', name: 'themesForm', method: 'post', action: '/admin/setting/save_themes', ->
# 'Theme'
dl '.small', ->
dt ->
label for: 'theme', -> @ion_lang.ionize_label_theme
dd ->
select '#theme', name: 'theme', ->
for theme in @themes
option value: theme, selected: 'selected', theme
# 'Submit button'
dl '.small', ->
dt ' '
dd ->
input '#themesFormSubmit.submit', type: 'submit', value: @ion_lang.ionize_button_save_themes
br()
# '/element'
# '/togglers'
# '/sidecolumn'
# 'Main Column'
div '#maincolumn', ->
h2 '#main-title.main.themes', @ion_lang.ionize_title_themes
# 'Views list'
h3 '.mt20', @ion_lang.ionize_title_views_list + ' : ' + __nodizeSettings.get("theme")
# '<div class="element">'
form '#viewsForm', name: 'viewsForm', method: 'post', action: 'save_views', ->
div '#viewsTableContainer', ->
# 'Views table list'
table '#viewsTable.list', ->
thead ->
tr ->
#th axis: 'string', style: 'width:20px;'
th axis: 'string', -> @ion_lang.ionize_label_view_filename
#th axis: 'string', -> @ion_lang.ionize_label_view_folder
th axis: 'string', -> "Default"
th @ion_lang.ionize_label_view_name
th @ion_lang.ionize_label_view_type
tbody ->
for file in @files.sort()
do (file) =>
#
# Extract filename without path
#
filename = file.split('/').pop();
#
# Define variables used in the loop
#
logical_name = ''
view_type = ''
#
# Search current file in views definition to guess its type
#
if @views["pages"][filename]
logical_name = @views["pages"][filename]
view_type = 'page'
if @views["blocks"][filename]
logical_name = @views["blocks"][filename]
view_type = 'block'
tr ->
#
# File edition with CodeMirror
#
#td ->
# a class: 'icon edit viewEdit', rel: filename
td ->
a class: 'viewEdit', rel: 'page_home', -> filename
td ->
if view_type is 'page'
checked = if @views["page_default"] is filename then "checked" else ""
input type:'radio', id: 'page_default', name: 'page_default', value: filename, checked: checked
if view_type is 'block'
checked = if @views["block_default"] is filename then "checked" else ""
input type:'radio', id: 'block_default', name: 'block_default', value: filename, checked: checked
td ->
input '.inputtext.w160', type: 'text', name: 'viewdefinition_'+filename, value: logical_name
td ->
select '.select', name: 'viewtype_'+filename, ->
option selected: ('selected' if view_type is 'page'), class: 'type_page', value: 'page', 'Page'
option selected: ('selected' if view_type is 'block'), value: 'block', class: 'type_block', 'Block'
option selected: ('selected' if view_type is ''), value: '', '-- No type --'
br()
br()
# '</div>'
# '/maincolumn'
#
# COFFEESCRIPT SECTION / CLIENT SIDE
#
coffeescript ->
j$ = jQuery.noConflict()
# Using ui.select jQuery widget for selects
# Disabled, seems to bug item type
# j$('document').ready ->
# j$('select').selectmenu
# width : 140
# style : 'dropdown'
# icons: [
# {find: '.type_page', icon: 'ui-icon-document'},
# {find: '.type_article', icon: 'ui-icon-script'},
# {find: '.type_block', icon: 'ui-icon-document-b'}
# ]
#
# Panel toolbox
#
ION.initToolbox "setting_theme_toolbox"
#
# Options Accordion
#
ION.initAccordion ".toggler", "div.element"
#
# Adds Sortable function to the user list table
#
new SortableTable("viewsTable",
sortOn: 1
sortBy: "ASC"
)
#
# Views Edit links
#
$$(".viewEdit").each (item) ->
rel = item.getProperty("rel")
id = rel.replace(/\//g, "")
form = "formView" + id
item.addEvent "click", (e) ->
e.stop()
self = this
@resizeCodeMirror = (w) ->
contentEl = w.el.contentWrapper
mfw = contentEl.getElement(".CodeMirror-wrapping")
mfw.setStyle "height", contentEl.getSize().y - 70
wOptions =
id: "w" + id
title: Lang.get("ionize_title_view_edit") + " : " + rel
content:
url: admin_url + "setting/edit_view/" + rel
method: "post"
onLoaded: (element, content) ->
# CodeMirror settings
c = $("editview_" + id).value
mirrorFrame = new ViewCodeMirror(CodeMirror.replace($("editview_" + id)),
height: "360px"
width: "95%"
content: c
tabMode: "shift"
parserfile: [ "parsexml.js", "parsecss.js", "tokenizejavascript.js", "parsejavascript.js", "parsehtmlmixed.js", "tokenizephp.js", "parsephp.js", "parsephphtmlmixed.js" ]
stylesheet: [ "http://192.168.1.162/themes/admin/javascript/codemirror/css/basic.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/xmlcolors.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/jscolors.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/csscolors.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/phpcolors.css" ]
path: "http://192.168.1.162/themes/admin/javascript/codemirror/js/"
lineNumbers: true
)
# Set height of CodeMirror
self.resizeCodeMirror this
form = "formView" + id
# Get the form action URL and adds 'true' so the transport is set to XHR
formUrl = $(form).getProperty("action")
# Add the cancel event if cancel button exists
if bCancel = $("bCancel" + id)
bCancel.addEvent "click", (e) ->
e.stop()
ION.closeWindow $("w" + id)
# Event on save button
if bSave = $("bSave" + id)
bSave.addEvent "click", (e) ->
e.stop()
# Get the CodeMirror Code
$("contentview_" + id).value = mirrorFrame.mirror.getCode()
# Get the form
options = ION.getFormObject(formUrl, $(form))
r = new Request.JSON(options)
r.send()
y: 80
width: 800
height: 450
padding:
top: 12
right: 12
bottom: 10
left: 12
maximizable: true
contentBgColor: "#fff"
onResize: (w) ->
self.resizeCodeMirror w
onMaximize: (w) ->
self.resizeCodeMirror w
onRestore: (w) ->
self.resizeCodeMirror w
# Window creation
new MUI.Window(wOptions)
#
# Database form action
# see ionize-form.js for more information about this method
#
ION.setFormSubmit "themesForm", "themesFormSubmit", "setting\/\/save_themes"
| 8887 | # Themes / views edition view
#
# Nodize CMS
# https://github.com/hypee/nodize
#
# Copyright 2012-2013, <NAME>
# http://hypee.com
#
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT
#
@include = ->
#
# Displaying lang settings page
#
@view backend_themes: ->
html ->
div '#sidecolumn.close', ->
div '.info', ->
dl '.small.compact', ->
dt ->
label @ion_lang.ionize_label_current_theme
dd __nodizeSettings.get("theme")
div '#options', ->
# Themes
h3 '.toggler', -> @ion_lang.ionize_title_themes
div '.element', ->
form '#themesForm', name: 'themesForm', method: 'post', action: '/admin/setting/save_themes', ->
# 'Theme'
dl '.small', ->
dt ->
label for: 'theme', -> @ion_lang.ionize_label_theme
dd ->
select '#theme', name: 'theme', ->
for theme in @themes
option value: theme, selected: 'selected', theme
# 'Submit button'
dl '.small', ->
dt ' '
dd ->
input '#themesFormSubmit.submit', type: 'submit', value: @ion_lang.ionize_button_save_themes
br()
# '/element'
# '/togglers'
# '/sidecolumn'
# 'Main Column'
div '#maincolumn', ->
h2 '#main-title.main.themes', @ion_lang.ionize_title_themes
# 'Views list'
h3 '.mt20', @ion_lang.ionize_title_views_list + ' : ' + __nodizeSettings.get("theme")
# '<div class="element">'
form '#viewsForm', name: 'viewsForm', method: 'post', action: 'save_views', ->
div '#viewsTableContainer', ->
# 'Views table list'
table '#viewsTable.list', ->
thead ->
tr ->
#th axis: 'string', style: 'width:20px;'
th axis: 'string', -> @ion_lang.ionize_label_view_filename
#th axis: 'string', -> @ion_lang.ionize_label_view_folder
th axis: 'string', -> "Default"
th @ion_lang.ionize_label_view_name
th @ion_lang.ionize_label_view_type
tbody ->
for file in @files.sort()
do (file) =>
#
# Extract filename without path
#
filename = file.split('/').pop();
#
# Define variables used in the loop
#
logical_name = ''
view_type = ''
#
# Search current file in views definition to guess its type
#
if @views["pages"][filename]
logical_name = @views["pages"][filename]
view_type = 'page'
if @views["blocks"][filename]
logical_name = @views["blocks"][filename]
view_type = 'block'
tr ->
#
# File edition with CodeMirror
#
#td ->
# a class: 'icon edit viewEdit', rel: filename
td ->
a class: 'viewEdit', rel: 'page_home', -> filename
td ->
if view_type is 'page'
checked = if @views["page_default"] is filename then "checked" else ""
input type:'radio', id: 'page_default', name: 'page_default', value: filename, checked: checked
if view_type is 'block'
checked = if @views["block_default"] is filename then "checked" else ""
input type:'radio', id: 'block_default', name: 'block_default', value: filename, checked: checked
td ->
input '.inputtext.w160', type: 'text', name: 'viewdefinition_'+filename, value: logical_name
td ->
select '.select', name: 'viewtype_'+filename, ->
option selected: ('selected' if view_type is 'page'), class: 'type_page', value: 'page', 'Page'
option selected: ('selected' if view_type is 'block'), value: 'block', class: 'type_block', 'Block'
option selected: ('selected' if view_type is ''), value: '', '-- No type --'
br()
br()
# '</div>'
# '/maincolumn'
#
# COFFEESCRIPT SECTION / CLIENT SIDE
#
coffeescript ->
j$ = jQuery.noConflict()
# Using ui.select jQuery widget for selects
# Disabled, seems to bug item type
# j$('document').ready ->
# j$('select').selectmenu
# width : 140
# style : 'dropdown'
# icons: [
# {find: '.type_page', icon: 'ui-icon-document'},
# {find: '.type_article', icon: 'ui-icon-script'},
# {find: '.type_block', icon: 'ui-icon-document-b'}
# ]
#
# Panel toolbox
#
ION.initToolbox "setting_theme_toolbox"
#
# Options Accordion
#
ION.initAccordion ".toggler", "div.element"
#
# Adds Sortable function to the user list table
#
new SortableTable("viewsTable",
sortOn: 1
sortBy: "ASC"
)
#
# Views Edit links
#
$$(".viewEdit").each (item) ->
rel = item.getProperty("rel")
id = rel.replace(/\//g, "")
form = "formView" + id
item.addEvent "click", (e) ->
e.stop()
self = this
@resizeCodeMirror = (w) ->
contentEl = w.el.contentWrapper
mfw = contentEl.getElement(".CodeMirror-wrapping")
mfw.setStyle "height", contentEl.getSize().y - 70
wOptions =
id: "w" + id
title: Lang.get("ionize_title_view_edit") + " : " + rel
content:
url: admin_url + "setting/edit_view/" + rel
method: "post"
onLoaded: (element, content) ->
# CodeMirror settings
c = $("editview_" + id).value
mirrorFrame = new ViewCodeMirror(CodeMirror.replace($("editview_" + id)),
height: "360px"
width: "95%"
content: c
tabMode: "shift"
parserfile: [ "parsexml.js", "parsecss.js", "tokenizejavascript.js", "parsejavascript.js", "parsehtmlmixed.js", "tokenizephp.js", "parsephp.js", "parsephphtmlmixed.js" ]
stylesheet: [ "http://192.168.1.162/themes/admin/javascript/codemirror/css/basic.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/xmlcolors.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/jscolors.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/csscolors.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/phpcolors.css" ]
path: "http://192.168.1.162/themes/admin/javascript/codemirror/js/"
lineNumbers: true
)
# Set height of CodeMirror
self.resizeCodeMirror this
form = "formView" + id
# Get the form action URL and adds 'true' so the transport is set to XHR
formUrl = $(form).getProperty("action")
# Add the cancel event if cancel button exists
if bCancel = $("bCancel" + id)
bCancel.addEvent "click", (e) ->
e.stop()
ION.closeWindow $("w" + id)
# Event on save button
if bSave = $("bSave" + id)
bSave.addEvent "click", (e) ->
e.stop()
# Get the CodeMirror Code
$("contentview_" + id).value = mirrorFrame.mirror.getCode()
# Get the form
options = ION.getFormObject(formUrl, $(form))
r = new Request.JSON(options)
r.send()
y: 80
width: 800
height: 450
padding:
top: 12
right: 12
bottom: 10
left: 12
maximizable: true
contentBgColor: "#fff"
onResize: (w) ->
self.resizeCodeMirror w
onMaximize: (w) ->
self.resizeCodeMirror w
onRestore: (w) ->
self.resizeCodeMirror w
# Window creation
new MUI.Window(wOptions)
#
# Database form action
# see ionize-form.js for more information about this method
#
ION.setFormSubmit "themesForm", "themesFormSubmit", "setting\/\/save_themes"
| true | # Themes / views edition view
#
# Nodize CMS
# https://github.com/hypee/nodize
#
# Copyright 2012-2013, PI:NAME:<NAME>END_PI
# http://hypee.com
#
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT
#
@include = ->
#
# Displaying lang settings page
#
@view backend_themes: ->
html ->
div '#sidecolumn.close', ->
div '.info', ->
dl '.small.compact', ->
dt ->
label @ion_lang.ionize_label_current_theme
dd __nodizeSettings.get("theme")
div '#options', ->
# Themes
h3 '.toggler', -> @ion_lang.ionize_title_themes
div '.element', ->
form '#themesForm', name: 'themesForm', method: 'post', action: '/admin/setting/save_themes', ->
# 'Theme'
dl '.small', ->
dt ->
label for: 'theme', -> @ion_lang.ionize_label_theme
dd ->
select '#theme', name: 'theme', ->
for theme in @themes
option value: theme, selected: 'selected', theme
# 'Submit button'
dl '.small', ->
dt ' '
dd ->
input '#themesFormSubmit.submit', type: 'submit', value: @ion_lang.ionize_button_save_themes
br()
# '/element'
# '/togglers'
# '/sidecolumn'
# 'Main Column'
div '#maincolumn', ->
h2 '#main-title.main.themes', @ion_lang.ionize_title_themes
# 'Views list'
h3 '.mt20', @ion_lang.ionize_title_views_list + ' : ' + __nodizeSettings.get("theme")
# '<div class="element">'
form '#viewsForm', name: 'viewsForm', method: 'post', action: 'save_views', ->
div '#viewsTableContainer', ->
# 'Views table list'
table '#viewsTable.list', ->
thead ->
tr ->
#th axis: 'string', style: 'width:20px;'
th axis: 'string', -> @ion_lang.ionize_label_view_filename
#th axis: 'string', -> @ion_lang.ionize_label_view_folder
th axis: 'string', -> "Default"
th @ion_lang.ionize_label_view_name
th @ion_lang.ionize_label_view_type
tbody ->
for file in @files.sort()
do (file) =>
#
# Extract filename without path
#
filename = file.split('/').pop();
#
# Define variables used in the loop
#
logical_name = ''
view_type = ''
#
# Search current file in views definition to guess its type
#
if @views["pages"][filename]
logical_name = @views["pages"][filename]
view_type = 'page'
if @views["blocks"][filename]
logical_name = @views["blocks"][filename]
view_type = 'block'
tr ->
#
# File edition with CodeMirror
#
#td ->
# a class: 'icon edit viewEdit', rel: filename
td ->
a class: 'viewEdit', rel: 'page_home', -> filename
td ->
if view_type is 'page'
checked = if @views["page_default"] is filename then "checked" else ""
input type:'radio', id: 'page_default', name: 'page_default', value: filename, checked: checked
if view_type is 'block'
checked = if @views["block_default"] is filename then "checked" else ""
input type:'radio', id: 'block_default', name: 'block_default', value: filename, checked: checked
td ->
input '.inputtext.w160', type: 'text', name: 'viewdefinition_'+filename, value: logical_name
td ->
select '.select', name: 'viewtype_'+filename, ->
option selected: ('selected' if view_type is 'page'), class: 'type_page', value: 'page', 'Page'
option selected: ('selected' if view_type is 'block'), value: 'block', class: 'type_block', 'Block'
option selected: ('selected' if view_type is ''), value: '', '-- No type --'
br()
br()
# '</div>'
# '/maincolumn'
#
# COFFEESCRIPT SECTION / CLIENT SIDE
#
coffeescript ->
j$ = jQuery.noConflict()
# Using ui.select jQuery widget for selects
# Disabled, seems to bug item type
# j$('document').ready ->
# j$('select').selectmenu
# width : 140
# style : 'dropdown'
# icons: [
# {find: '.type_page', icon: 'ui-icon-document'},
# {find: '.type_article', icon: 'ui-icon-script'},
# {find: '.type_block', icon: 'ui-icon-document-b'}
# ]
#
# Panel toolbox
#
ION.initToolbox "setting_theme_toolbox"
#
# Options Accordion
#
ION.initAccordion ".toggler", "div.element"
#
# Adds Sortable function to the user list table
#
new SortableTable("viewsTable",
sortOn: 1
sortBy: "ASC"
)
#
# Views Edit links
#
$$(".viewEdit").each (item) ->
rel = item.getProperty("rel")
id = rel.replace(/\//g, "")
form = "formView" + id
item.addEvent "click", (e) ->
e.stop()
self = this
@resizeCodeMirror = (w) ->
contentEl = w.el.contentWrapper
mfw = contentEl.getElement(".CodeMirror-wrapping")
mfw.setStyle "height", contentEl.getSize().y - 70
wOptions =
id: "w" + id
title: Lang.get("ionize_title_view_edit") + " : " + rel
content:
url: admin_url + "setting/edit_view/" + rel
method: "post"
onLoaded: (element, content) ->
# CodeMirror settings
c = $("editview_" + id).value
mirrorFrame = new ViewCodeMirror(CodeMirror.replace($("editview_" + id)),
height: "360px"
width: "95%"
content: c
tabMode: "shift"
parserfile: [ "parsexml.js", "parsecss.js", "tokenizejavascript.js", "parsejavascript.js", "parsehtmlmixed.js", "tokenizephp.js", "parsephp.js", "parsephphtmlmixed.js" ]
stylesheet: [ "http://192.168.1.162/themes/admin/javascript/codemirror/css/basic.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/xmlcolors.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/jscolors.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/csscolors.css", "http://192.168.1.162/themes/admin/javascript/codemirror/css/phpcolors.css" ]
path: "http://192.168.1.162/themes/admin/javascript/codemirror/js/"
lineNumbers: true
)
# Set height of CodeMirror
self.resizeCodeMirror this
form = "formView" + id
# Get the form action URL and adds 'true' so the transport is set to XHR
formUrl = $(form).getProperty("action")
# Add the cancel event if cancel button exists
if bCancel = $("bCancel" + id)
bCancel.addEvent "click", (e) ->
e.stop()
ION.closeWindow $("w" + id)
# Event on save button
if bSave = $("bSave" + id)
bSave.addEvent "click", (e) ->
e.stop()
# Get the CodeMirror Code
$("contentview_" + id).value = mirrorFrame.mirror.getCode()
# Get the form
options = ION.getFormObject(formUrl, $(form))
r = new Request.JSON(options)
r.send()
y: 80
width: 800
height: 450
padding:
top: 12
right: 12
bottom: 10
left: 12
maximizable: true
contentBgColor: "#fff"
onResize: (w) ->
self.resizeCodeMirror w
onMaximize: (w) ->
self.resizeCodeMirror w
onRestore: (w) ->
self.resizeCodeMirror w
# Window creation
new MUI.Window(wOptions)
#
# Database form action
# see ionize-form.js for more information about this method
#
ION.setFormSubmit "themesForm", "themesFormSubmit", "setting\/\/save_themes"
|
[
{
"context": "###\n# grunt/clean.coffee\n#\n# © 2014 Dan Nichols\n# See LICENSE for more details\n#\n# Define our cle",
"end": 47,
"score": 0.9996957182884216,
"start": 36,
"tag": "NAME",
"value": "Dan Nichols"
}
] | grunt/clean.coffee | dlnichols/h_media | 0 | ###
# grunt/clean.coffee
#
# © 2014 Dan Nichols
# See LICENSE for more details
#
# Define our clean configuration block for grunt
###
'use strict'
module.exports =
build: [
'.tmp/'
'dist/'
]
serve: [
'.tmp/'
]
| 61865 | ###
# grunt/clean.coffee
#
# © 2014 <NAME>
# See LICENSE for more details
#
# Define our clean configuration block for grunt
###
'use strict'
module.exports =
build: [
'.tmp/'
'dist/'
]
serve: [
'.tmp/'
]
| true | ###
# grunt/clean.coffee
#
# © 2014 PI:NAME:<NAME>END_PI
# See LICENSE for more details
#
# Define our clean configuration block for grunt
###
'use strict'
module.exports =
build: [
'.tmp/'
'dist/'
]
serve: [
'.tmp/'
]
|
[
{
"context": "request =\n partnership:\n name: \"#{profile.firstName} #{profile.lastName}\"\n ",
"end": 383,
"score": 0.7731042504310608,
"start": 383,
"tag": "NAME",
"value": ""
}
] | src/Vifeed/FrontendBundle/Resources/assets/js/partnership/resources/partnerships.coffee | bzis/zomba | 0 | angular.module('resources.partnerships', ['zmbk.config']).factory 'Partnerships', [
'$http', 'APP.CONFIG', ($http, config) ->
'use strict'
new class Partnerships
resourceUrl = "#{config.apiPath}/partnership"
# Creates partnership request
# Full link: /api/partnership
create: (profile) ->
request =
partnership:
name: "#{profile.firstName} #{profile.lastName}"
email: profile.email
phone: profile.phone
$http.put resourceUrl, request
]
| 209363 | angular.module('resources.partnerships', ['zmbk.config']).factory 'Partnerships', [
'$http', 'APP.CONFIG', ($http, config) ->
'use strict'
new class Partnerships
resourceUrl = "#{config.apiPath}/partnership"
# Creates partnership request
# Full link: /api/partnership
create: (profile) ->
request =
partnership:
name:<NAME> "#{profile.firstName} #{profile.lastName}"
email: profile.email
phone: profile.phone
$http.put resourceUrl, request
]
| true | angular.module('resources.partnerships', ['zmbk.config']).factory 'Partnerships', [
'$http', 'APP.CONFIG', ($http, config) ->
'use strict'
new class Partnerships
resourceUrl = "#{config.apiPath}/partnership"
# Creates partnership request
# Full link: /api/partnership
create: (profile) ->
request =
partnership:
name:PI:NAME:<NAME>END_PI "#{profile.firstName} #{profile.lastName}"
email: profile.email
phone: profile.phone
$http.put resourceUrl, request
]
|
[
{
"context": "ntWidget\n\n setting = {}\n setting.key = \"time-slider\"\n\n PERIODS = ['d','w','m','q','y','f']\n\n ",
"end": 502,
"score": 0.8523778319358826,
"start": 491,
"tag": "KEY",
"value": "time-slider"
}
] | src/components/widgets-settings/time-slider/time-slider.directive.coffee | agranado2k/impac-angular | 7 | module = angular.module('impac.components.widgets-settings.time-slider',[])
module.directive('settingTimeSlider', ($templateCache, $timeout, ImpacMainSvc, ImpacUtilities, ImpacTheming) ->
return {
restrict: 'A',
scope: {
parentWidget: '='
deferred: '='
timeRange: '=?'
onUse: '&?'
},
template: $templateCache.get('widgets-settings/time-slider.tmpl.html'),
link: (scope) ->
w = scope.parentWidget
setting = {}
setting.key = "time-slider"
PERIODS = ['d','w','m','q','y','f']
setting.initialize = ->
# Make sure scope.timeRange has been propagated
$timeout ->
initNumberOfPeriods()
initPeriod()
initFinancialYearEndMonth()
return true
setting.toMetadata = ->
histParams =
to: scope.toDate().format('YYYY-MM-DD')
time_range: getTimeRange()
histParams.from = scope.fromDate().format('YYYY-MM-DD') if getPeriod() == 'f'
return { hist_parameters: histParams }
initNumberOfPeriods = ->
tr = scope.timeRange
scope.numberOfPeriods = moment().month()
return scope.numberOfPeriods unless tr?
nPattern = /^-?([0-9]{1,2})[a-z]?$/
n = nPattern.exec(tr)
scope.numberOfPeriods = parseInt(n[1]) if (n? && n[1] && parseInt(n[1]))
return scope.numberOfPeriods
initPeriod = ->
tr = scope.timeRange
scope.period = "m"
return "m" unless tr?
pPattern = /^-?[0-9]{0,2}([a-z])$/
p = pPattern.exec(tr)
period = _.find(PERIODS, (authPeriod) -> (p? && (p[1] == authPeriod)) )
scope.period = period if period?
return scope.period
initFinancialYearEndMonth = ->
scope.financialYearEndMonth = 6
ImpacMainSvc.load().then( (config) ->
if config? && config.currentOrganization? && parseInt(config.currentOrganization.financial_year_end_month)
scope.financialYearEndMonth = parseInt(config.currentOrganization.financial_year_end_month)
)
getPeriod = ->
if scope.period?
return scope.period
else
return initPeriod()
getPeriodWord = ->
return ImpacUtilities.getPeriodWord(getPeriod())
getNumberOfPeriods = ->
if scope.numberOfPeriods?
return scope.numberOfPeriods
else
return initNumberOfPeriods()
getTimeRange = ->
n = getNumberOfPeriods()
p = getPeriod()
return "-#{n}#{p}"
scope.formatPeriod = ->
return ImpacUtilities.formatPeriod(getNumberOfPeriods(), getPeriod())
scope.fromDate = ->
n = getNumberOfPeriods()
word = getPeriodWord()
if word.slice(0,1) == "f"
financialYearStartYear = moment().year() - 1
financialYearStartYear = moment().year() if moment().month() >= 6
financialYearStartYear = financialYearStartYear - n
return moment("#{financialYearStartYear}-#{scope.financialYearEndMonth + 1}-01", "YYYY-M-DD")
else if word.slice(0,1) == "w"
return moment().subtract(n, word).startOf('isoweek')
else
return moment().subtract(n, word).startOf(word)
scope.toDate = ->
return moment()
w.settings.push(setting)
# Setting is ready: trigger load content
# ------------------------------------
scope.deferred.resolve(setting)
}
)
| 86315 | module = angular.module('impac.components.widgets-settings.time-slider',[])
module.directive('settingTimeSlider', ($templateCache, $timeout, ImpacMainSvc, ImpacUtilities, ImpacTheming) ->
return {
restrict: 'A',
scope: {
parentWidget: '='
deferred: '='
timeRange: '=?'
onUse: '&?'
},
template: $templateCache.get('widgets-settings/time-slider.tmpl.html'),
link: (scope) ->
w = scope.parentWidget
setting = {}
setting.key = "<KEY>"
PERIODS = ['d','w','m','q','y','f']
setting.initialize = ->
# Make sure scope.timeRange has been propagated
$timeout ->
initNumberOfPeriods()
initPeriod()
initFinancialYearEndMonth()
return true
setting.toMetadata = ->
histParams =
to: scope.toDate().format('YYYY-MM-DD')
time_range: getTimeRange()
histParams.from = scope.fromDate().format('YYYY-MM-DD') if getPeriod() == 'f'
return { hist_parameters: histParams }
initNumberOfPeriods = ->
tr = scope.timeRange
scope.numberOfPeriods = moment().month()
return scope.numberOfPeriods unless tr?
nPattern = /^-?([0-9]{1,2})[a-z]?$/
n = nPattern.exec(tr)
scope.numberOfPeriods = parseInt(n[1]) if (n? && n[1] && parseInt(n[1]))
return scope.numberOfPeriods
initPeriod = ->
tr = scope.timeRange
scope.period = "m"
return "m" unless tr?
pPattern = /^-?[0-9]{0,2}([a-z])$/
p = pPattern.exec(tr)
period = _.find(PERIODS, (authPeriod) -> (p? && (p[1] == authPeriod)) )
scope.period = period if period?
return scope.period
initFinancialYearEndMonth = ->
scope.financialYearEndMonth = 6
ImpacMainSvc.load().then( (config) ->
if config? && config.currentOrganization? && parseInt(config.currentOrganization.financial_year_end_month)
scope.financialYearEndMonth = parseInt(config.currentOrganization.financial_year_end_month)
)
getPeriod = ->
if scope.period?
return scope.period
else
return initPeriod()
getPeriodWord = ->
return ImpacUtilities.getPeriodWord(getPeriod())
getNumberOfPeriods = ->
if scope.numberOfPeriods?
return scope.numberOfPeriods
else
return initNumberOfPeriods()
getTimeRange = ->
n = getNumberOfPeriods()
p = getPeriod()
return "-#{n}#{p}"
scope.formatPeriod = ->
return ImpacUtilities.formatPeriod(getNumberOfPeriods(), getPeriod())
scope.fromDate = ->
n = getNumberOfPeriods()
word = getPeriodWord()
if word.slice(0,1) == "f"
financialYearStartYear = moment().year() - 1
financialYearStartYear = moment().year() if moment().month() >= 6
financialYearStartYear = financialYearStartYear - n
return moment("#{financialYearStartYear}-#{scope.financialYearEndMonth + 1}-01", "YYYY-M-DD")
else if word.slice(0,1) == "w"
return moment().subtract(n, word).startOf('isoweek')
else
return moment().subtract(n, word).startOf(word)
scope.toDate = ->
return moment()
w.settings.push(setting)
# Setting is ready: trigger load content
# ------------------------------------
scope.deferred.resolve(setting)
}
)
| true | module = angular.module('impac.components.widgets-settings.time-slider',[])
module.directive('settingTimeSlider', ($templateCache, $timeout, ImpacMainSvc, ImpacUtilities, ImpacTheming) ->
return {
restrict: 'A',
scope: {
parentWidget: '='
deferred: '='
timeRange: '=?'
onUse: '&?'
},
template: $templateCache.get('widgets-settings/time-slider.tmpl.html'),
link: (scope) ->
w = scope.parentWidget
setting = {}
setting.key = "PI:KEY:<KEY>END_PI"
PERIODS = ['d','w','m','q','y','f']
setting.initialize = ->
# Make sure scope.timeRange has been propagated
$timeout ->
initNumberOfPeriods()
initPeriod()
initFinancialYearEndMonth()
return true
setting.toMetadata = ->
histParams =
to: scope.toDate().format('YYYY-MM-DD')
time_range: getTimeRange()
histParams.from = scope.fromDate().format('YYYY-MM-DD') if getPeriod() == 'f'
return { hist_parameters: histParams }
initNumberOfPeriods = ->
tr = scope.timeRange
scope.numberOfPeriods = moment().month()
return scope.numberOfPeriods unless tr?
nPattern = /^-?([0-9]{1,2})[a-z]?$/
n = nPattern.exec(tr)
scope.numberOfPeriods = parseInt(n[1]) if (n? && n[1] && parseInt(n[1]))
return scope.numberOfPeriods
initPeriod = ->
tr = scope.timeRange
scope.period = "m"
return "m" unless tr?
pPattern = /^-?[0-9]{0,2}([a-z])$/
p = pPattern.exec(tr)
period = _.find(PERIODS, (authPeriod) -> (p? && (p[1] == authPeriod)) )
scope.period = period if period?
return scope.period
initFinancialYearEndMonth = ->
scope.financialYearEndMonth = 6
ImpacMainSvc.load().then( (config) ->
if config? && config.currentOrganization? && parseInt(config.currentOrganization.financial_year_end_month)
scope.financialYearEndMonth = parseInt(config.currentOrganization.financial_year_end_month)
)
getPeriod = ->
if scope.period?
return scope.period
else
return initPeriod()
getPeriodWord = ->
return ImpacUtilities.getPeriodWord(getPeriod())
getNumberOfPeriods = ->
if scope.numberOfPeriods?
return scope.numberOfPeriods
else
return initNumberOfPeriods()
getTimeRange = ->
n = getNumberOfPeriods()
p = getPeriod()
return "-#{n}#{p}"
scope.formatPeriod = ->
return ImpacUtilities.formatPeriod(getNumberOfPeriods(), getPeriod())
scope.fromDate = ->
n = getNumberOfPeriods()
word = getPeriodWord()
if word.slice(0,1) == "f"
financialYearStartYear = moment().year() - 1
financialYearStartYear = moment().year() if moment().month() >= 6
financialYearStartYear = financialYearStartYear - n
return moment("#{financialYearStartYear}-#{scope.financialYearEndMonth + 1}-01", "YYYY-M-DD")
else if word.slice(0,1) == "w"
return moment().subtract(n, word).startOf('isoweek')
else
return moment().subtract(n, word).startOf(word)
scope.toDate = ->
return moment()
w.settings.push(setting)
# Setting is ready: trigger load content
# ------------------------------------
scope.deferred.resolve(setting)
}
)
|
[
{
"context": "ue\n default: 5\n }\n {\n field: 'filler'\n startPos: 9\n endPos: 17\n ",
"end": 453,
"score": 0.5222318768501282,
"start": 449,
"tag": "NAME",
"value": "fill"
},
{
"context": " type: 'numeric'\n }\n {\n field: '... | coffee/layout/HSBC/Pagamento/LoteTrailing.coffee | s2way/cnab240-nodejs | 10 | module.exports = [
{
field: 'banco'
startPos: 1
endPos: 3
length: 3
required: true
default: 399
}
{
field: 'lote'
startPos: 4
endPos: 7
length: 4
required: true
default: '0000'
}
{
field: 'registro'
startPos: 8
endPos: 8
length: 1
required: true
default: 5
}
{
field: 'filler'
startPos: 9
endPos: 17
length: 9
required: false
default: new Array(9).fill(' ').join('')
}
{
field: 'qtde_registros'
startPos: 18
endPos: 23
length: 6
required: true
type: 'numeric'
}
{
field: 'filler'
startPos: 24
endPos: 26
length: 3
required: false
default: new Array(3).fill(' ').join('')
}
{
field: 'valor_credito'
startPos: 27
endPos: 41
length: 15
required: true
type: 'numeric'
}
{
field: 'filler'
startPos: 42
endPos: 240
length: 199
required: false
default: new Array(199).fill(' ').join('')
}
]
| 206556 | module.exports = [
{
field: 'banco'
startPos: 1
endPos: 3
length: 3
required: true
default: 399
}
{
field: 'lote'
startPos: 4
endPos: 7
length: 4
required: true
default: '0000'
}
{
field: 'registro'
startPos: 8
endPos: 8
length: 1
required: true
default: 5
}
{
field: '<NAME>er'
startPos: 9
endPos: 17
length: 9
required: false
default: new Array(9).fill(' ').join('')
}
{
field: 'qtde_registros'
startPos: 18
endPos: 23
length: 6
required: true
type: 'numeric'
}
{
field: '<NAME>er'
startPos: 24
endPos: 26
length: 3
required: false
default: new Array(3).fill(' ').join('')
}
{
field: 'valor_credito'
startPos: 27
endPos: 41
length: 15
required: true
type: 'numeric'
}
{
field: 'filler'
startPos: 42
endPos: 240
length: 199
required: false
default: new Array(199).fill(' ').join('')
}
]
| true | module.exports = [
{
field: 'banco'
startPos: 1
endPos: 3
length: 3
required: true
default: 399
}
{
field: 'lote'
startPos: 4
endPos: 7
length: 4
required: true
default: '0000'
}
{
field: 'registro'
startPos: 8
endPos: 8
length: 1
required: true
default: 5
}
{
field: 'PI:NAME:<NAME>END_PIer'
startPos: 9
endPos: 17
length: 9
required: false
default: new Array(9).fill(' ').join('')
}
{
field: 'qtde_registros'
startPos: 18
endPos: 23
length: 6
required: true
type: 'numeric'
}
{
field: 'PI:NAME:<NAME>END_PIer'
startPos: 24
endPos: 26
length: 3
required: false
default: new Array(3).fill(' ').join('')
}
{
field: 'valor_credito'
startPos: 27
endPos: 41
length: 15
required: true
type: 'numeric'
}
{
field: 'filler'
startPos: 42
endPos: 240
length: 199
required: false
default: new Array(199).fill(' ').join('')
}
]
|
[
{
"context": ">\n @$scope.original_task = { id: 1, name: 'original_name', position: 5, project_id: 1}\n\n it 'sho",
"end": 3952,
"score": 0.5821702480316162,
"start": 3944,
"tag": "NAME",
"value": "original"
}
] | spec/javascripts/unit/controllers/task_ctrl_spec.js.coffee | FanAnToXa/todo-app | 2 | #= require spec_helper
describe 'TaskCtrl', ->
beforeEach ->
@success_action = true
@TaskResource = jasmine.createSpyObj 'TaskResource', [
'query',
'save',
'update',
'remove'
]
@tasks_list = [
{ id: 1, name: 'name 1', project_id: 1, position: 1},
{ id: 3, name: 'name 2', project_id: 1, position: 0}
]
@task = { id: 3, name: 'name 3', project_id: 1, position: 2}
@TaskResource.query.and.returnValue(@tasks_list)
action_with_promise = $promise: then: (resolve, reject) =>
if @success_action then resolve(@task) else reject(@task)
@TaskResource.save.and.returnValue action_with_promise
@TaskResource.update.and.returnValue action_with_promise
@TaskResource.remove.and.returnValue action_with_promise
@filter = jasmine.createSpy 'filter'
@$scope.projectId = 1
console.log = jasmine.createSpy 'log'
@initTaskCtrl = ->
@$controller 'TaskCtrl',
$scope: @$scope
$location: @$location
Auth: @Auth
TaskResource: @TaskResource
$filter: @filter
describe 'when authenticated', ->
beforeEach ->
@controller = @initTaskCtrl()
it 'should not redirect to /', ->
expect(@$location.path).not.toHaveBeenCalledWith '/'
describe 'should initialize vairables', ->
it 'should get tasks list', ->
expect(@$scope.tasks_list).toEqual @tasks_list
it 'should set defult vairables', ->
expect(@$scope.new_task).toEqual {}
expect(@$scope.edited_task).toEqual null
expect(@$scope.sortable_options).toEqual {
animation: 150
onUpdate: @controller.updatePosition
}
it 'should assign functions to scope', ->
expect(@$scope.openDatapicker).toEqual @controller.openDatapicker
expect(@$scope.addTask).toEqual @controller.addTask
expect(@$scope.editTask).toEqual @controller.editTask
expect(@$scope.cancelEditingTask).toEqual @controller.cancelEditingTask
expect(@$scope.updateTask).toEqual @controller.updateTask
expect(@$scope.destroyTask).toEqual @controller.destroyTask
describe '#openDatapicker', ->
it 'should open Datapicker', ->
@$scope.openDatapicker @task
expect(@task.datapickerOpened).toEqual true
describe '#addTask', ->
it 'should save task', ->
@$scope.addTask @task
expect(@$scope.tasks_list.shift()).toEqual @task
expect(@$scope.new_task).toEqual {}
it 'should handle errors', ->
@success_action = false
@$scope.addTask @task
expect(@TaskResource.save).toHaveBeenCalledWith @task
expect(console.log).toHaveBeenCalledWith 'error'
describe '#editTask', ->
it 'should initialize editing', ->
@$scope.editTask @task
expect(@$scope.edited_task).toEqual @task
expect(@$scope.original_task).toEqual @task
describe '#cancelEditingTask', ->
it 'should cancel editing', ->
@$scope.cancelEditingTask @task
expect(@$scope.edited_task).toEqual null
expect(@$scope.original_task).toEqual null
describe '#updatePosition', ->
beforeEach ->
@event = { oldIndex: 0, newIndex: 1 }
@$scope.original_task = @task
it 'should update position', ->
@controller.updatePosition @event
expect(@$scope.tasks_list).toEqual [
{ id: 1, name: 'name 1', project_id: 1, position: 0},
{ id: 3, name: 'name 2', project_id: 1, position: 1}
]
it 'should handle errors', ->
@success_action = false
@controller.updatePosition @event
expect(@$scope.tasks_list).toEqual [
{ id: 3, name: 'name 2', project_id: 1, position: 2},
{ id: 1, name: 'name 1', project_id: 1, position: 1}
]
describe '#updateTask', ->
beforeEach ->
@$scope.original_task = { id: 1, name: 'original_name', position: 5, project_id: 1}
it 'should not call update when project not modified', ->
@$scope.original_task = @task
@$scope.updateTask @task, 'name'
expect(@TaskResource.update).not.toHaveBeenCalled()
it 'should update task', ->
@$scope.updateTask @task, 'name'
expect(@$scope.edited_task).toEqual null
it 'should handle errors', ->
@success_action = false
@$scope.updateTask @task, 'name'
expect(@task.name).toEqual 'original_name'
describe '#destroyTask', ->
it 'should destroy project', ->
@$scope.destroyTask @task
expect(@$scope.tasks_list.indexOf(@task)).toEqual -1
describe 'when not authenticated', ->
beforeEach ->
@Auth.isAuthenticated.and.returnValue false
@initTaskCtrl()
it 'should not redirect to /', ->
expect(@$location.path).toHaveBeenCalledWith '/' | 50283 | #= require spec_helper
describe 'TaskCtrl', ->
beforeEach ->
@success_action = true
@TaskResource = jasmine.createSpyObj 'TaskResource', [
'query',
'save',
'update',
'remove'
]
@tasks_list = [
{ id: 1, name: 'name 1', project_id: 1, position: 1},
{ id: 3, name: 'name 2', project_id: 1, position: 0}
]
@task = { id: 3, name: 'name 3', project_id: 1, position: 2}
@TaskResource.query.and.returnValue(@tasks_list)
action_with_promise = $promise: then: (resolve, reject) =>
if @success_action then resolve(@task) else reject(@task)
@TaskResource.save.and.returnValue action_with_promise
@TaskResource.update.and.returnValue action_with_promise
@TaskResource.remove.and.returnValue action_with_promise
@filter = jasmine.createSpy 'filter'
@$scope.projectId = 1
console.log = jasmine.createSpy 'log'
@initTaskCtrl = ->
@$controller 'TaskCtrl',
$scope: @$scope
$location: @$location
Auth: @Auth
TaskResource: @TaskResource
$filter: @filter
describe 'when authenticated', ->
beforeEach ->
@controller = @initTaskCtrl()
it 'should not redirect to /', ->
expect(@$location.path).not.toHaveBeenCalledWith '/'
describe 'should initialize vairables', ->
it 'should get tasks list', ->
expect(@$scope.tasks_list).toEqual @tasks_list
it 'should set defult vairables', ->
expect(@$scope.new_task).toEqual {}
expect(@$scope.edited_task).toEqual null
expect(@$scope.sortable_options).toEqual {
animation: 150
onUpdate: @controller.updatePosition
}
it 'should assign functions to scope', ->
expect(@$scope.openDatapicker).toEqual @controller.openDatapicker
expect(@$scope.addTask).toEqual @controller.addTask
expect(@$scope.editTask).toEqual @controller.editTask
expect(@$scope.cancelEditingTask).toEqual @controller.cancelEditingTask
expect(@$scope.updateTask).toEqual @controller.updateTask
expect(@$scope.destroyTask).toEqual @controller.destroyTask
describe '#openDatapicker', ->
it 'should open Datapicker', ->
@$scope.openDatapicker @task
expect(@task.datapickerOpened).toEqual true
describe '#addTask', ->
it 'should save task', ->
@$scope.addTask @task
expect(@$scope.tasks_list.shift()).toEqual @task
expect(@$scope.new_task).toEqual {}
it 'should handle errors', ->
@success_action = false
@$scope.addTask @task
expect(@TaskResource.save).toHaveBeenCalledWith @task
expect(console.log).toHaveBeenCalledWith 'error'
describe '#editTask', ->
it 'should initialize editing', ->
@$scope.editTask @task
expect(@$scope.edited_task).toEqual @task
expect(@$scope.original_task).toEqual @task
describe '#cancelEditingTask', ->
it 'should cancel editing', ->
@$scope.cancelEditingTask @task
expect(@$scope.edited_task).toEqual null
expect(@$scope.original_task).toEqual null
describe '#updatePosition', ->
beforeEach ->
@event = { oldIndex: 0, newIndex: 1 }
@$scope.original_task = @task
it 'should update position', ->
@controller.updatePosition @event
expect(@$scope.tasks_list).toEqual [
{ id: 1, name: 'name 1', project_id: 1, position: 0},
{ id: 3, name: 'name 2', project_id: 1, position: 1}
]
it 'should handle errors', ->
@success_action = false
@controller.updatePosition @event
expect(@$scope.tasks_list).toEqual [
{ id: 3, name: 'name 2', project_id: 1, position: 2},
{ id: 1, name: 'name 1', project_id: 1, position: 1}
]
describe '#updateTask', ->
beforeEach ->
@$scope.original_task = { id: 1, name: '<NAME>_name', position: 5, project_id: 1}
it 'should not call update when project not modified', ->
@$scope.original_task = @task
@$scope.updateTask @task, 'name'
expect(@TaskResource.update).not.toHaveBeenCalled()
it 'should update task', ->
@$scope.updateTask @task, 'name'
expect(@$scope.edited_task).toEqual null
it 'should handle errors', ->
@success_action = false
@$scope.updateTask @task, 'name'
expect(@task.name).toEqual 'original_name'
describe '#destroyTask', ->
it 'should destroy project', ->
@$scope.destroyTask @task
expect(@$scope.tasks_list.indexOf(@task)).toEqual -1
describe 'when not authenticated', ->
beforeEach ->
@Auth.isAuthenticated.and.returnValue false
@initTaskCtrl()
it 'should not redirect to /', ->
expect(@$location.path).toHaveBeenCalledWith '/' | true | #= require spec_helper
describe 'TaskCtrl', ->
beforeEach ->
@success_action = true
@TaskResource = jasmine.createSpyObj 'TaskResource', [
'query',
'save',
'update',
'remove'
]
@tasks_list = [
{ id: 1, name: 'name 1', project_id: 1, position: 1},
{ id: 3, name: 'name 2', project_id: 1, position: 0}
]
@task = { id: 3, name: 'name 3', project_id: 1, position: 2}
@TaskResource.query.and.returnValue(@tasks_list)
action_with_promise = $promise: then: (resolve, reject) =>
if @success_action then resolve(@task) else reject(@task)
@TaskResource.save.and.returnValue action_with_promise
@TaskResource.update.and.returnValue action_with_promise
@TaskResource.remove.and.returnValue action_with_promise
@filter = jasmine.createSpy 'filter'
@$scope.projectId = 1
console.log = jasmine.createSpy 'log'
@initTaskCtrl = ->
@$controller 'TaskCtrl',
$scope: @$scope
$location: @$location
Auth: @Auth
TaskResource: @TaskResource
$filter: @filter
describe 'when authenticated', ->
beforeEach ->
@controller = @initTaskCtrl()
it 'should not redirect to /', ->
expect(@$location.path).not.toHaveBeenCalledWith '/'
describe 'should initialize vairables', ->
it 'should get tasks list', ->
expect(@$scope.tasks_list).toEqual @tasks_list
it 'should set defult vairables', ->
expect(@$scope.new_task).toEqual {}
expect(@$scope.edited_task).toEqual null
expect(@$scope.sortable_options).toEqual {
animation: 150
onUpdate: @controller.updatePosition
}
it 'should assign functions to scope', ->
expect(@$scope.openDatapicker).toEqual @controller.openDatapicker
expect(@$scope.addTask).toEqual @controller.addTask
expect(@$scope.editTask).toEqual @controller.editTask
expect(@$scope.cancelEditingTask).toEqual @controller.cancelEditingTask
expect(@$scope.updateTask).toEqual @controller.updateTask
expect(@$scope.destroyTask).toEqual @controller.destroyTask
describe '#openDatapicker', ->
it 'should open Datapicker', ->
@$scope.openDatapicker @task
expect(@task.datapickerOpened).toEqual true
describe '#addTask', ->
it 'should save task', ->
@$scope.addTask @task
expect(@$scope.tasks_list.shift()).toEqual @task
expect(@$scope.new_task).toEqual {}
it 'should handle errors', ->
@success_action = false
@$scope.addTask @task
expect(@TaskResource.save).toHaveBeenCalledWith @task
expect(console.log).toHaveBeenCalledWith 'error'
describe '#editTask', ->
it 'should initialize editing', ->
@$scope.editTask @task
expect(@$scope.edited_task).toEqual @task
expect(@$scope.original_task).toEqual @task
describe '#cancelEditingTask', ->
it 'should cancel editing', ->
@$scope.cancelEditingTask @task
expect(@$scope.edited_task).toEqual null
expect(@$scope.original_task).toEqual null
describe '#updatePosition', ->
beforeEach ->
@event = { oldIndex: 0, newIndex: 1 }
@$scope.original_task = @task
it 'should update position', ->
@controller.updatePosition @event
expect(@$scope.tasks_list).toEqual [
{ id: 1, name: 'name 1', project_id: 1, position: 0},
{ id: 3, name: 'name 2', project_id: 1, position: 1}
]
it 'should handle errors', ->
@success_action = false
@controller.updatePosition @event
expect(@$scope.tasks_list).toEqual [
{ id: 3, name: 'name 2', project_id: 1, position: 2},
{ id: 1, name: 'name 1', project_id: 1, position: 1}
]
describe '#updateTask', ->
beforeEach ->
@$scope.original_task = { id: 1, name: 'PI:NAME:<NAME>END_PI_name', position: 5, project_id: 1}
it 'should not call update when project not modified', ->
@$scope.original_task = @task
@$scope.updateTask @task, 'name'
expect(@TaskResource.update).not.toHaveBeenCalled()
it 'should update task', ->
@$scope.updateTask @task, 'name'
expect(@$scope.edited_task).toEqual null
it 'should handle errors', ->
@success_action = false
@$scope.updateTask @task, 'name'
expect(@task.name).toEqual 'original_name'
describe '#destroyTask', ->
it 'should destroy project', ->
@$scope.destroyTask @task
expect(@$scope.tasks_list.indexOf(@task)).toEqual -1
describe 'when not authenticated', ->
beforeEach ->
@Auth.isAuthenticated.and.returnValue false
@initTaskCtrl()
it 'should not redirect to /', ->
expect(@$location.path).toHaveBeenCalledWith '/' |
[
{
"context": "OKUP_SIMPLE\n#\n# Commands:\n# None\n#\n# Author:\n# Matthew Finlayson <matthew.finlayson@jivesoftware.com> (http://www.",
"end": 367,
"score": 0.9998491406440735,
"start": 350,
"tag": "NAME",
"value": "Matthew Finlayson"
},
{
"context": "ands:\n# None\n#\n# Autho... | scripts/jira-lookup.coffee | optimizely/hubot-jira-lookup | 2 | # Description:
# Jira lookup when issues are heard
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_JIRA_LOOKUP_USERNAME
# HUBOT_JIRA_LOOKUP_PASSWORD
# HUBOT_JIRA_LOOKUP_URL
# HUBOT_JIRA_LOOKUP_IGNORE_USERS (optional, format: "user1|user2", default is "jira|github")
# HUBOT_JIRA_LOOKUP_SIMPLE
#
# Commands:
# None
#
# Author:
# Matthew Finlayson <matthew.finlayson@jivesoftware.com> (http://www.jivesoftware.com)
# Benjamin Sherman <benjamin@jivesoftware.com> (http://www.jivesoftware.com)
# Dustin Miller <dustin@sharepointexperts.com> (http://sharepointexperience.com)
module.exports = (robot) ->
ignored_users = process.env.HUBOT_JIRA_LOOKUP_IGNORE_USERS
ignored_user_ids = process.env.HUBOT_JIRA_LOOKUP_IGNORE_USER_IDS
if ignored_users == undefined
ignored_users = "jira|github"
console.log "Ignoring Users: #{ignored_users} User IDs: #{ignored_user_ids}"
robot.hear /\b[a-zA-Z0-9]{2,12}-[0-9]{1,10}\b/, (msg) ->
return if msg.message.user.name.match(new RegExp(ignored_users, "gi"))
return if ignored_user_ids && msg.message.user.id.match(new RegExp(ignored_user_ids, "gi"))
issue = msg.match[0]
if process.env.HUBOT_JIRA_LOOKUP_SIMPLE is "true"
msg.send "Issue: #{issue} - #{process.env.HUBOT_JIRA_LOOKUP_URL}/browse/#{issue}"
else
user = process.env.HUBOT_JIRA_LOOKUP_USERNAME
pass = process.env.HUBOT_JIRA_LOOKUP_PASSWORD
url = process.env.HUBOT_JIRA_LOOKUP_URL
auth = 'Basic ' + new Buffer(user + ':' + pass).toString('base64')
robot.http("#{url}/rest/api/latest/issue/#{issue}")
.headers(Authorization: auth, Accept: 'application/json')
.get() (err, res, body) ->
try
json = JSON.parse(body)
data = {
'key': {
key: 'Key'
value: issue
}
'summary': {
key: 'Summary'
value: json.fields.summary || null
}
'link': {
key: 'Link'
value: "#{process.env.HUBOT_JIRA_LOOKUP_URL}/browse/#{json.key}"
}
'description': {
key: 'Description',
value: json.fields.description || null
}
'assignee': {
key: 'Assignee',
value: (json.fields.assignee && json.fields.assignee.displayName) || 'Unassigned'
}
'reporter': {
key: 'Reporter',
value: (json.fields.reporter && json.fields.reporter.displayName) || null
}
'created': {
key: 'Created',
value: json.fields.created && (new Date(json.fields.created)).toLocaleString() || null
}
'status': {
key: 'Status',
value: (json.fields.status && json.fields.status.name) || null
}
}
fallback = "Issue:\t #{data.key.value}: #{data.summary.value}\n"
if data.description.value?
fallback += "Description:\t #{data.description.value}\n"
fallback += "Assignee:\t #{data.assignee.key}\nStatus:\t #{data.status.value}\nLink:\t #{data.link.value}\n"
msg.send({
attachments: [
{
fallback: fallback
title: "#{data.key.value}: #{data.summary.value}"
title_link: data.link.value
text: data.description.value
fields: [
{
title: data.reporter.key
value: data.reporter.value
short: true
}
{
title: data.assignee.key
value: data.assignee.value
short: true
}
{
title: data.status.key
value: data.status.value
short: true
}
{
title: data.created.key
value: data.created.value
short: true
}
]
}
]
})
catch error
console.log error
| 205861 | # Description:
# Jira lookup when issues are heard
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_JIRA_LOOKUP_USERNAME
# HUBOT_JIRA_LOOKUP_PASSWORD
# HUBOT_JIRA_LOOKUP_URL
# HUBOT_JIRA_LOOKUP_IGNORE_USERS (optional, format: "user1|user2", default is "jira|github")
# HUBOT_JIRA_LOOKUP_SIMPLE
#
# Commands:
# None
#
# Author:
# <NAME> <<EMAIL>> (http://www.jivesoftware.com)
# <NAME> <<EMAIL>> (http://www.jivesoftware.com)
# <NAME> <<EMAIL>> (http://sharepointexperience.com)
module.exports = (robot) ->
ignored_users = process.env.HUBOT_JIRA_LOOKUP_IGNORE_USERS
ignored_user_ids = process.env.HUBOT_JIRA_LOOKUP_IGNORE_USER_IDS
if ignored_users == undefined
ignored_users = "jira|github"
console.log "Ignoring Users: #{ignored_users} User IDs: #{ignored_user_ids}"
robot.hear /\b[a-zA-Z0-9]{2,12}-[0-9]{1,10}\b/, (msg) ->
return if msg.message.user.name.match(new RegExp(ignored_users, "gi"))
return if ignored_user_ids && msg.message.user.id.match(new RegExp(ignored_user_ids, "gi"))
issue = msg.match[0]
if process.env.HUBOT_JIRA_LOOKUP_SIMPLE is "true"
msg.send "Issue: #{issue} - #{process.env.HUBOT_JIRA_LOOKUP_URL}/browse/#{issue}"
else
user = process.env.HUBOT_JIRA_LOOKUP_USERNAME
pass = process.env.HUBOT_JIRA_LOOKUP_PASSWORD
url = process.env.HUBOT_JIRA_LOOKUP_URL
auth = 'Basic ' + new Buffer(user + ':' + pass).toString('base64')
robot.http("#{url}/rest/api/latest/issue/#{issue}")
.headers(Authorization: auth, Accept: 'application/json')
.get() (err, res, body) ->
try
json = JSON.parse(body)
data = {
'key': {
key: 'Key'
value: issue
}
'summary': {
key: 'Summary'
value: json.fields.summary || null
}
'link': {
key: 'Link'
value: "#{process.env.HUBOT_JIRA_LOOKUP_URL}/browse/#{json.key}"
}
'description': {
key: 'Description',
value: json.fields.description || null
}
'assignee': {
key: 'Assignee',
value: (json.fields.assignee && json.fields.assignee.displayName) || 'Unassigned'
}
'reporter': {
key: 'Reporter',
value: (json.fields.reporter && json.fields.reporter.displayName) || null
}
'created': {
key: 'Created',
value: json.fields.created && (new Date(json.fields.created)).toLocaleString() || null
}
'status': {
key: 'Status',
value: (json.fields.status && json.fields.status.name) || null
}
}
fallback = "Issue:\t #{data.key.value}: #{data.summary.value}\n"
if data.description.value?
fallback += "Description:\t #{data.description.value}\n"
fallback += "Assignee:\t #{data.assignee.key}\nStatus:\t #{data.status.value}\nLink:\t #{data.link.value}\n"
msg.send({
attachments: [
{
fallback: fallback
title: "#{data.key.value}: #{data.summary.value}"
title_link: data.link.value
text: data.description.value
fields: [
{
title: data.reporter.key
value: data.reporter.value
short: true
}
{
title: data.assignee.key
value: data.assignee.value
short: true
}
{
title: data.status.key
value: data.status.value
short: true
}
{
title: data.created.key
value: data.created.value
short: true
}
]
}
]
})
catch error
console.log error
| true | # Description:
# Jira lookup when issues are heard
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_JIRA_LOOKUP_USERNAME
# HUBOT_JIRA_LOOKUP_PASSWORD
# HUBOT_JIRA_LOOKUP_URL
# HUBOT_JIRA_LOOKUP_IGNORE_USERS (optional, format: "user1|user2", default is "jira|github")
# HUBOT_JIRA_LOOKUP_SIMPLE
#
# Commands:
# None
#
# Author:
# PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> (http://www.jivesoftware.com)
# PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> (http://www.jivesoftware.com)
# PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> (http://sharepointexperience.com)
module.exports = (robot) ->
ignored_users = process.env.HUBOT_JIRA_LOOKUP_IGNORE_USERS
ignored_user_ids = process.env.HUBOT_JIRA_LOOKUP_IGNORE_USER_IDS
if ignored_users == undefined
ignored_users = "jira|github"
console.log "Ignoring Users: #{ignored_users} User IDs: #{ignored_user_ids}"
robot.hear /\b[a-zA-Z0-9]{2,12}-[0-9]{1,10}\b/, (msg) ->
return if msg.message.user.name.match(new RegExp(ignored_users, "gi"))
return if ignored_user_ids && msg.message.user.id.match(new RegExp(ignored_user_ids, "gi"))
issue = msg.match[0]
if process.env.HUBOT_JIRA_LOOKUP_SIMPLE is "true"
msg.send "Issue: #{issue} - #{process.env.HUBOT_JIRA_LOOKUP_URL}/browse/#{issue}"
else
user = process.env.HUBOT_JIRA_LOOKUP_USERNAME
pass = process.env.HUBOT_JIRA_LOOKUP_PASSWORD
url = process.env.HUBOT_JIRA_LOOKUP_URL
auth = 'Basic ' + new Buffer(user + ':' + pass).toString('base64')
robot.http("#{url}/rest/api/latest/issue/#{issue}")
.headers(Authorization: auth, Accept: 'application/json')
.get() (err, res, body) ->
try
json = JSON.parse(body)
data = {
'key': {
key: 'Key'
value: issue
}
'summary': {
key: 'Summary'
value: json.fields.summary || null
}
'link': {
key: 'Link'
value: "#{process.env.HUBOT_JIRA_LOOKUP_URL}/browse/#{json.key}"
}
'description': {
key: 'Description',
value: json.fields.description || null
}
'assignee': {
key: 'Assignee',
value: (json.fields.assignee && json.fields.assignee.displayName) || 'Unassigned'
}
'reporter': {
key: 'Reporter',
value: (json.fields.reporter && json.fields.reporter.displayName) || null
}
'created': {
key: 'Created',
value: json.fields.created && (new Date(json.fields.created)).toLocaleString() || null
}
'status': {
key: 'Status',
value: (json.fields.status && json.fields.status.name) || null
}
}
fallback = "Issue:\t #{data.key.value}: #{data.summary.value}\n"
if data.description.value?
fallback += "Description:\t #{data.description.value}\n"
fallback += "Assignee:\t #{data.assignee.key}\nStatus:\t #{data.status.value}\nLink:\t #{data.link.value}\n"
msg.send({
attachments: [
{
fallback: fallback
title: "#{data.key.value}: #{data.summary.value}"
title_link: data.link.value
text: data.description.value
fields: [
{
title: data.reporter.key
value: data.reporter.value
short: true
}
{
title: data.assignee.key
value: data.assignee.value
short: true
}
{
title: data.status.key
value: data.status.value
short: true
}
{
title: data.created.key
value: data.created.value
short: true
}
]
}
]
})
catch error
console.log error
|
[
{
"context": "e same as for keymaps!\n # see https://github.com/atom/electron/blob/master/docs/api/global-shortcut.md ",
"end": 308,
"score": 0.9960262179374695,
"start": 304,
"tag": "USERNAME",
"value": "atom"
},
{
"context": "/api/global-shortcut.md for details\n keyCombo = 'cmd+... | docs/recommended-usage/.atom/init.coffee | viddo/atom-textual-velocity | 35 | # Init script to register global shortcut and hide tree-view when session is started
atom.packages.onDidActivatePackage (pkg) ->
return if pkg.name isnt 'textual-velocity'
# Try register the global shortcut
# note that key combo syntax is _not_ the same as for keymaps!
# see https://github.com/atom/electron/blob/master/docs/api/global-shortcut.md for details
keyCombo = 'cmd+shift+space'
ret = require('remote').globalShortcut.register keyCombo, ->
target = document.body.querySelector('atom-workspace')
atom.commands.dispatch(target, 'textual-velocity:toggle-atom-window')
if ret
atom.notifications.addSuccess "Registered #{keyCombo} to toggle this Atom window", {
description: ''
dismissable: true
}
else
atom.notifications.addWarning "Could not register #{keyCombo} as shortcut", {
description: 'Probably already registered by another app or window, try restarted atom completely'
dismissable: true
}
# Hide tree-view when package is activated
try
treeViewMainModule = atom.packages.getActivePackage('tree-view').mainModule
treeViewMainModule.createOrDestroyTreeViewIfNeeded()
treeViewMainModule.getTreeViewInstance().hide()
catch err
console.error(err)
atom.notifications.addWarning 'Could not hide tree view', {
detail: 'See the console for the error'
dismissable: true
}
# Activate package right away
target = document.body.querySelector('atom-workspace')
atom.commands.dispatch(target, 'textual-velocity:start-session')
| 30288 | # Init script to register global shortcut and hide tree-view when session is started
atom.packages.onDidActivatePackage (pkg) ->
return if pkg.name isnt 'textual-velocity'
# Try register the global shortcut
# note that key combo syntax is _not_ the same as for keymaps!
# see https://github.com/atom/electron/blob/master/docs/api/global-shortcut.md for details
keyCombo = '<KEY>'
ret = require('remote').globalShortcut.register keyCombo, ->
target = document.body.querySelector('atom-workspace')
atom.commands.dispatch(target, 'textual-velocity:toggle-atom-window')
if ret
atom.notifications.addSuccess "Registered #{keyCombo} to toggle this Atom window", {
description: ''
dismissable: true
}
else
atom.notifications.addWarning "Could not register #{keyCombo} as shortcut", {
description: 'Probably already registered by another app or window, try restarted atom completely'
dismissable: true
}
# Hide tree-view when package is activated
try
treeViewMainModule = atom.packages.getActivePackage('tree-view').mainModule
treeViewMainModule.createOrDestroyTreeViewIfNeeded()
treeViewMainModule.getTreeViewInstance().hide()
catch err
console.error(err)
atom.notifications.addWarning 'Could not hide tree view', {
detail: 'See the console for the error'
dismissable: true
}
# Activate package right away
target = document.body.querySelector('atom-workspace')
atom.commands.dispatch(target, 'textual-velocity:start-session')
| true | # Init script to register global shortcut and hide tree-view when session is started
atom.packages.onDidActivatePackage (pkg) ->
return if pkg.name isnt 'textual-velocity'
# Try register the global shortcut
# note that key combo syntax is _not_ the same as for keymaps!
# see https://github.com/atom/electron/blob/master/docs/api/global-shortcut.md for details
keyCombo = 'PI:KEY:<KEY>END_PI'
ret = require('remote').globalShortcut.register keyCombo, ->
target = document.body.querySelector('atom-workspace')
atom.commands.dispatch(target, 'textual-velocity:toggle-atom-window')
if ret
atom.notifications.addSuccess "Registered #{keyCombo} to toggle this Atom window", {
description: ''
dismissable: true
}
else
atom.notifications.addWarning "Could not register #{keyCombo} as shortcut", {
description: 'Probably already registered by another app or window, try restarted atom completely'
dismissable: true
}
# Hide tree-view when package is activated
try
treeViewMainModule = atom.packages.getActivePackage('tree-view').mainModule
treeViewMainModule.createOrDestroyTreeViewIfNeeded()
treeViewMainModule.getTreeViewInstance().hide()
catch err
console.error(err)
atom.notifications.addWarning 'Could not hide tree view', {
detail: 'See the console for the error'
dismissable: true
}
# Activate package right away
target = document.body.querySelector('atom-workspace')
atom.commands.dispatch(target, 'textual-velocity:start-session')
|
[
{
"context": " 'HH:mm:ss'\n meta = \"\"\"\n {\n \"name\": \"#{name}\",\n \"date\": \"#{date}\",\n \"time\":",
"end": 2752,
"score": 0.9651554822921753,
"start": 2752,
"tag": "NAME",
"value": ""
},
{
"context": ":mm:ss'\n meta = \"\"\"\n {\n ... | lib/cli.coffee | CatTail/catlog | 1 | fs = require 'fs-extra'
path = require 'path'
_ = require 'underscore'
program = require 'commander'
inquirer = require 'inquirer'
colors = require 'colors'
moment = require 'moment'
async = require 'async'
temp = require 'temp'
server = require '../lib/server'
directory = require '../lib/directory'
parser = require '../lib/parser'
render = require '../lib/render.coffee'
colors.setTheme({
silly: 'rainbow'
input: 'grey'
verbose: 'cyan'
prompt: 'grey'
info: 'green'
data: 'grey'
help: 'cyan'
warn: 'yellow'
debug: 'blue'
error: 'red'
})
import_settings = (to='.') ->
to = path.resolve to
top = directory.root to, "settings.json"
if top is null
# check if directory valid
console.log 'use `catlog init [to]` to initialize project directory'.error
process.exit()
global_settings = require '../assets/settings'
local_settings = require path.join(top, 'settings.json')
local_settings = _.clone _.defaults local_settings, global_settings
# reset as relative path
local_settings.source = path.join top, local_settings.source
local_settings.destination = path.join top, local_settings.destination
local_settings.theme_path = path.join top, "themes"
local_settings.plugin_path = path.join top, "plugins"
# asset_url default to base_url
local_settings.asset_url = local_settings.base_url
return local_settings
create_post = (src, to, callback) ->
if (src)
console.log "Original markdown file #{src}"
content = fs.readFileSync src, 'utf8'
settings = import_settings to
categories = fs.readdirSync settings.source
newCategory = 'Add new category'
categories.push newCategory
questions = [
{
type: 'input'
name: 'name'
message: 'write your article name'
}
{
type: 'list'
name: 'category'
message: 'choose article category'
choices: categories
}
{
type: 'input'
name: 'category'
message: 'input new category name'
validate: (value) -> value.length isnt 0
filter: (category) ->
fs.mkdirSync path.join(settings.source, category)
return category
when: (answers) ->
return answers.category is newCategory
}
{
type: 'input'
name: 'title'
message: 'input new permalink title'
validate: (value) -> value.length isnt 0
}
{
type: 'input'
name: 'author'
message: 'input author name'
default: settings.author
}
]
inquirer.prompt questions, (answers) ->
# meta data
title = answers.title
name = answers.name
category = answers.category
author = answers.author
date = moment().format 'YYYY-MM-DD'
time = moment().format 'HH:mm:ss'
meta = """
{
"name": "#{name}",
"date": "#{date}",
"time": "#{time}",
"author": "#{author}"
}
"""
basename = path.join settings.source, category, title
fs.mkdirSync basename
fs.writeFileSync path.join(basename, 'meta.json'), meta, 'utf8'
fs.writeFileSync path.join(basename, 'index.md'), content or '', 'utf8'
console.log 'created a new article directory below contents folder.'.prompt
console.log "edit article in #{settings.source}/#{category}/#{title}/index.md".prompt
callback and callback()
cmd_init = (to='.', options) ->
init = ->
global_settings = require '../assets/settings'
assets = path.resolve __dirname, '../assets'
to = path.resolve to
src = path.join to, global_settings.source
dest = path.join to, global_settings.destination
console.log 'creates site skeleton structure'.info
if not fs.existsSync(src)
fs.mkdirSync(src)
console.log 'copying default blog content'.info
fs.copy "#{assets}/assets/examples", "#{src}/examples"
else
console.log "#{src} exist, leave without touch".warn
if not fs.existsSync(dest)
fs.mkdirSync(dest)
else
console.log "#{dest} exist, leave without touch".warn
assets = [
["#{assets}/plugins", "#{to}/plugins"]
["#{assets}/themes", "#{to}/themes"]
["#{assets}/settings.json", "#{to}/settings.json"]
]
for asset in assets
if not fs.existsSync asset[1]
console.log "copy #{asset[1]}".info
fs.copy asset[0], asset[1]
else
console.log "#{asset[1]} exist, leave without touch".warn
try
if not fs.readdirSync(to).length or options.force
init()
else
# directory not empty
inquirer.prompt {
type: 'confirm'
name: 'ifProcess'
message: 'Current directory not empty, do you really want to process?'
default: false
}, (answers) ->
if answers.ifProcess
init()
catch err
console.log "Directory not exit".error
cmd_publish = (to) ->
create_post '', to, ->
process.stdin.destroy()
build = (settings, callback) ->
parser.parse settings, (env) ->
render.render env
cmd_build = (to='.', args) ->
settings = import_settings to
if args.assetUrl
settings.asset_url = args.assetUrl
console.log 'copying theme'.info
fs.copy "#{settings.theme_path}", "#{settings.destination}/themes", ->
console.log 'parse markdown'.info
settings.auto = args.auto
parser.parse settings, (env) ->
console.log 'render html'.info
render.render env
# static file server
if args.server isnt undefined
if typeof args.server is 'boolean'
port = settings.port
else
port = args.server
server.run {path: settings.destination, port: port}
# auto build
if args.auto
watch settings.source, {followSymLinks: true}, ->
build settings
cmd_preview = (to='.', args) ->
temp.mkdir 'catlog', (err, dirPath) ->
console.log "create temp directory #{dirPath}".info
settings = import_settings to
settings.destination = dirPath
settings.base_url = '/' # local server always use root
console.log 'copy theme'.info
fs.copy "#{settings.theme_path}", "#{settings.destination}/themes", ->
settings.auto = args.auto
console.log 'parse markdown'.info
parser.parse settings, (env) ->
console.log 'render markdown'.info
render.render env
# static file server
if args.server isnt undefined and typeof args.server isnt 'boolean'
port = args.server
else
port = settings.port
server.run {path: settings.destination, port: port}
# auto build
if args.auto
watch settings.source, {followSymLinks: true}, ->
build settings
cmd_migrate = (from, to) ->
srcs = directory.list from, (src) ->
fs.statSync(src).isFile() and path.extname(src) is '.md'
async.eachSeries srcs, ((src, callback) ->
create_post src, to, callback
), ->
process.stdin.destroy()
cmd_help = (cmd) ->
if cmd
command = _.find program.commands, (command) -> command._name is cmd
command.outputHelp()
else
program.help()
program
.version(require('../package.json').version)
program
.command('init [to]')
.description('initialize project, create new directory before initialize')
.option('-f --force', 'force initialize on directory not empty')
.action(cmd_init)
program
.command('publish [to]')
.description('publish new article')
.action(cmd_publish)
program
.command('preview [to]')
.description('preview generated html files')
.option('-s --server [port]', 'start local server')
.option('-a --auto', 'watch for file change and auto update')
.action(cmd_preview)
program
.command('build [to]')
.description('build html files')
.option('-u --asset-url [url]', 'use self defined asset url')
.option('-s --server [port]', 'start local server')
.option('-a --auto', 'watch for file change and auto update')
.action(cmd_build)
program
.command('migrate <from> [to]')
.description('migrate exist markdown file into project')
.action(cmd_migrate)
program
.command('help [cmd]')
.description('display command description')
.action(cmd_help)
program
.command('*')
.description('unknown')
.action(program.help)
program.parse process.argv
if program.args.length is 0
program.help()
| 80375 | fs = require 'fs-extra'
path = require 'path'
_ = require 'underscore'
program = require 'commander'
inquirer = require 'inquirer'
colors = require 'colors'
moment = require 'moment'
async = require 'async'
temp = require 'temp'
server = require '../lib/server'
directory = require '../lib/directory'
parser = require '../lib/parser'
render = require '../lib/render.coffee'
colors.setTheme({
silly: 'rainbow'
input: 'grey'
verbose: 'cyan'
prompt: 'grey'
info: 'green'
data: 'grey'
help: 'cyan'
warn: 'yellow'
debug: 'blue'
error: 'red'
})
import_settings = (to='.') ->
to = path.resolve to
top = directory.root to, "settings.json"
if top is null
# check if directory valid
console.log 'use `catlog init [to]` to initialize project directory'.error
process.exit()
global_settings = require '../assets/settings'
local_settings = require path.join(top, 'settings.json')
local_settings = _.clone _.defaults local_settings, global_settings
# reset as relative path
local_settings.source = path.join top, local_settings.source
local_settings.destination = path.join top, local_settings.destination
local_settings.theme_path = path.join top, "themes"
local_settings.plugin_path = path.join top, "plugins"
# asset_url default to base_url
local_settings.asset_url = local_settings.base_url
return local_settings
create_post = (src, to, callback) ->
if (src)
console.log "Original markdown file #{src}"
content = fs.readFileSync src, 'utf8'
settings = import_settings to
categories = fs.readdirSync settings.source
newCategory = 'Add new category'
categories.push newCategory
questions = [
{
type: 'input'
name: 'name'
message: 'write your article name'
}
{
type: 'list'
name: 'category'
message: 'choose article category'
choices: categories
}
{
type: 'input'
name: 'category'
message: 'input new category name'
validate: (value) -> value.length isnt 0
filter: (category) ->
fs.mkdirSync path.join(settings.source, category)
return category
when: (answers) ->
return answers.category is newCategory
}
{
type: 'input'
name: 'title'
message: 'input new permalink title'
validate: (value) -> value.length isnt 0
}
{
type: 'input'
name: 'author'
message: 'input author name'
default: settings.author
}
]
inquirer.prompt questions, (answers) ->
# meta data
title = answers.title
name = answers.name
category = answers.category
author = answers.author
date = moment().format 'YYYY-MM-DD'
time = moment().format 'HH:mm:ss'
meta = """
{
"name":<NAME> "#{<NAME>}",
"date": "#{date}",
"time": "#{time}",
"author": "#{author}"
}
"""
basename = path.join settings.source, category, title
fs.mkdirSync basename
fs.writeFileSync path.join(basename, 'meta.json'), meta, 'utf8'
fs.writeFileSync path.join(basename, 'index.md'), content or '', 'utf8'
console.log 'created a new article directory below contents folder.'.prompt
console.log "edit article in #{settings.source}/#{category}/#{title}/index.md".prompt
callback and callback()
cmd_init = (to='.', options) ->
init = ->
global_settings = require '../assets/settings'
assets = path.resolve __dirname, '../assets'
to = path.resolve to
src = path.join to, global_settings.source
dest = path.join to, global_settings.destination
console.log 'creates site skeleton structure'.info
if not fs.existsSync(src)
fs.mkdirSync(src)
console.log 'copying default blog content'.info
fs.copy "#{assets}/assets/examples", "#{src}/examples"
else
console.log "#{src} exist, leave without touch".warn
if not fs.existsSync(dest)
fs.mkdirSync(dest)
else
console.log "#{dest} exist, leave without touch".warn
assets = [
["#{assets}/plugins", "#{to}/plugins"]
["#{assets}/themes", "#{to}/themes"]
["#{assets}/settings.json", "#{to}/settings.json"]
]
for asset in assets
if not fs.existsSync asset[1]
console.log "copy #{asset[1]}".info
fs.copy asset[0], asset[1]
else
console.log "#{asset[1]} exist, leave without touch".warn
try
if not fs.readdirSync(to).length or options.force
init()
else
# directory not empty
inquirer.prompt {
type: 'confirm'
name: 'ifProcess'
message: 'Current directory not empty, do you really want to process?'
default: false
}, (answers) ->
if answers.ifProcess
init()
catch err
console.log "Directory not exit".error
cmd_publish = (to) ->
create_post '', to, ->
process.stdin.destroy()
build = (settings, callback) ->
parser.parse settings, (env) ->
render.render env
cmd_build = (to='.', args) ->
settings = import_settings to
if args.assetUrl
settings.asset_url = args.assetUrl
console.log 'copying theme'.info
fs.copy "#{settings.theme_path}", "#{settings.destination}/themes", ->
console.log 'parse markdown'.info
settings.auto = args.auto
parser.parse settings, (env) ->
console.log 'render html'.info
render.render env
# static file server
if args.server isnt undefined
if typeof args.server is 'boolean'
port = settings.port
else
port = args.server
server.run {path: settings.destination, port: port}
# auto build
if args.auto
watch settings.source, {followSymLinks: true}, ->
build settings
cmd_preview = (to='.', args) ->
temp.mkdir 'catlog', (err, dirPath) ->
console.log "create temp directory #{dirPath}".info
settings = import_settings to
settings.destination = dirPath
settings.base_url = '/' # local server always use root
console.log 'copy theme'.info
fs.copy "#{settings.theme_path}", "#{settings.destination}/themes", ->
settings.auto = args.auto
console.log 'parse markdown'.info
parser.parse settings, (env) ->
console.log 'render markdown'.info
render.render env
# static file server
if args.server isnt undefined and typeof args.server isnt 'boolean'
port = args.server
else
port = settings.port
server.run {path: settings.destination, port: port}
# auto build
if args.auto
watch settings.source, {followSymLinks: true}, ->
build settings
cmd_migrate = (from, to) ->
srcs = directory.list from, (src) ->
fs.statSync(src).isFile() and path.extname(src) is '.md'
async.eachSeries srcs, ((src, callback) ->
create_post src, to, callback
), ->
process.stdin.destroy()
cmd_help = (cmd) ->
if cmd
command = _.find program.commands, (command) -> command._name is cmd
command.outputHelp()
else
program.help()
program
.version(require('../package.json').version)
program
.command('init [to]')
.description('initialize project, create new directory before initialize')
.option('-f --force', 'force initialize on directory not empty')
.action(cmd_init)
program
.command('publish [to]')
.description('publish new article')
.action(cmd_publish)
program
.command('preview [to]')
.description('preview generated html files')
.option('-s --server [port]', 'start local server')
.option('-a --auto', 'watch for file change and auto update')
.action(cmd_preview)
program
.command('build [to]')
.description('build html files')
.option('-u --asset-url [url]', 'use self defined asset url')
.option('-s --server [port]', 'start local server')
.option('-a --auto', 'watch for file change and auto update')
.action(cmd_build)
program
.command('migrate <from> [to]')
.description('migrate exist markdown file into project')
.action(cmd_migrate)
program
.command('help [cmd]')
.description('display command description')
.action(cmd_help)
program
.command('*')
.description('unknown')
.action(program.help)
program.parse process.argv
if program.args.length is 0
program.help()
| true | fs = require 'fs-extra'
path = require 'path'
_ = require 'underscore'
program = require 'commander'
inquirer = require 'inquirer'
colors = require 'colors'
moment = require 'moment'
async = require 'async'
temp = require 'temp'
server = require '../lib/server'
directory = require '../lib/directory'
parser = require '../lib/parser'
render = require '../lib/render.coffee'
colors.setTheme({
silly: 'rainbow'
input: 'grey'
verbose: 'cyan'
prompt: 'grey'
info: 'green'
data: 'grey'
help: 'cyan'
warn: 'yellow'
debug: 'blue'
error: 'red'
})
import_settings = (to='.') ->
to = path.resolve to
top = directory.root to, "settings.json"
if top is null
# check if directory valid
console.log 'use `catlog init [to]` to initialize project directory'.error
process.exit()
global_settings = require '../assets/settings'
local_settings = require path.join(top, 'settings.json')
local_settings = _.clone _.defaults local_settings, global_settings
# reset as relative path
local_settings.source = path.join top, local_settings.source
local_settings.destination = path.join top, local_settings.destination
local_settings.theme_path = path.join top, "themes"
local_settings.plugin_path = path.join top, "plugins"
# asset_url default to base_url
local_settings.asset_url = local_settings.base_url
return local_settings
create_post = (src, to, callback) ->
if (src)
console.log "Original markdown file #{src}"
content = fs.readFileSync src, 'utf8'
settings = import_settings to
categories = fs.readdirSync settings.source
newCategory = 'Add new category'
categories.push newCategory
questions = [
{
type: 'input'
name: 'name'
message: 'write your article name'
}
{
type: 'list'
name: 'category'
message: 'choose article category'
choices: categories
}
{
type: 'input'
name: 'category'
message: 'input new category name'
validate: (value) -> value.length isnt 0
filter: (category) ->
fs.mkdirSync path.join(settings.source, category)
return category
when: (answers) ->
return answers.category is newCategory
}
{
type: 'input'
name: 'title'
message: 'input new permalink title'
validate: (value) -> value.length isnt 0
}
{
type: 'input'
name: 'author'
message: 'input author name'
default: settings.author
}
]
inquirer.prompt questions, (answers) ->
# meta data
title = answers.title
name = answers.name
category = answers.category
author = answers.author
date = moment().format 'YYYY-MM-DD'
time = moment().format 'HH:mm:ss'
meta = """
{
"name":PI:NAME:<NAME>END_PI "#{PI:NAME:<NAME>END_PI}",
"date": "#{date}",
"time": "#{time}",
"author": "#{author}"
}
"""
basename = path.join settings.source, category, title
fs.mkdirSync basename
fs.writeFileSync path.join(basename, 'meta.json'), meta, 'utf8'
fs.writeFileSync path.join(basename, 'index.md'), content or '', 'utf8'
console.log 'created a new article directory below contents folder.'.prompt
console.log "edit article in #{settings.source}/#{category}/#{title}/index.md".prompt
callback and callback()
cmd_init = (to='.', options) ->
init = ->
global_settings = require '../assets/settings'
assets = path.resolve __dirname, '../assets'
to = path.resolve to
src = path.join to, global_settings.source
dest = path.join to, global_settings.destination
console.log 'creates site skeleton structure'.info
if not fs.existsSync(src)
fs.mkdirSync(src)
console.log 'copying default blog content'.info
fs.copy "#{assets}/assets/examples", "#{src}/examples"
else
console.log "#{src} exist, leave without touch".warn
if not fs.existsSync(dest)
fs.mkdirSync(dest)
else
console.log "#{dest} exist, leave without touch".warn
assets = [
["#{assets}/plugins", "#{to}/plugins"]
["#{assets}/themes", "#{to}/themes"]
["#{assets}/settings.json", "#{to}/settings.json"]
]
for asset in assets
if not fs.existsSync asset[1]
console.log "copy #{asset[1]}".info
fs.copy asset[0], asset[1]
else
console.log "#{asset[1]} exist, leave without touch".warn
try
if not fs.readdirSync(to).length or options.force
init()
else
# directory not empty
inquirer.prompt {
type: 'confirm'
name: 'ifProcess'
message: 'Current directory not empty, do you really want to process?'
default: false
}, (answers) ->
if answers.ifProcess
init()
catch err
console.log "Directory not exit".error
cmd_publish = (to) ->
create_post '', to, ->
process.stdin.destroy()
build = (settings, callback) ->
parser.parse settings, (env) ->
render.render env
cmd_build = (to='.', args) ->
settings = import_settings to
if args.assetUrl
settings.asset_url = args.assetUrl
console.log 'copying theme'.info
fs.copy "#{settings.theme_path}", "#{settings.destination}/themes", ->
console.log 'parse markdown'.info
settings.auto = args.auto
parser.parse settings, (env) ->
console.log 'render html'.info
render.render env
# static file server
if args.server isnt undefined
if typeof args.server is 'boolean'
port = settings.port
else
port = args.server
server.run {path: settings.destination, port: port}
# auto build
if args.auto
watch settings.source, {followSymLinks: true}, ->
build settings
cmd_preview = (to='.', args) ->
temp.mkdir 'catlog', (err, dirPath) ->
console.log "create temp directory #{dirPath}".info
settings = import_settings to
settings.destination = dirPath
settings.base_url = '/' # local server always use root
console.log 'copy theme'.info
fs.copy "#{settings.theme_path}", "#{settings.destination}/themes", ->
settings.auto = args.auto
console.log 'parse markdown'.info
parser.parse settings, (env) ->
console.log 'render markdown'.info
render.render env
# static file server
if args.server isnt undefined and typeof args.server isnt 'boolean'
port = args.server
else
port = settings.port
server.run {path: settings.destination, port: port}
# auto build
if args.auto
watch settings.source, {followSymLinks: true}, ->
build settings
cmd_migrate = (from, to) ->
srcs = directory.list from, (src) ->
fs.statSync(src).isFile() and path.extname(src) is '.md'
async.eachSeries srcs, ((src, callback) ->
create_post src, to, callback
), ->
process.stdin.destroy()
cmd_help = (cmd) ->
if cmd
command = _.find program.commands, (command) -> command._name is cmd
command.outputHelp()
else
program.help()
program
.version(require('../package.json').version)
program
.command('init [to]')
.description('initialize project, create new directory before initialize')
.option('-f --force', 'force initialize on directory not empty')
.action(cmd_init)
program
.command('publish [to]')
.description('publish new article')
.action(cmd_publish)
program
.command('preview [to]')
.description('preview generated html files')
.option('-s --server [port]', 'start local server')
.option('-a --auto', 'watch for file change and auto update')
.action(cmd_preview)
program
.command('build [to]')
.description('build html files')
.option('-u --asset-url [url]', 'use self defined asset url')
.option('-s --server [port]', 'start local server')
.option('-a --auto', 'watch for file change and auto update')
.action(cmd_build)
program
.command('migrate <from> [to]')
.description('migrate exist markdown file into project')
.action(cmd_migrate)
program
.command('help [cmd]')
.description('display command description')
.action(cmd_help)
program
.command('*')
.description('unknown')
.action(program.help)
program.parse process.argv
if program.args.length is 0
program.help()
|
[
{
"context": " 'hyperlink-test', [\n {\n Name: {value: 'Bob', hyperlink: 'http://www.bob.com'}\n Compan",
"end": 85,
"score": 0.999842643737793,
"start": 82,
"tag": "NAME",
"value": "Bob"
},
{
"context": "w.google.com'}\n }\n {\n Name: {value: 'Joel Spol... | test/hyperlink_test.coffee | SBeyeMHP/node-xlsx-writestream | 42 | test = require('./common')
test 'hyperlink-test', [
{
Name: {value: 'Bob', hyperlink: 'http://www.bob.com'}
Company: {value: 'Google', hyperlink: 'http://www.google.com'}
}
{
Name: {value: 'Joel Spolsky', hyperlink: 'http://www.joelonsoftware.com'}
Company: {value: 'Fog Creek', hyperlink: 'http://www.fogcreek.com'}
}
]
| 110138 | test = require('./common')
test 'hyperlink-test', [
{
Name: {value: '<NAME>', hyperlink: 'http://www.bob.com'}
Company: {value: 'Google', hyperlink: 'http://www.google.com'}
}
{
Name: {value: '<NAME>', hyperlink: 'http://www.joelonsoftware.com'}
Company: {value: 'Fog Creek', hyperlink: 'http://www.fogcreek.com'}
}
]
| true | test = require('./common')
test 'hyperlink-test', [
{
Name: {value: 'PI:NAME:<NAME>END_PI', hyperlink: 'http://www.bob.com'}
Company: {value: 'Google', hyperlink: 'http://www.google.com'}
}
{
Name: {value: 'PI:NAME:<NAME>END_PI', hyperlink: 'http://www.joelonsoftware.com'}
Company: {value: 'Fog Creek', hyperlink: 'http://www.fogcreek.com'}
}
]
|
[
{
"context": "no = (doc['Año'] || '').toLowerCase()\n\n key = [family, names, municipality, community, sexo, dobDia, me",
"end": 1373,
"score": 0.898979127407074,
"start": 1367,
"tag": "KEY",
"value": "family"
}
] | app/views/duplicateCheck/map.coffee | ICTatRTI/coconut | 1 | (doc) ->
if doc.collection is "result" and doc.Completado
hasRequiredFields = doc['Apellido']? and doc['Nombre']? and doc['Municipio']? and doc['BarrioComunidad']? and doc['Sexo']? and doc['Día']? and doc['Mes']? and doc['Año']?
return unless hasRequiredFields
spacePattern = new RegExp(" ", "g")
family = (doc['Apellido'] || '').toLowerCase()
names = (doc['Nombre'] || '').toLowerCase()
municipality = (doc['Municipio'] || '').toLowerCase()
community = (doc['BarrioComunidad'] || '').toLowerCase()
sexo = (doc['Sexo'] || '').toLowerCase()
dobDia = (doc['Día'] || '').toLowerCase()
dobDia = "0" + dobDia if dobDia.length < 2
dobMes = (doc['Mes'] || '').toLowerCase()
mes = dobMes
switch dobMes
when "enero"
mes = "01"
when "febrero"
mes = "02"
when "marzo"
mes = "03"
when "abril"
mes = "04"
when "mayo"
mes = "05"
when "junio"
mes = "06"
when "julio"
mes = "07"
when "agosto"
mes = "08"
when "septiembre"
mes = "09"
when "octubre"
mes = "10"
when "noviembre"
mes = "11"
when "diciembre"
mes = "12"
mes = "0" + mes if mes.length < 2
dobAno = (doc['Año'] || '').toLowerCase()
key = [family, names, municipality, community, sexo, dobDia, mes, dobAno].join(":").replace(spacePattern, '')
emit key, doc
| 107355 | (doc) ->
if doc.collection is "result" and doc.Completado
hasRequiredFields = doc['Apellido']? and doc['Nombre']? and doc['Municipio']? and doc['BarrioComunidad']? and doc['Sexo']? and doc['Día']? and doc['Mes']? and doc['Año']?
return unless hasRequiredFields
spacePattern = new RegExp(" ", "g")
family = (doc['Apellido'] || '').toLowerCase()
names = (doc['Nombre'] || '').toLowerCase()
municipality = (doc['Municipio'] || '').toLowerCase()
community = (doc['BarrioComunidad'] || '').toLowerCase()
sexo = (doc['Sexo'] || '').toLowerCase()
dobDia = (doc['Día'] || '').toLowerCase()
dobDia = "0" + dobDia if dobDia.length < 2
dobMes = (doc['Mes'] || '').toLowerCase()
mes = dobMes
switch dobMes
when "enero"
mes = "01"
when "febrero"
mes = "02"
when "marzo"
mes = "03"
when "abril"
mes = "04"
when "mayo"
mes = "05"
when "junio"
mes = "06"
when "julio"
mes = "07"
when "agosto"
mes = "08"
when "septiembre"
mes = "09"
when "octubre"
mes = "10"
when "noviembre"
mes = "11"
when "diciembre"
mes = "12"
mes = "0" + mes if mes.length < 2
dobAno = (doc['Año'] || '').toLowerCase()
key = [<KEY>, names, municipality, community, sexo, dobDia, mes, dobAno].join(":").replace(spacePattern, '')
emit key, doc
| true | (doc) ->
if doc.collection is "result" and doc.Completado
hasRequiredFields = doc['Apellido']? and doc['Nombre']? and doc['Municipio']? and doc['BarrioComunidad']? and doc['Sexo']? and doc['Día']? and doc['Mes']? and doc['Año']?
return unless hasRequiredFields
spacePattern = new RegExp(" ", "g")
family = (doc['Apellido'] || '').toLowerCase()
names = (doc['Nombre'] || '').toLowerCase()
municipality = (doc['Municipio'] || '').toLowerCase()
community = (doc['BarrioComunidad'] || '').toLowerCase()
sexo = (doc['Sexo'] || '').toLowerCase()
dobDia = (doc['Día'] || '').toLowerCase()
dobDia = "0" + dobDia if dobDia.length < 2
dobMes = (doc['Mes'] || '').toLowerCase()
mes = dobMes
switch dobMes
when "enero"
mes = "01"
when "febrero"
mes = "02"
when "marzo"
mes = "03"
when "abril"
mes = "04"
when "mayo"
mes = "05"
when "junio"
mes = "06"
when "julio"
mes = "07"
when "agosto"
mes = "08"
when "septiembre"
mes = "09"
when "octubre"
mes = "10"
when "noviembre"
mes = "11"
when "diciembre"
mes = "12"
mes = "0" + mes if mes.length < 2
dobAno = (doc['Año'] || '').toLowerCase()
key = [PI:KEY:<KEY>END_PI, names, municipality, community, sexo, dobDia, mes, dobAno].join(":").replace(spacePattern, '')
emit key, doc
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999121427536011,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/_classes/forum-posts-seek.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @ForumPostsSeek
constructor: (@forum) ->
@tooltip = document.getElementsByClassName('js-forum-posts-seek--tooltip')
@tooltipNumber = document.getElementsByClassName('js-forum-posts-seek-tooltip-number')
@seekbar = document.getElementsByClassName('js-forum__posts-seek')
$(document).on 'mousemove', '.js-forum__posts-seek', @move
$(document).on 'mouseleave', '.js-forum__posts-seek', @hideTooltip
$(document).on 'click', '.js-forum__posts-seek', @click
$(document).on 'click', '.js-forum-posts-seek--jump', @jump
addEventListener 'turbolinks:before-cache', @reset
hideTooltip: =>
return if @tooltip.length == 0
Fade.out @tooltip[0]
move: (e) =>
e.preventDefault()
e.stopPropagation()
@setPostPosition(e.clientX)
Fade.in @tooltip[0]
Timeout.clear @_autohide
@_autohide = Timeout.set 1000, @hideTooltip
click: =>
@forum.jumpTo @postPosition
jump: (e) =>
e.preventDefault()
currentPost = @forum.currentPostPosition
totalPosts = @forum.totalPosts()
$target = $(e.currentTarget)
jumpTarget = $target.attr('data-jump-target')
n = switch jumpTarget
when 'first'
1
when 'last'
totalPosts
when 'previous'
defaultN = currentPost - 10
# avoid jumping beyond loaded posts
minLoadedN = @forum.postPosition @forum.posts[0]
Math.max(defaultN, minLoadedN)
when 'next'
defaultN = currentPost + 10
# avoid jumping beyond loaded posts
maxLoadedN = @forum.postPosition @forum.endPost()
Math.min(defaultN, maxLoadedN)
$target.blur()
@forum.jumpTo n
reset: =>
Timeout.clear @_autohide
@hideTooltip()
setPostPosition: (x) =>
full = @seekbar[0].offsetWidth
position = x / full
totalPosts = @forum.totalPosts()
postPosition = Math.ceil(position * @forum.totalPosts())
postPosition = Math.min(postPosition, totalPosts)
@postPosition = Math.max(postPosition, 1)
@tooltip[0].style.transform = "translateX(#{x}px)"
@tooltipNumber[0].textContent = @postPosition
| 174525 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @ForumPostsSeek
constructor: (@forum) ->
@tooltip = document.getElementsByClassName('js-forum-posts-seek--tooltip')
@tooltipNumber = document.getElementsByClassName('js-forum-posts-seek-tooltip-number')
@seekbar = document.getElementsByClassName('js-forum__posts-seek')
$(document).on 'mousemove', '.js-forum__posts-seek', @move
$(document).on 'mouseleave', '.js-forum__posts-seek', @hideTooltip
$(document).on 'click', '.js-forum__posts-seek', @click
$(document).on 'click', '.js-forum-posts-seek--jump', @jump
addEventListener 'turbolinks:before-cache', @reset
hideTooltip: =>
return if @tooltip.length == 0
Fade.out @tooltip[0]
move: (e) =>
e.preventDefault()
e.stopPropagation()
@setPostPosition(e.clientX)
Fade.in @tooltip[0]
Timeout.clear @_autohide
@_autohide = Timeout.set 1000, @hideTooltip
click: =>
@forum.jumpTo @postPosition
jump: (e) =>
e.preventDefault()
currentPost = @forum.currentPostPosition
totalPosts = @forum.totalPosts()
$target = $(e.currentTarget)
jumpTarget = $target.attr('data-jump-target')
n = switch jumpTarget
when 'first'
1
when 'last'
totalPosts
when 'previous'
defaultN = currentPost - 10
# avoid jumping beyond loaded posts
minLoadedN = @forum.postPosition @forum.posts[0]
Math.max(defaultN, minLoadedN)
when 'next'
defaultN = currentPost + 10
# avoid jumping beyond loaded posts
maxLoadedN = @forum.postPosition @forum.endPost()
Math.min(defaultN, maxLoadedN)
$target.blur()
@forum.jumpTo n
reset: =>
Timeout.clear @_autohide
@hideTooltip()
setPostPosition: (x) =>
full = @seekbar[0].offsetWidth
position = x / full
totalPosts = @forum.totalPosts()
postPosition = Math.ceil(position * @forum.totalPosts())
postPosition = Math.min(postPosition, totalPosts)
@postPosition = Math.max(postPosition, 1)
@tooltip[0].style.transform = "translateX(#{x}px)"
@tooltipNumber[0].textContent = @postPosition
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
# Seek bar for jumping between posts in a forum topic.
#
# Wires delegated document-level handlers for the `.js-forum__posts-seek`
# elements and shows a tooltip with the post number under the cursor.
# `@forum` is the forum controller object providing `totalPosts()`,
# `jumpTo(n)`, `postPosition(post)`, `posts`, `endPost()` and
# `currentPostPosition`.
class @ForumPostsSeek
  constructor: (@forum) ->
    # Live HTMLCollections; the elements may be absent on some pages, so
    # handlers either guard on length or are only reachable via delegation.
    @tooltip = document.getElementsByClassName('js-forum-posts-seek--tooltip')
    @tooltipNumber = document.getElementsByClassName('js-forum-posts-seek-tooltip-number')
    @seekbar = document.getElementsByClassName('js-forum__posts-seek')
    $(document).on 'mousemove', '.js-forum__posts-seek', @move
    $(document).on 'mouseleave', '.js-forum__posts-seek', @hideTooltip
    $(document).on 'click', '.js-forum__posts-seek', @click
    $(document).on 'click', '.js-forum-posts-seek--jump', @jump
    # Hide the tooltip before turbolinks snapshots the page.
    addEventListener 'turbolinks:before-cache', @reset

  # Fade the tooltip out; no-op when the tooltip element isn't on the page.
  hideTooltip: =>
    return if @tooltip.length == 0
    Fade.out @tooltip[0]

  # Follow the cursor along the seek bar: recompute the target post number,
  # show the tooltip, and (re)schedule it to auto-hide after 1s of inactivity.
  move: (e) =>
    e.preventDefault()
    e.stopPropagation()
    @setPostPosition(e.clientX)
    Fade.in @tooltip[0]
    Timeout.clear @_autohide
    @_autohide = Timeout.set 1000, @hideTooltip

  # Jump to the post number currently shown in the tooltip.
  click: =>
    @forum.jumpTo @postPosition

  # Handle the first/last/previous/next buttons (`data-jump-target` attribute).
  # previous/next move by 10 posts but are clamped to the range of posts that
  # are currently loaded in the page.
  jump: (e) =>
    e.preventDefault()
    currentPost = @forum.currentPostPosition
    totalPosts = @forum.totalPosts()
    $target = $(e.currentTarget)
    jumpTarget = $target.attr('data-jump-target')
    n = switch jumpTarget
      when 'first'
        1
      when 'last'
        totalPosts
      when 'previous'
        defaultN = currentPost - 10
        # avoid jumping beyond loaded posts
        minLoadedN = @forum.postPosition @forum.posts[0]
        Math.max(defaultN, minLoadedN)
      when 'next'
        defaultN = currentPost + 10
        # avoid jumping beyond loaded posts
        maxLoadedN = @forum.postPosition @forum.endPost()
        Math.min(defaultN, maxLoadedN)
    $target.blur()
    @forum.jumpTo n

  # Cancel any pending auto-hide and hide the tooltip immediately.
  reset: =>
    Timeout.clear @_autohide
    @hideTooltip()

  # Map a cursor x (viewport coordinate — assumes the seek bar spans the full
  # width from x = 0; TODO confirm against the markup) to a 1-based post
  # number clamped to [1, totalPosts], then position the tooltip under the
  # cursor and update its label.
  setPostPosition: (x) =>
    full = @seekbar[0].offsetWidth
    position = x / full
    totalPosts = @forum.totalPosts()
    # Use the cached count instead of calling @forum.totalPosts() a second time.
    postPosition = Math.ceil(position * totalPosts)
    postPosition = Math.min(postPosition, totalPosts)
    @postPosition = Math.max(postPosition, 1)
    @tooltip[0].style.transform = "translateX(#{x}px)"
    @tooltipNumber[0].textContent = @postPosition
|
[
{
"context": "s\",\"Prov\",\"Eccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"",
"end": 505,
"score": 0.7930408716201782,
"start": 502,
"tag": "NAME",
"value": "Dan"
},
{
"context": "ov\",\"Eccl\",\"Song\",\"Isa\"... | src/ne/spec.coffee | phillipb/Bible-Passage-Reference-Parser | 149 | bcv_parser = require("../../js/ne_bcv_parser.js").bcv_parser
describe "Parsing", ->
	# Round-trip and edge-case specs for the Nepali (ne) build of bcv_parser.
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		# Every protestant-canon OSIS book ID must survive parse() -> osis()
		# as a bare chapter, a chapter:verse, and a verse range.
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		# With ps151_strategy "bc", Ps151 is reported as Psalm 151 instead.
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		# Once the Apocrypha is disabled, none of these books should parse.
		p.include_apocrypha false
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "Gen".
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gen (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("उत्पत्तिको पुस्तक 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("utpattiko pustak 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("उत्पत्तिको 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("utpattiko 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("उत्पत्ति 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
		p.include_apocrypha(false)
		expect(p.parse("उत्पत्तिको पुस्तक 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("UTPATTIKO PUSTAK 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("उत्पत्तिको 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("UTPATTIKO 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("उत्पत्ति 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
		`
		true
describe "Localized book Exod (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "Exod".
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Exod (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("prastʰanko pustak 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("prastʰānko pustak 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थानको पुस्तक 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("prastʰanko 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("prastʰānko 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थानको 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थान 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PRASTʰANKO PUSTAK 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("PRASTʰĀNKO PUSTAK 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थानको पुस्तक 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("PRASTʰANKO 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("PRASTʰĀNKO 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थानको 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थान 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
		`
		true
describe "Localized book Bel (ne)", ->
	# Only the OSIS abbreviation itself is tested for this book.
	p = null
	beforeEach ->
		# Fresh parser per spec with strict matching options; Apocrypha on.
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bel (ne)", ->
		expect(p.parse("Bel 1:1").osis()).toEqual "Bel.1.1"
		true
describe "Localized book Lev (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "Lev".
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lev (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("leviharuko pustak 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("leviharūko pustak 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("levīharuko pustak 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("levīharūko pustak 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवीहरूको पुस्तक 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("leviharuko 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("leviharūko 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("levīharuko 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("levīharūko 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवीहरूको 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवि 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवी 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("LEVIHARUKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVIHARŪKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVĪHARUKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVĪHARŪKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवीहरूको पुस्तक 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVIHARUKO 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVIHARŪKO 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVĪHARUKO 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVĪHARŪKO 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवीहरूको 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवि 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवी 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
		`
		true
describe "Localized book Num (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "Num".
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Num (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("gantiko pustak 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("gantīko pustak 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्तीको पुस्तक 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("gantiko 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("gantīko 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्तीको 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्ती 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
		p.include_apocrypha(false)
		expect(p.parse("GANTIKO PUSTAK 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("GANTĪKO PUSTAK 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्तीको पुस्तक 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("GANTIKO 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("GANTĪKO 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्तीको 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्ती 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
		`
		true
describe "Localized book Sir (ne)", ->
	# Only the OSIS abbreviation itself is tested for this book.
	p = null
	beforeEach ->
		# Fresh parser per spec with strict matching options; Apocrypha on.
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sir (ne)", ->
		expect(p.parse("Sir 1:1").osis()).toEqual "Sir.1.1"
		true
describe "Localized book Wis (ne)", ->
	# Only the OSIS abbreviation itself is tested for this book.
	p = null
	beforeEach ->
		# Fresh parser per spec with strict matching options; Apocrypha on.
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Wis (ne)", ->
		expect(p.parse("Wis 1:1").osis()).toEqual "Wis.1.1"
		true
describe "Localized book Lam (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "Lam".
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lam (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("yarmiyako vilap 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("yarmiyako vilāp 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("yarmiyāko vilap 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("yarmiyāko vilāp 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("यर्मियाको विलाप 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("विलाप 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("YARMIYAKO VILAP 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("YARMIYAKO VILĀP 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("YARMIYĀKO VILAP 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("YARMIYĀKO VILĀP 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("यर्मियाको विलाप 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("विलाप 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
		`
		true
describe "Localized book EpJer (ne)", ->
	# Only the OSIS abbreviation itself is tested for this book.
	p = null
	beforeEach ->
		# Fresh parser per spec with strict matching options; Apocrypha on.
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: EpJer (ne)", ->
		expect(p.parse("EpJer 1:1").osis()).toEqual "EpJer.1.1"
		true
describe "Localized book Rev (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "Rev".
	# The transliteration variants enumerate all long/short vowel combinations.
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rev (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("yuhannalai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("यूहन्नालाई भएको प्रकाश 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("प्रकाश 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("YUHANNALAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("यूहन्नालाई भएको प्रकाश 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("प्रकाश 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
		`
		true
describe "Localized book PrMan (ne)", ->
	# Only the OSIS abbreviation itself is tested for this book.
	p = null
	beforeEach ->
		# Fresh parser per spec with strict matching options; Apocrypha on.
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrMan (ne)", ->
		expect(p.parse("PrMan 1:1").osis()).toEqual "PrMan.1.1"
		true
describe "Localized book Deut (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "Deut".
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Deut (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("vyavastʰako pustak 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("vyavastʰāko pustak 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्थाको पुस्तक 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("vyavastʰako 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("vyavastʰāko 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्थाको 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यावस्था 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्था 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
		p.include_apocrypha(false)
		expect(p.parse("VYAVASTʰAKO PUSTAK 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("VYAVASTʰĀKO PUSTAK 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्थाको पुस्तक 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("VYAVASTʰAKO 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("VYAVASTʰĀKO 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्थाको 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यावस्था 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्था 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
		`
		true
describe "Localized book Josh (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "Josh".
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Josh (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("yahosuko pustak 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahosūko pustak 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahošuko pustak 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahošūko pustak 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशूको पुस्तक 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahosuko 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahosūko 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahošuko 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahošūko 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशूको 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशू 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("YAHOSUKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOSŪKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOŠUKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOŠŪKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशूको पुस्तक 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOSUKO 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOSŪKO 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOŠUKO 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOŠŪKO 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशूको 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशू 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
		`
		true
describe "Localized book Judg (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "Judg".
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Judg (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("nyayakarttaharuko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttaharūko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttāharuko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttāharūko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttaharuko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttaharūko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttāharuko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttāharūko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्त्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttaharuko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttaharūko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttāharuko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttāharūko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttaharuko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttaharūko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttāharuko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttāharūko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्त्ताहरूको 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्ता 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NYAYAKARTTAHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTAHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTĀHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTĀHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTAHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTAHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTĀHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTĀHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्त्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTAHARUKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTAHARŪKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTĀHARUKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTĀHARŪKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTAHARUKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTAHARŪKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTĀHARUKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTĀHARŪKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्त्ताहरूको 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्ता 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
		`
		true
describe "Localized book Ruth (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "Ruth".
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ruth (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("rutʰko pustak 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("rūtʰko pustak 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथको पुस्तक 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("rutʰko 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("rūtʰko 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथको 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथ 1:1").osis()).toEqual("Ruth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RUTʰKO PUSTAK 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RŪTʰKO PUSTAK 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथको पुस्तक 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTʰKO 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RŪTʰKO 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथको 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथ 1:1").osis()).toEqual("Ruth.1.1")
		`
		true
describe "Localized book 1Esd (ne)", ->
	# Only the OSIS abbreviation itself is tested for this book.
	p = null
	beforeEach ->
		# Fresh parser per spec with strict matching options; Apocrypha on.
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Esd (ne)", ->
		expect(p.parse("1Esd 1:1").osis()).toEqual "1Esd.1.1"
		true
describe "Localized book 2Esd (ne)", ->
	# Only the OSIS abbreviation itself is tested for this book.
	p = null
	beforeEach ->
		# Fresh parser per spec with strict matching options; Apocrypha on.
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (ne)", ->
		expect(p.parse("2Esd 1:1").osis()).toEqual "2Esd.1.1"
		true
describe "Localized book Isa (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "Isa".
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("yasəiyako pustak 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yasəiyāko pustak 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yašəiyako pustak 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yašəiyāko pustak 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैयाको पुस्तक 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yasəiyako 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yasəiyāko 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yašəiyako 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yašəiyāko 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैयाको 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("येशैया 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैया 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
		p.include_apocrypha(false)
		expect(p.parse("YASƏIYAKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YASƏIYĀKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YAŠƏIYAKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YAŠƏIYĀKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैयाको पुस्तक 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YASƏIYAKO 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YASƏIYĀKO 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YAŠƏIYAKO 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YAŠƏIYĀKO 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैयाको 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("येशैया 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैया 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
		`
		true
describe "Localized book 2Sam (ne)", ->
	# Generated spec: every Nepali alias below must resolve to OSIS book "2Sam".
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (ne)", ->
		# Backticks embed raw JavaScript so the generated strings run verbatim.
		`
		expect(p.parse("शमूएलको दोस्रो पुस्तक 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. samuelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. samūelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. šamuelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. šamūelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 samuelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 samūelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 šamuelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 šamūelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शामुएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शामुएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शमूएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शमूएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("शमूएलको दोस्रो पुस्तक 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. ŠAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. ŠAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 ŠAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 ŠAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शामुएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शामुएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शमूएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शमूएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
		`
		true
# Generated locale spec: Nepali (ne) spellings of 1 Samuel (including the
# unnumbered "samuelko pustak" forms) must parse to OSIS "1Sam".
describe "Localized book 1Sam (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Sam (ne)", ->
		`
		expect(p.parse("शमूएलको पहिलो पुस्तक 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("samuelko pustak 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("samūelko pustak 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("šamuelko pustak 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("šamūelko pustak 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. samuelko 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. samūelko 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. šamuelko 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. šamūelko 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 samuelko 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 samūelko 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 šamuelko 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 šamūelko 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. शामुएल 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 शामुएल 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. शमूएल 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 शमूएल 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("शमूएलको पहिलो पुस्तक 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("SAMUELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("SAMŪELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("ŠAMUELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("ŠAMŪELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. SAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. SAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. ŠAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. ŠAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 ŠAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 ŠAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. शामुएल 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 शामुएल 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. शमूएल 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 शमूएल 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of 2 Kings (romanized variants
# cover every a/ā and u/ū diacritic combination) must parse to OSIS "2Kgs".
describe "Localized book 2Kgs (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Kgs (ne)", ->
		`
		expect(p.parse("राजाहरूको दोस्रो पुस्तक 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. radzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. radzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. radzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. radzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. rādzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. rādzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. rādzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. rādzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 radzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 radzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 radzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 radzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 rādzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 rādzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 rādzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 rādzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. राजा 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 राजा 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("राजाहरूको दोस्रो पुस्तक 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. RADZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. RADZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. RADZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. RADZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. RĀDZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. RĀDZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. RĀDZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. RĀDZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 RADZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 RADZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 RADZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 RADZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 RĀDZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 RĀDZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 RĀDZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 RĀDZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2. राजा 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 राजा 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of 1 Kings (including the
# unnumbered "radzaharuko pustak" forms) must parse to OSIS "1Kgs".
describe "Localized book 1Kgs (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Kgs (ne)", ->
		`
		expect(p.parse("राजाहरूक पहिल पुस्तक 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("radzaharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("radzaharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("radzāharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("radzāharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("rādzaharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("rādzaharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("rādzāharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("rādzāharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. radzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. radzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. radzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. radzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. rādzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. rādzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. rādzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. rādzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 radzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 radzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 radzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 radzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 rādzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 rādzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 rādzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 rādzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. राजा 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 राजा 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("राजाहरूक पहिल पुस्तक 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("RADZAHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("RADZAHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("RADZĀHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("RADZĀHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("RĀDZAHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("RĀDZAHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("RĀDZĀHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("RĀDZĀHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. RADZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. RADZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. RADZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. RADZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. RĀDZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. RĀDZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. RĀDZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. RĀDZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 RADZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 RADZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 RADZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 RADZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 RĀDZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 RĀDZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 RĀDZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 RĀDZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1. राजा 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 राजा 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of 2 Chronicles must parse to OSIS "2Chr".
describe "Localized book 2Chr (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Chr (ne)", ->
		`
		expect(p.parse("इतिहासको दोस्रो पुस्तक 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. itihasko 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. itihāsko 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 itihasko 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 itihāsko 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. इतिहास 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 इतिहास 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("इतिहासको दोस्रो पुस्तक 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. ITIHASKO 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. ITIHĀSKO 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 ITIHASKO 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 ITIHĀSKO 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2. इतिहास 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 इतिहास 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of 1 Chronicles must parse to OSIS "1Chr".
describe "Localized book 1Chr (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Chr (ne)", ->
		`
		expect(p.parse("इतिहासको पहिलो पुस्तक 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("itihasko pustak 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("itihāsko pustak 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. itihasko 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. itihāsko 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 itihasko 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 itihāsko 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. इतिहास 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 इतिहास 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("इतिहासको पहिलो पुस्तक 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("ITIHASKO PUSTAK 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("ITIHĀSKO PUSTAK 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. ITIHASKO 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. ITIHĀSKO 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 ITIHASKO 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 ITIHĀSKO 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1. इतिहास 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 इतिहास 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of Ezra must parse to OSIS "Ezra".
describe "Localized book Ezra (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezra (ne)", ->
		`
		expect(p.parse("एज्राको पुस्तक 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("एज्राको 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("edzrako 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("edzrāko 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("एज्रा 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("एज्राको पुस्तक 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("एज्राको 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("EDZRAKO 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("EDZRĀKO 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("एज्रा 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of Nehemiah must parse to OSIS "Neh".
describe "Localized book Neh (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Neh (ne)", ->
		`
		expect(p.parse("nahemyahko pustak 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("nahemyāhko pustak 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("नहेम्याहको पुस्तक 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("nahemyahko 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("nahemyāhko 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("नहेम्याहको 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("नहेम्याह 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("NAHEMYAHKO PUSTAK 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NAHEMYĀHKO PUSTAK 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("नहेम्याहको पुस्तक 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NAHEMYAHKO 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NAHEMYĀHKO 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("नहेम्याहको 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("नहेम्याह 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: no localized Nepali (ne) alias for Greek Esther here;
# only the OSIS abbreviation "GkEsth" itself is checked.
describe "Localized book GkEsth (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: GkEsth (ne)", ->
		`
		expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of Esther must parse to OSIS "Esth".
describe "Localized book Esth (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Esth (ne)", ->
		`
		expect(p.parse("estarko pustak 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("एस्तरको पुस्तक 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("estarko 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("एस्तरको 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("एस्तर 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("ESTARKO PUSTAK 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("एस्तरको पुस्तक 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("ESTARKO 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("एस्तरको 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("एस्तर 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of Job must parse to OSIS "Job".
describe "Localized book Job (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Job (ne)", ->
		`
		expect(p.parse("अय्यूबको पुस्तक 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("ayyubko pustak 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("ayyūbko pustak 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("अय्यूबको 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("ayyubko 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("ayyūbko 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("अय्यूब 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("अय्यूबको पुस्तक 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("AYYUBKO PUSTAK 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("AYYŪBKO PUSTAK 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("अय्यूबको 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("AYYUBKO 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("AYYŪBKO 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("अय्यूब 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of Psalms must parse to OSIS "Ps".
# Note the modifier letter ʰ stays lower case even in the "upper-case" variants.
describe "Localized book Ps (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ps (ne)", ->
		`
		expect(p.parse("bʰadzansamgrah 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("bʰadzansaṃgrah 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("भजनसंग्रह 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("भजनसग्रह 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("भजन 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("BʰADZANSAMGRAH 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("BʰADZANSAṂGRAH 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("भजनसंग्रह 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("भजनसग्रह 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("भजन 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: no localized Nepali (ne) alias for the Prayer of
# Azariah here; only the OSIS abbreviation "PrAzar" itself is checked.
describe "Localized book PrAzar (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrAzar (ne)", ->
		`
		expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of Proverbs must parse to OSIS "Prov".
describe "Localized book Prov (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Prov (ne)", ->
		`
		expect(p.parse("hitopadesko pustak 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("hitopadeško pustak 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("हितोपदेशको पुस्तक 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("hitopadesko 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("hitopadeško 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("हितोपदेशको 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("हितोपदेश 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("HITOPADESKO PUSTAK 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("HITOPADEŠKO PUSTAK 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("हितोपदेशको पुस्तक 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("HITOPADESKO 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("HITOPADEŠKO 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("हितोपदेशको 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("हितोपदेश 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of Ecclesiastes must parse to OSIS "Eccl".
describe "Localized book Eccl (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eccl (ne)", ->
		`
		expect(p.parse("upadesakko pustak 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("upadešakko pustak 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("उपदेशकको पुस्तक 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("upadesakko 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("upadešakko 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("उपदेशकको 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("उपदेशक 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("UPADESAKKO PUSTAK 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("UPADEŠAKKO PUSTAK 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("उपदेशकको पुस्तक 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("UPADESAKKO 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("UPADEŠAKKO 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("उपदेशकको 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("उपदेशक 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: no localized Nepali (ne) alias for the Song of the
# Three here; only the OSIS abbreviation "SgThree" itself is checked.
describe "Localized book SgThree (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: SgThree (ne)", ->
		`
		expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of Song of Solomon must parse to
# OSIS "Song". The romanized list enumerates every combination of suleman/sulemān,
# s/š, s/ṣ, t/ṭ and git/gīt; ʰ stays lower case in the upper-case variants.
describe "Localized book Song (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Song (ne)", ->
		`
		expect(p.parse("sulemanko srestʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko srestʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko sresṭʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko sresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko sreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko sreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko sreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko sreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko šrestʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko šrestʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko šresṭʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko šresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko šreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko šreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko šreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemanko šreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko srestʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko srestʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko sresṭʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko sresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko sreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko sreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko sreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko sreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko šrestʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko šrestʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko šresṭʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko šresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko šreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko šreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko šreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("sulemānko šreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("सुलेमानको श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("SULEMANKO SRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO SRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO SRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO SRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO SREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO SREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO SREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO SREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO ŠRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO ŠRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO ŠRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO ŠRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO ŠREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO ŠREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO ŠREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMANKO ŠREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO SRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO SRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO SRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO SRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO SREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO SREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO SREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO SREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO ŠRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO ŠRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO ŠRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO ŠRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO ŠREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO ŠREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO ŠREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SULEMĀNKO ŠREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("सुलेमानको श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of Jeremiah must parse to OSIS "Jer".
describe "Localized book Jer (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jer (ne)", ->
		`
		expect(p.parse("yarmiyako pustak 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("yarmiyāko pustak 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("यर्मियाको पुस्तक 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("yarmiyako 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("yarmiyāko 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("यर्मियाको 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("यर्मिया 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("YARMIYAKO PUSTAK 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("YARMIYĀKO PUSTAK 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("यर्मियाको पुस्तक 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("YARMIYAKO 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("YARMIYĀKO 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("यर्मियाको 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("यर्मिया 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Generated locale spec: Nepali (ne) spellings of Ezekiel must parse to OSIS "Ezek".
describe "Localized book Ezek (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezek (ne)", ->
		`
		expect(p.parse("idzakielko pustak 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("इजकिएलको पुस्तक 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("idzakielko 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("इजकिएलको 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("इजकिएल 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
		// Same names in upper case, re-checked with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("IDZAKIELKO PUSTAK 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("इजकिएलको पुस्तक 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("IDZAKIELKO 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("इजकिएलको 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("इजकिएल 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
		`
		true # explicit return value; keeps the embedded JS from being the it-block's result
# Nepali (ne) spellings of Daniel — Devanagari plus romanized/diacritic variants — must
# all parse to OSIS "Dan.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Dan (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Dan (ne)", ->
		`
		expect(p.parse("daniyalko pustak 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("dāniyalko pustak 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("दानियलको पुस्तक 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("daniyalko 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("dāniyalko 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("दानियलको 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("दानियल 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
		p.include_apocrypha(false)
		expect(p.parse("DANIYALKO PUSTAK 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DĀNIYALKO PUSTAK 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("दानियलको पुस्तक 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DANIYALKO 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DĀNIYALKO 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("दानियलको 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("दानियल 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Hosea — Devanagari plus romanized variants (s/š) — must
# all parse to OSIS "Hos.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Hos (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hos (ne)", ->
		`
		expect(p.parse("होशेको पुस्तक 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("hose 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("hoše 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("होशे 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
		p.include_apocrypha(false)
		expect(p.parse("होशेको पुस्तक 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("HOSE 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("HOŠE 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("होशे 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Joel — Devanagari and romanized — must all parse to
# OSIS "Joel.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Joel (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Joel (ne)", ->
		`
		expect(p.parse("योएलको पुस्तक 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("yoel 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("योएल 1:1").osis()).toEqual("Joel.1.1")
		p.include_apocrypha(false)
		expect(p.parse("योएलको पुस्तक 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("YOEL 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("योएल 1:1").osis()).toEqual("Joel.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Amos — Devanagari plus romanized/diacritic variants — must
# all parse to OSIS "Amos.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Amos (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Amos (ne)", ->
		`
		expect(p.parse("आमोसको पुस्तक 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("amos 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("āmos 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("अमोस 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("आमोस 1:1").osis()).toEqual("Amos.1.1")
		p.include_apocrypha(false)
		expect(p.parse("आमोसको पुस्तक 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
		// duplicate of the previous line: "Amos" and "amos" upper-case to the same string
		expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("ĀMOS 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("अमोस 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("आमोस 1:1").osis()).toEqual("Amos.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Obadiah — Devanagari plus romanized/diacritic variants —
# must all parse to OSIS "Obad.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Obad (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Obad (ne)", ->
		`
		expect(p.parse("ओबदियाको पुस्तक 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("obadiya 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("obadiyā 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("ओबदिया 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ओबदियाको पुस्तक 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("OBADIYA 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("OBADIYĀ 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("ओबदिया 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Jonah — Devanagari plus romanized/diacritic variants —
# must all parse to OSIS "Jonah.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Jonah (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jonah (ne)", ->
		`
		expect(p.parse("योनाको पुस्तक 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("yona 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("yonā 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("योना 1:1").osis()).toEqual("Jonah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("योनाको पुस्तक 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("YONA 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("YONĀ 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("योना 1:1").osis()).toEqual("Jonah.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Micah — Devanagari plus all four romanized vowel-length
# combinations (i/ī × a/ā) — must all parse to OSIS "Mic.1.1"; upper-case repeats
# run with the Apocrypha disabled.
describe "Localized book Mic (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (ne)", ->
		`
		expect(p.parse("मीकाको पुस्तक 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("mika 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("mikā 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("mīka 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("mīkā 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("मिका 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("मीका 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
		p.include_apocrypha(false)
		expect(p.parse("मीकाको पुस्तक 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIKA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIKĀ 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MĪKA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MĪKĀ 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("मिका 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("मीका 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Nahum — Devanagari plus romanized/diacritic variants —
# must all parse to OSIS "Nah.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Nah (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (ne)", ->
		`
		expect(p.parse("नहूमको पुस्तक 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("nahum 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("nahūm 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("नहूम 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("नहूमको पुस्तक 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("NAHUM 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("NAHŪM 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("नहूम 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Habakkuk — Devanagari plus romanized/diacritic variants —
# must all parse to OSIS "Hab.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Hab (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (ne)", ->
		`
		expect(p.parse("हबकूकको पुस्तक 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("habakuk 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("habakūk 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("हबकूक 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
		p.include_apocrypha(false)
		expect(p.parse("हबकूकको पुस्तक 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("HABAKUK 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("HABAKŪK 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("हबकूक 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Zephaniah — Devanagari plus romanized/diacritic variants —
# must all parse to OSIS "Zeph.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Zeph (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (ne)", ->
		`
		expect(p.parse("सपन्याहको पुस्तक 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("sapanyah 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("sapanyāh 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("सपन्याह 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
		p.include_apocrypha(false)
		expect(p.parse("सपन्याहको पुस्तक 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("SAPANYAH 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("SAPANYĀH 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("सपन्याह 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Haggai — Devanagari plus romanized variants (with schwa ə) —
# must all parse to OSIS "Hag.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Hag (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (ne)", ->
		`
		expect(p.parse("हाग्गैको पुस्तक 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("haggəi 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("hāggəi 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("हाग्गै 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
		p.include_apocrypha(false)
		expect(p.parse("हाग्गैको पुस्तक 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HAGGƏI 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HĀGGƏI 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("हाग्गै 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Zechariah — Devanagari plus romanized/diacritic variants —
# must all parse to OSIS "Zech.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Zech (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (ne)", ->
		`
		expect(p.parse("जकरियाको पुस्तक 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("jakariya 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("jakariyā 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("जकरिया 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
		p.include_apocrypha(false)
		expect(p.parse("जकरियाको पुस्तक 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("JAKARIYA 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("JAKARIYĀ 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("जकरिया 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Malachi — Devanagari plus all four romanized vowel-length
# combinations (a/ā × i/ī) — must all parse to OSIS "Mal.1.1"; upper-case repeats
# run with the Apocrypha disabled.
describe "Localized book Mal (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mal (ne)", ->
		`
		expect(p.parse("मलाकीको पुस्तक 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("malaki 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("malakī 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("malāki 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("malākī 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("मलाकी 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("मलकी 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
		p.include_apocrypha(false)
		expect(p.parse("मलाकीको पुस्तक 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MALAKI 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MALAKĪ 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MALĀKI 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MALĀKĪ 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("मलाकी 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("मलकी 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Matthew — the full gospel title ("mattile lekʰeko susmacar"
# in every i/ī × a/ā diacritic combination), Devanagari titles, and short forms — must
# all parse to OSIS "Matt.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book Matt (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Matt (ne)", ->
		`
		expect(p.parse("mattile lekʰeko susmacar 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("mattile lekʰeko susmacār 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("mattile lekʰeko susmācar 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("mattile lekʰeko susmācār 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("mattīle lekʰeko susmacar 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("mattīle lekʰeko susmacār 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("mattīle lekʰeko susmācar 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("mattīle lekʰeko susmācār 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("मत्तीले लेखेको सुसमाचार 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("मत्तीको सुसमाचार 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("mattile 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("mattīle 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("मत्तीले 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("मत्ति 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("मत्ती 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MATTILE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTILE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTILE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTILE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTĪLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTĪLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTĪLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTĪLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("मत्तीले लेखेको सुसमाचार 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("मत्तीको सुसमाचार 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTILE 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTĪLE 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("मत्तीले 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("मत्ति 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("मत्ती 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Mark — the full gospel title ("markusle lekʰeko susmacar"
# in every u/ū × a/ā diacritic combination), Devanagari titles, and short forms
# (including transposed/variant spellings) — must all parse to OSIS "Mark.1.1";
# upper-case repeats run with the Apocrypha disabled.
describe "Localized book Mark (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (ne)", ->
		`
		expect(p.parse("markusle lekʰeko susmacar 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("markusle lekʰeko susmacār 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("markusle lekʰeko susmācar 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("markusle lekʰeko susmācār 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("markūsle lekʰeko susmacar 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("markūsle lekʰeko susmacār 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("markūsle lekʰeko susmācar 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("markūsle lekʰeko susmācār 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कूसले लेखेको सुसमाचार 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कूसको सुसमाचार 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("markusle 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("markūsle 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कूसले 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कुस 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कूश 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कूस 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("र्मकूस 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("र्मकस 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MARKUSLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKUSLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKUSLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKUSLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKŪSLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKŪSLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKŪSLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKŪSLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कूसले लेखेको सुसमाचार 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कूसको सुसमाचार 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKUSLE 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKŪSLE 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कूसले 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कुस 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कूश 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("मर्कूस 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("र्मकूस 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("र्मकस 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of Luke — the full gospel title ("lukale lekʰeko susmacar"
# in every u/ū × a/ā diacritic combination, 16 permutations), Devanagari titles, and
# short forms — must all parse to OSIS "Luke.1.1"; upper-case repeats run with the
# Apocrypha disabled.
describe "Localized book Luke (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Luke (ne)", ->
		`
		expect(p.parse("lukale lekʰeko susmacar 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lukale lekʰeko susmacār 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lukale lekʰeko susmācar 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lukale lekʰeko susmācār 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lukāle lekʰeko susmacar 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lukāle lekʰeko susmacār 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lukāle lekʰeko susmācar 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lukāle lekʰeko susmācār 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lūkale lekʰeko susmacar 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lūkale lekʰeko susmacār 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lūkale lekʰeko susmācar 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lūkale lekʰeko susmācār 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lūkāle lekʰeko susmacar 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lūkāle lekʰeko susmacār 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lūkāle lekʰeko susmācar 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lūkāle lekʰeko susmācār 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("लूकाले लेखेको सुसमाचार 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("लूकाको सुसमाचार 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lukale 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lukāle 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lūkale 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("lūkāle 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("लूकाले 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("लुका 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("लूका 1:1").osis()).toEqual("Luke.1.1")
		p.include_apocrypha(false)
		expect(p.parse("LUKALE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKALE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKALE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKALE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKĀLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKĀLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKĀLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKĀLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LŪKALE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LŪKALE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LŪKALE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LŪKALE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LŪKĀLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LŪKĀLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LŪKĀLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LŪKĀLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("लूकाले लेखेको सुसमाचार 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("लूकाको सुसमाचार 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKALE 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKĀLE 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LŪKALE 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LŪKĀLE 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("लूकाले 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("लुका 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("लूका 1:1").osis()).toEqual("Luke.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of 1 John — the full epistle title ("yuhannako pahilo patra"
# in every u/ū × a/ā diacritic combination), Devanagari titles, and numbered short
# forms with and without a trailing period ("1." / "1") — must all parse to OSIS
# "1John.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book 1John (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1John (ne)", ->
		`
		expect(p.parse("yuhannako pahilo patra 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("yuhannāko pahilo patra 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("yūhannako pahilo patra 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("yūhannāko pahilo patra 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("यूहन्नाको पहिलो पत्र 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. yuhannako 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. yuhannāko 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. yūhannako 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. yūhannāko 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. यूहन्नाको 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 yuhannako 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 yuhannāko 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 yūhannako 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 yūhannāko 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 यूहन्नाको 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. यूहन्ना 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 यूहन्ना 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("YUHANNAKO PAHILO PATRA 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("YUHANNĀKO PAHILO PATRA 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("YŪHANNAKO PAHILO PATRA 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("YŪHANNĀKO PAHILO PATRA 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("यूहन्नाको पहिलो पत्र 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. YUHANNAKO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. YUHANNĀKO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. YŪHANNAKO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. YŪHANNĀKO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. यूहन्नाको 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 YUHANNAKO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 YUHANNĀKO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 YŪHANNAKO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 YŪHANNĀKO 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 यूहन्नाको 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. यूहन्ना 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 यूहन्ना 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of 2 John — two Devanagari epistle titles ("दोस्त्रो"/"दोस्रो"
# variants), the romanized title ("yuhannako dostro patra" in every u/ū × a/ā
# diacritic combination), and numbered short forms ("2." / "2") — must all parse to
# OSIS "2John.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book 2John (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (ne)", ->
		`
		expect(p.parse("यूहन्नाको दोस्त्रो पत्र 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("yuhannako dostro patra 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("yuhannāko dostro patra 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("yūhannako dostro patra 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("yūhannāko dostro patra 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("यूहन्नाको दोस्रो पत्र 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. yuhannako 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. yuhannāko 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. yūhannako 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. yūhannāko 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. यूहन्नाको 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 yuhannako 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 yuhannāko 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 yūhannako 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 yūhannāko 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 यूहन्नाको 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. यूहन्ना 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 यूहन्ना 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("यूहन्नाको दोस्त्रो पत्र 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("YUHANNAKO DOSTRO PATRA 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("YUHANNĀKO DOSTRO PATRA 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("YŪHANNAKO DOSTRO PATRA 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("YŪHANNĀKO DOSTRO PATRA 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("यूहन्नाको दोस्रो पत्र 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. YUHANNAKO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. YUHANNĀKO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. YŪHANNAKO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. YŪHANNĀKO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. यूहन्नाको 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 YUHANNAKO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 YUHANNĀKO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 YŪHANNAKO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 YŪHANNĀKO 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 यूहन्नाको 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. यूहन्ना 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 यूहन्ना 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
# Nepali (ne) spellings of 3 John — two Devanagari epistle titles ("तेस्त्रो"/"तेस्रो"
# variants), the romanized title ("yuhannako testro patra" in every u/ū × a/ā
# diacritic combination), and numbered short forms ("3." / "3") — must all parse to
# OSIS "3John.1.1"; upper-case repeats run with the Apocrypha disabled.
describe "Localized book 3John (ne)", ->
	p = {}
	# Fresh parser per spec, configured for strict book matching and compact "b.c" OSIS output.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (ne)", ->
		`
		expect(p.parse("यूहन्नाको तेस्त्रो पत्र 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("yuhannako testro patra 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("yuhannāko testro patra 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("yūhannako testro patra 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("yūhannāko testro patra 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("यूहन्नाको तेस्रो पत्र 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. yuhannako 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. yuhannāko 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. yūhannako 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. yūhannāko 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. यूहन्नाको 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 yuhannako 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 yuhannāko 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 yūhannako 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 yūhannāko 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 यूहन्नाको 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. यूहन्ना 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 यूहन्ना 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("यूहन्नाको तेस्त्रो पत्र 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("YUHANNAKO TESTRO PATRA 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("YUHANNĀKO TESTRO PATRA 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("YŪHANNAKO TESTRO PATRA 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("YŪHANNĀKO TESTRO PATRA 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("यूहन्नाको तेस्रो पत्र 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. YUHANNAKO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. YUHANNĀKO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. YŪHANNAKO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. YŪHANNĀKO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. यूहन्नाको 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 YUHANNAKO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 YUHANNĀKO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 YŪHANNAKO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 YŪHANNĀKO 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 यूहन्नाको 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. यूहन्ना 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 यूहन्ना 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
		`
		true # suppress implicit return of the embedded-JS expression
describe "Localized book John (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: John (ne)", ->
`
expect(p.parse("yuhannale lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannale lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannale lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannale lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannāle lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannāle lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannāle lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannāle lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannale lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannale lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannale lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannale lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannāle lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannāle lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannāle lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannāle lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाले लेखेको सुसमाचार 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाको सुसमाचार 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannale 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannāle 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannale 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannāle 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाले 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहान्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यहून्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("युहन्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
p.include_apocrypha(false)
expect(p.parse("YUHANNALE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNALE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNALE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNALE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNĀLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNĀLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNĀLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNĀLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNALE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNALE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNALE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNALE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाले लेखेको सुसमाचार 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाको सुसमाचार 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNALE 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNĀLE 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNALE 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNĀLE 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाले 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहान्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यहून्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("युहन्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
`
true
describe "Localized book Acts (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Acts (ne)", ->
`
expect(p.parse("preritharuka kam 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharuka kām 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharukā kam 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharukā kām 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharūka kam 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharūka kām 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharūkā kam 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharūkā kām 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("प्रेरितहरूका काम 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("प्रेरित 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
p.include_apocrypha(false)
expect(p.parse("PRERITHARUKA KAM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARUKA KĀM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARUKĀ KAM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARUKĀ KĀM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARŪKA KAM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARŪKA KĀM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARŪKĀ KAM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARŪKĀ KĀM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("प्रेरितहरूका काम 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("प्रेरित 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
`
true
describe "Localized book Rom (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rom (ne)", ->
`
expect(p.parse("रोमीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulai patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulaī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulāi patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulāī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlai patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlaī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlāi patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlāī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulai patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulaī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulāi patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulāī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlai patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlaī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlāi patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlāī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमीहरूलाई पत्र 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulai 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulaī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulāi 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulāī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlai 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlaī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlāi 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlāī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulai 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulaī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulāi 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulāī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlai 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlaī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlāi 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlāī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमीहरूलाई 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमी 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
p.include_apocrypha(false)
expect(p.parse("रोमीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULAI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLAI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULAI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLAI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमीहरूलाई पत्र 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULAI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULAĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULĀI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULĀĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLAI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLAĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLĀI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLĀĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULAI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULAĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULĀI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULĀĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLAI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLAĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLĀI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLĀĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमीहरूलाई 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमी 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
`
true
describe "Localized book 2Cor (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Cor (ne)", ->
`
expect(p.parse("कोरिन्थीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharulai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharulaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharulāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharulāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharūlai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharūlaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharūlāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharūlāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharulai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharulaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharulāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharulāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharūlai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharūlaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharūlāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharūlāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("कोरिन्थीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharulai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharulaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharulāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharulāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharūlai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharūlaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharūlāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharūlāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharulai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharulaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharulāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharulāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharūlai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharūlaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharūlāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharūlāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharulai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharulaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharulāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharulāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharūlai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharūlaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharūlāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharūlāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharulai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharulaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharulāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharulāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharūlai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharūlaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharūlāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharūlāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("कोरिन्थीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("कोरिन्थीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARULAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARULAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARULAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARULAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
`
true
describe "Localized book 1Cor (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Cor (ne)", ->
`
expect(p.parse("कोरिन्थीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharulai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharulaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharulāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharulāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharūlai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharūlaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharūlāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharūlāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharulai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharulaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharulāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharulāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharūlai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharūlaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharūlāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharūlāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("कोरिन्थीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharulai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharulaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharulāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharulāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharūlai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharūlaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharūlāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharūlāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharulai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharulaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharulāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharulāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharūlai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharūlaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharūlāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharūlāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharulai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharulaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharulāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharulāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharūlai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharūlaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharūlāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharūlāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharulai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharulaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharulāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharulāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharūlai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharūlaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharūlāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharūlāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("कोरिन्थीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("कोरिन्थीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARULAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARULAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARULAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARULAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
`
true
describe "Localized book Gal (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gal (ne)", ->
`
expect(p.parse("गलातीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलाती 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
p.include_apocrypha(false)
expect(p.parse("गलातीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलाती 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
`
true
describe "Localized book Eph (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eph (ne)", ->
`
expect(p.parse("एफिसीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharulai patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharulaī patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharulāi patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharulāī patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharūlai patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharūlaī patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharūlāi patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharūlāī patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharulai patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharulaī patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharulāi patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharulāī patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharūlai patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharūlaī patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharūlāi patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharūlāī patra 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("एफिसीहरूलाई पत्र 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharulai 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharulaī 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharulāi 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharulāī 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharūlai 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharūlaī 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharūlāi 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisiharūlāī 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharulai 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharulaī 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharulāi 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharulāī 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharūlai 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharūlaī 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharūlāi 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("epʰisīharūlāī 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("एफिसीहरूलाई 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("एफिसी 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
p.include_apocrypha(false)
expect(p.parse("एफिसीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARULAI PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARULAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARULĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARULĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARŪLAI PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARŪLAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARŪLĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARULAI PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARULAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARULĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARULĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARŪLAI PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARŪLĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("एफिसीहरूलाई पत्र 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARULAI 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARULAĪ 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARULĀI 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARULĀĪ 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARŪLAI 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARŪLAĪ 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARŪLĀI 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISIHARŪLĀĪ 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARULAI 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARULAĪ 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARULĀI 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARULĀĪ 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARŪLAI 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARŪLAĪ 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARŪLĀI 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPʰISĪHARŪLĀĪ 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("एफिसीहरूलाई 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("एफिसी 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
`
true
describe "Localized book Phil (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phil (ne)", ->
`
expect(p.parse("फिलिप्पीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharulai patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharulaī patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharulāi patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharulāī patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharūlai patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharūlaī patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharūlāi patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharūlāī patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharulai patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharulaī patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharulāi patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharulāī patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharūlai patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharūlaī patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharūlāi patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharūlāī patra 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("फिलिप्पीहरूलाई पत्र 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharulai 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharulaī 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharulāi 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharulāī 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharūlai 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharūlaī 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharūlāi 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippiharūlāī 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharulai 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharulaī 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharulāi 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharulāī 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharūlai 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharūlaī 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharūlāi 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("pʰilippīharūlāī 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("फिलिप्पीहरूलाई 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("फिलिप्पी 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
p.include_apocrypha(false)
expect(p.parse("फिलिप्पीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARULAI PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARULAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARULĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARULĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARŪLAI PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARŪLAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARŪLĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARULAI PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARULAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARULĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARULĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARŪLAI PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARŪLĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("फिलिप्पीहरूलाई पत्र 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARULAI 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARULAĪ 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARULĀI 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARULĀĪ 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARŪLAI 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARŪLAĪ 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARŪLĀI 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPIHARŪLĀĪ 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARULAI 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARULAĪ 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARULĀI 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARULĀĪ 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARŪLAI 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARŪLAĪ 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARŪLĀI 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PʰILIPPĪHARŪLĀĪ 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("फिलिप्पीहरूलाई 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("फिलिप्पी 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
`
true
describe "Localized book Col (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Col (ne)", ->
`
expect(p.parse("कलस्सीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharulai patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharulaī patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharulāi patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharulāī patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharūlai patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharūlaī patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharūlāi patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharūlāī patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharulai patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharulaī patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharulāi patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharulāī patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharūlai patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharūlaī patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharūlāi patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharūlāī patra 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("कलस्सीहरूलाई पत्र 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharulai 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharulaī 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharulāi 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharulāī 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharūlai 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharūlaī 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharūlāi 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassiharūlāī 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharulai 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharulaī 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharulāi 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharulāī 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharūlai 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharūlaī 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharūlāi 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("kalassīharūlāī 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("कलस्सीहरूलाई 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("कलस्सी 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
p.include_apocrypha(false)
expect(p.parse("कलस्सीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARULAI PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARULAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARULĀI PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARULĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARŪLAI PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARŪLAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARŪLĀI PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARULAI PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARULAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARULĀI PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARULĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARŪLAI PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARŪLĀI PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("कलस्सीहरूलाई पत्र 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARULAI 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARULAĪ 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARULĀI 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARULĀĪ 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARŪLAI 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARŪLAĪ 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARŪLĀI 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSIHARŪLĀĪ 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARULAI 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARULAĪ 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARULĀI 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARULĀĪ 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARŪLAI 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARŪLAĪ 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARŪLĀI 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KALASSĪHARŪLĀĪ 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("कलस्सीहरूलाई 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("कलस्सी 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
`
true
describe "Localized book 2Thess (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Thess (ne)", ->
`
expect(p.parse("थिस्सलोनिकीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikiharulai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikiharulaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikiharulāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikiharulāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikiharūlai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikiharūlaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikiharūlāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikiharūlāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikīharulai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikīharulaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikīharulāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikīharulāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikīharūlai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikīharūlaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikīharūlāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("tʰissalonikīharūlāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("थिस्सलोनिकीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikiharulai 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikiharulaī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikiharulāi 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikiharulāī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikiharūlai 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikiharūlaī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikiharūlāi 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikiharūlāī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikīharulai 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikīharulaī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikīharulāi 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikīharulāī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikīharūlai 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikīharūlaī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikīharūlāi 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. tʰissalonikīharūlāī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikiharulai 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikiharulaī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikiharulāi 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikiharulāī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikiharūlai 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikiharūlaī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikiharūlāi 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikiharūlāī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikīharulai 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikīharulaī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikīharulāi 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikīharulāī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikīharūlai 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikīharūlaī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikīharūlāi 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 tʰissalonikīharūlāī 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("थिस्सलोनिकीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("थिस्सलोनिकीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKIHARULAI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKIHARULAI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
`
true
describe "Localized book 1Thess (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Thess (ne)", ->
`
expect(p.parse("थिस्सलोनिकीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("थिस्सलोनिकीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("थिस्सलोनिकीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("थिस्सलोनिकीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
`
true
# Auto-generated spec: every known ne (Nepali) spelling of 2 Timothy —
# Devanagari and romanized transliterations (all diacritic variants), plus
# the bare OSIS abbreviation — must parse to "2Tim.1.1".
describe "Localized book 2Tim (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strict strategies avoid accidental matches,
		# and the Apocrypha is enabled so the full book set is recognized.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Tim (ne)", ->
		# Backticks embed raw JavaScript so the generated `expect` calls run
		# unchanged. The second half repeats the names in UPPERCASE with the
		# Apocrypha disabled to verify case-insensitive matching still works.
		`
		expect(p.parse("तिमोथीलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("timotʰilai dostro patra 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("timotʰilaī dostro patra 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("timotʰilāi dostro patra 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("timotʰilāī dostro patra 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("timotʰīlai dostro patra 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("timotʰīlaī dostro patra 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("timotʰīlāi dostro patra 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("timotʰīlāī dostro patra 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("तिमोथीलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. timotʰilai 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. timotʰilaī 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. timotʰilāi 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. timotʰilāī 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. timotʰīlai 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. timotʰīlaī 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. timotʰīlāi 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. timotʰīlāī 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 timotʰilai 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 timotʰilaī 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 timotʰilāi 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 timotʰilāī 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 timotʰīlai 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 timotʰīlaī 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 timotʰīlāi 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 timotʰīlāī 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. तिमोथीलाई 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 तिमोथीलाई 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. तिमोथी 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 तिमोथी 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("तिमोथीलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TIMOTʰILAI DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TIMOTʰILAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TIMOTʰILĀI DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TIMOTʰILĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TIMOTʰĪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TIMOTʰĪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TIMOTʰĪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TIMOTʰĪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("तिमोथीलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTʰILAI 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTʰILAĪ 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTʰILĀI 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTʰILĀĪ 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTʰĪLAI 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTʰĪLAĪ 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTʰĪLĀI 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTʰĪLĀĪ 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTʰILAI 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTʰILAĪ 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTʰILĀI 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTʰILĀĪ 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTʰĪLAI 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTʰĪLAĪ 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTʰĪLĀI 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTʰĪLĀĪ 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. तिमोथीलाई 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 तिमोथीलाई 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. तिमोथी 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 तिमोथी 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
		`
		# Explicit CoffeeScript return value so the `it` callback does not
		# implicitly return the last embedded-JS expression.
		true
# Auto-generated spec: every known ne (Nepali) spelling of 1 Timothy —
# Devanagari and romanized transliterations (all diacritic variants), plus
# the bare OSIS abbreviation — must parse to "1Tim.1.1".
# NOTE(review): "तिमोथीलाईर्" below carries a trailing र् — presumably
# intentional generated test data; confirm against the localization source.
describe "Localized book 1Tim (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strict strategies avoid accidental matches,
		# and the Apocrypha is enabled so the full book set is recognized.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Tim (ne)", ->
		# Backticks embed raw JavaScript; the second half re-checks the
		# UPPERCASE forms with the Apocrypha disabled.
		`
		expect(p.parse("तिमोथीलाईर् पावलको पहिलो पत्र 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("timotʰilai pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("timotʰilaī pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("timotʰilāi pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("timotʰilāī pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("timotʰīlai pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("timotʰīlaī pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("timotʰīlāi pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("timotʰīlāī pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("तिमोथीलाई पहिलो पत्र 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. timotʰilai 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. timotʰilaī 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. timotʰilāi 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. timotʰilāī 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. timotʰīlai 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. timotʰīlaī 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. timotʰīlāi 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. timotʰīlāī 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 timotʰilai 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 timotʰilaī 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 timotʰilāi 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 timotʰilāī 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 timotʰīlai 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 timotʰīlaī 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 timotʰīlāi 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 timotʰīlāī 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. तिमोथीलाई 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 तिमोथीलाई 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. तिमोथी 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 तिमोथी 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("तिमोथीलाईर् पावलको पहिलो पत्र 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("TIMOTʰILAI PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("TIMOTʰILAĪ PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("TIMOTʰILĀI PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("TIMOTʰILĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("TIMOTʰĪLAI PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("TIMOTʰĪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("TIMOTʰĪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("TIMOTʰĪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("तिमोथीलाई पहिलो पत्र 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTʰILAI 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTʰILAĪ 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTʰILĀI 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTʰILĀĪ 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTʰĪLAI 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTʰĪLAĪ 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTʰĪLĀI 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTʰĪLĀĪ 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTʰILAI 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTʰILAĪ 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTʰILĀI 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTʰILĀĪ 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTʰĪLAI 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTʰĪLAĪ 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTʰĪLĀI 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTʰĪLĀĪ 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. तिमोथीलाई 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 तिमोथीलाई 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. तिमोथी 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 तिमोथी 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
		`
		# Explicit CoffeeScript return value so the `it` callback does not
		# implicitly return the last embedded-JS expression.
		true
# Auto-generated spec: every known ne (Nepali) spelling of Titus —
# Devanagari and romanized transliterations (all diacritic variants), plus
# the bare OSIS abbreviation — must parse to "Titus.1.1".
describe "Localized book Titus (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strict strategies avoid accidental matches,
		# and the Apocrypha is enabled so the full book set is recognized.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Titus (ne)", ->
		# Backticks embed raw JavaScript; the second half re-checks the
		# UPPERCASE forms with the Apocrypha disabled.
		`
		expect(p.parse("तीतसलाई पावलको पत्र 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("titaslai patra 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("titaslaī patra 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("titaslāi patra 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("titaslāī patra 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("tītaslai patra 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("tītaslaī patra 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("tītaslāi patra 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("tītaslāī patra 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("तीतसलाई पत्र 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("titaslai 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("titaslaī 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("titaslāi 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("titaslāī 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("tītaslai 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("tītaslaī 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("tītaslāi 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("tītaslāī 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("तीतसलाई 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("तीतस 1:1").osis()).toEqual("Titus.1.1")
		p.include_apocrypha(false)
		expect(p.parse("तीतसलाई पावलको पत्र 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITASLAI PATRA 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITASLAĪ PATRA 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITASLĀI PATRA 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITASLĀĪ PATRA 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TĪTASLAI PATRA 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TĪTASLAĪ PATRA 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TĪTASLĀI PATRA 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TĪTASLĀĪ PATRA 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("तीतसलाई पत्र 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITASLAI 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITASLAĪ 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITASLĀI 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITASLĀĪ 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TĪTASLAI 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TĪTASLAĪ 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TĪTASLĀI 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TĪTASLĀĪ 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("तीतसलाई 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("तीतस 1:1").osis()).toEqual("Titus.1.1")
		`
		# Explicit CoffeeScript return value so the `it` callback does not
		# implicitly return the last embedded-JS expression.
		true
# Auto-generated spec: every known ne (Nepali) spelling of Philemon —
# Devanagari and romanized transliterations (all diacritic variants), plus
# the bare OSIS abbreviation — must parse to "Phlm.1.1".
describe "Localized book Phlm (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strict strategies avoid accidental matches,
		# and the Apocrypha is enabled so the full book set is recognized.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phlm (ne)", ->
		# Backticks embed raw JavaScript; the second half re-checks the
		# UPPERCASE forms with the Apocrypha disabled.
		`
		expect(p.parse("फिलेमोनलाई पावलको पत्र 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("pʰilemonlai patra 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("pʰilemonlaī patra 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("pʰilemonlāi patra 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("pʰilemonlāī patra 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("फिलेमोनलाई पत्र 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("pʰilemonlai 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("pʰilemonlaī 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("pʰilemonlāi 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("pʰilemonlāī 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("फिलेमोनलाई 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("फिलेमोन 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
		p.include_apocrypha(false)
		expect(p.parse("फिलेमोनलाई पावलको पत्र 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PʰILEMONLAI PATRA 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PʰILEMONLAĪ PATRA 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PʰILEMONLĀI PATRA 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PʰILEMONLĀĪ PATRA 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("फिलेमोनलाई पत्र 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PʰILEMONLAI 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PʰILEMONLAĪ 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PʰILEMONLĀI 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PʰILEMONLĀĪ 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("फिलेमोनलाई 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("फिलेमोन 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
		`
		# Explicit CoffeeScript return value so the `it` callback does not
		# implicitly return the last embedded-JS expression.
		true
# Auto-generated spec: every known ne (Nepali) spelling of Hebrews —
# Devanagari and romanized transliterations (all diacritic variants), plus
# the bare OSIS abbreviation — must parse to "Heb.1.1".
describe "Localized book Heb (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strict strategies avoid accidental matches,
		# and the Apocrypha is enabled so the full book set is recognized.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Heb (ne)", ->
		# Backticks embed raw JavaScript; the second half re-checks the
		# UPPERCASE forms with the Apocrypha disabled.
		`
		expect(p.parse("hibruharuko nimti patra 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("hibruharūko nimti patra 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("hibrūharuko nimti patra 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("hibrūharūko nimti patra 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("हिब्रूहरूको निम्ति पत्र 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("हिब्रूहरूको निम्ति 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("hibruharuko nimti 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("hibruharūko nimti 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("hibrūharuko nimti 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("hibrūharūko nimti 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("हिब्रू 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HIBRUHARUKO NIMTI PATRA 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HIBRUHARŪKO NIMTI PATRA 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HIBRŪHARUKO NIMTI PATRA 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HIBRŪHARŪKO NIMTI PATRA 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("हिब्रूहरूको निम्ति पत्र 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("हिब्रूहरूको निम्ति 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HIBRUHARUKO NIMTI 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HIBRUHARŪKO NIMTI 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HIBRŪHARUKO NIMTI 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HIBRŪHARŪKO NIMTI 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("हिब्रू 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
		`
		# Explicit CoffeeScript return value so the `it` callback does not
		# implicitly return the last embedded-JS expression.
		true
# Auto-generated spec: every known ne (Nepali) spelling of James —
# Devanagari and romanized transliterations (all diacritic variants), plus
# the bare OSIS abbreviation — must parse to "Jas.1.1".
describe "Localized book Jas (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strict strategies avoid accidental matches,
		# and the Apocrypha is enabled so the full book set is recognized.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jas (ne)", ->
		# Backticks embed raw JavaScript; the second half re-checks the
		# UPPERCASE forms with the Apocrypha disabled.
		`
		expect(p.parse("yakubko patra 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("yakūbko patra 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("yākubko patra 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("yākūbko patra 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("याकूबको पत्र 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("yakubko 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("yakūbko 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("yākubko 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("yākūbko 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("याकूबको 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("याकूब 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
		p.include_apocrypha(false)
		expect(p.parse("YAKUBKO PATRA 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("YAKŪBKO PATRA 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("YĀKUBKO PATRA 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("YĀKŪBKO PATRA 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("याकूबको पत्र 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("YAKUBKO 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("YAKŪBKO 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("YĀKUBKO 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("YĀKŪBKO 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("याकूबको 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("याकूब 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
		`
		# Explicit CoffeeScript return value so the `it` callback does not
		# implicitly return the last embedded-JS expression.
		true
# Auto-generated spec: every known ne (Nepali) spelling of 2 Peter —
# Devanagari and romanized forms, plus the bare OSIS abbreviation — must
# parse to "2Pet.1.1".
describe "Localized book 2Pet (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strict strategies avoid accidental matches,
		# and the Apocrypha is enabled so the full book set is recognized.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Pet (ne)", ->
		# Backticks embed raw JavaScript; the second half re-checks the
		# UPPERCASE forms with the Apocrypha disabled.
		`
		expect(p.parse("पत्रुसको दोस्त्रो पत्र 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("patrusko dostro patra 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("पत्रुसको दोस्रो पत्र 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. patrusko 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. पत्रुसको 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 patrusko 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 पत्रुसको 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. पत्रुस 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 पत्रुस 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("पत्रुसको दोस्त्रो पत्र 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("PATRUSKO DOSTRO PATRA 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("पत्रुसको दोस्रो पत्र 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. PATRUSKO 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. पत्रुसको 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PATRUSKO 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 पत्रुसको 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. पत्रुस 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 पत्रुस 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
		`
		# Explicit CoffeeScript return value so the `it` callback does not
		# implicitly return the last embedded-JS expression.
		true
# Auto-generated spec: every known ne (Nepali) spelling of 1 Peter —
# Devanagari and romanized forms, plus the bare OSIS abbreviation — must
# parse to "1Pet.1.1".
describe "Localized book 1Pet (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strict strategies avoid accidental matches,
		# and the Apocrypha is enabled so the full book set is recognized.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Pet (ne)", ->
		# Backticks embed raw JavaScript; the second half re-checks the
		# UPPERCASE forms with the Apocrypha disabled.
		`
		expect(p.parse("patrusko pahilo patra 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("पत्रुसको पहिलो पत्र 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. patrusko 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. पत्रुसको 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 patrusko 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 पत्रुसको 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. पत्रुस 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 पत्रुस 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PATRUSKO PAHILO PATRA 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("पत्रुसको पहिलो पत्र 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. PATRUSKO 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. पत्रुसको 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PATRUSKO 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 पत्रुसको 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. पत्रुस 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 पत्रुस 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
		`
		# Explicit CoffeeScript return value so the `it` callback does not
		# implicitly return the last embedded-JS expression.
		true
# Auto-generated spec: every known ne (Nepali) spelling of Jude —
# Devanagari and romanized transliterations (all diacritic variants), plus
# the bare OSIS abbreviation — must parse to "Jude.1.1".
describe "Localized book Jude (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; strict strategies avoid accidental matches,
		# and the Apocrypha is enabled so the full book set is recognized.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jude (ne)", ->
		# Backticks embed raw JavaScript; the second half re-checks the
		# UPPERCASE forms with the Apocrypha disabled.
		`
		expect(p.parse("yahudako patra 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("yahudāko patra 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("yahūdako patra 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("yahūdāko patra 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("यहूदाको पत्र 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("yahudako 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("yahudāko 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("yahūdako 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("yahūdāko 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("यहूदाको 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("यहूदा 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
		p.include_apocrypha(false)
		expect(p.parse("YAHUDAKO PATRA 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("YAHUDĀKO PATRA 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("YAHŪDAKO PATRA 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("YAHŪDĀKO PATRA 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("यहूदाको पत्र 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("YAHUDAKO 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("YAHUDĀKO 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("YAHŪDAKO 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("YAHŪDĀKO 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("यहूदाको 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("यहूदा 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
		`
		# Explicit CoffeeScript return value so the `it` callback does not
		# implicitly return the last embedded-JS expression.
		true
# Auto-generated spec: Tobit has no ne (Nepali) localized names, so only
# the canonical OSIS abbreviation itself must parse (Apocrypha enabled).
describe "Localized book Tob (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser; Apocrypha must be on for deuterocanonical books.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Tob (ne)", ->
		# Backticks embed the generated JavaScript expectation verbatim.
		`
		expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
		`
		# Explicit return so the `it` callback does not return the JS block.
		true
# Auto-generated spec: Judith has no ne (Nepali) localized names, so only
# the canonical OSIS abbreviation itself must parse (Apocrypha enabled).
describe "Localized book Jdt (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser; Apocrypha must be on for deuterocanonical books.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jdt (ne)", ->
		# Backticks embed the generated JavaScript expectation verbatim.
		`
		expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
		`
		# Explicit return so the `it` callback does not return the JS block.
		true
# Auto-generated spec: Baruch has no ne (Nepali) localized names, so only
# the canonical OSIS abbreviation itself must parse (Apocrypha enabled).
describe "Localized book Bar (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser; Apocrypha must be on for deuterocanonical books.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bar (ne)", ->
		# Backticks embed the generated JavaScript expectation verbatim.
		`
		expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
		`
		# Explicit return so the `it` callback does not return the JS block.
		true
# Auto-generated spec: Susanna has no ne (Nepali) localized names, so only
# the canonical OSIS abbreviation itself must parse (Apocrypha enabled).
describe "Localized book Sus (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser; Apocrypha must be on for deuterocanonical books.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sus (ne)", ->
		# Backticks embed the generated JavaScript expectation verbatim.
		`
		expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
		`
		# Explicit return so the `it` callback does not return the JS block.
		true
# Auto-generated spec: 2 Maccabees has no ne (Nepali) localized names, so
# only the canonical OSIS abbreviation itself must parse (Apocrypha enabled).
describe "Localized book 2Macc (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser; Apocrypha must be on for deuterocanonical books.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Macc (ne)", ->
		# Backticks embed the generated JavaScript expectation verbatim.
		`
		expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
		`
		# Explicit return so the `it` callback does not return the JS block.
		true
# Auto-generated spec: 3 Maccabees has no ne (Nepali) localized names, so
# only the canonical OSIS abbreviation itself must parse (Apocrypha enabled).
describe "Localized book 3Macc (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser; Apocrypha must be on for deuterocanonical books.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3Macc (ne)", ->
		# Backticks embed the generated JavaScript expectation verbatim.
		`
		expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
		`
		# Explicit return so the `it` callback does not return the JS block.
		true
# Auto-generated spec: 4 Maccabees has no ne (Nepali) localized names, so
# only the canonical OSIS abbreviation itself must parse (Apocrypha enabled).
describe "Localized book 4Macc (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser; Apocrypha must be on for deuterocanonical books.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 4Macc (ne)", ->
		# Backticks embed the generated JavaScript expectation verbatim.
		`
		expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
		`
		# Explicit return so the `it` callback does not return the JS block.
		true
# Auto-generated spec: 1 Maccabees has no ne (Nepali) localized names, so
# only the canonical OSIS abbreviation itself must parse (Apocrypha enabled).
describe "Localized book 1Macc (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser; Apocrypha must be on for deuterocanonical books.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Macc (ne)", ->
		# Backticks embed the generated JavaScript expectation verbatim.
		`
		expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
		`
		# Explicit return so the `it` callback does not return the JS block.
		true
# Auto-generated spec: cross-cutting ne (Nepali) behavior — language tag,
# ranges, chapter/verse keywords, "and", titles, "ff", translation tags,
# book ranges, and word-boundary handling.
describe "Miscellaneous tests", ->
	p = {}
	beforeEach ->
		# Fresh parser per test with the standard strict strategies.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should return the expected language", ->
		expect(p.languages).toEqual ["ne"]
	it "should handle ranges (ne)", ->
		expect(p.parse("Titus 1:1 - 2").osis()).toEqual "Titus.1.1-Titus.1.2"
		expect(p.parse("Matt 1-2").osis()).toEqual "Matt.1-Matt.2"
		expect(p.parse("Phlm 2 - 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
	it "should handle chapters (ne)", ->
		expect(p.parse("Titus 1:1, chapter 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 CHAPTER 6").osis()).toEqual "Matt.3.4,Matt.6"
	it "should handle verses (ne)", ->
		expect(p.parse("Exod 1:1 verse 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm VERSE 6").osis()).toEqual "Phlm.1.6"
	it "should handle 'and' (ne)", ->
		expect(p.parse("Exod 1:1 and 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 AND 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
	it "should handle titles (ne)", ->
		# A psalm "title" is normalized to verse 1 of that psalm.
		expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
		expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
	it "should handle 'ff' (ne)", ->
		# "ff" extends the reference to the end of the chapter/book.
		expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
		expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
	it "should handle translations (ne)", ->
		expect(p.parse("Lev 1 (ERV)").osis_and_translations()).toEqual [["Lev.1", "ERV"]]
		expect(p.parse("lev 1 erv").osis_and_translations()).toEqual [["Lev.1", "ERV"]]
	it "should handle book ranges (ne)", ->
		# Book-range support requires looser book strategies than beforeEach sets.
		p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
		expect(p.parse("1 - 3 yuhannako").osis()).toEqual "1John.1-3John.1"
		expect(p.parse("1 - 3 yuhannāko").osis()).toEqual "1John.1-3John.1"
		expect(p.parse("1 - 3 yūhannako").osis()).toEqual "1John.1-3John.1"
		expect(p.parse("1 - 3 yūhannāko").osis()).toEqual "1John.1-3John.1"
	it "should handle boundaries (ne)", ->
		p.set_options {book_alone_strategy: "full"}
		# Em dashes and curly quotes must count as word boundaries.
		expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
		expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
# Load the Nepali (ne) build of the Bible-reference parser under test.
# (Fix: a stray dataset artifact "| 222687 | " was fused onto this line,
# which made the require statement a syntax error.)
bcv_parser = require("../../js/ne_bcv_parser.js").bcv_parser
# Round-trip sanity checks: every canonical OSIS book / chapter / verse /
# range string must parse back to itself.
# (Fix: the OSIS book list had been corrupted by "<NAME>" anonymization
# artifacts; the canonical sequence Dan..Mal and Matt..John is restored,
# consistent with the surviving fragments "<NAME>mos" = Amos and
# "<NAME>ad" = Obad.)
describe "Parsing", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		# Ps151 can also be reported as Psalm 151 of the Psalter.
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		# With the Apocrypha disabled, these books must not match at all.
		p.include_apocrypha false
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
# Generated spec: every Nepali name/transliteration of Genesis, in both
# mixed case and upper case, must resolve to OSIS "Gen".
describe "Localized book Gen (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gen (ne)", ->
		`
		expect(p.parse("उत्पत्तिको पुस्तक 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("utpattiko pustak 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("उत्पत्तिको 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("utpattiko 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("उत्पत्ति 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
		p.include_apocrypha(false)
		expect(p.parse("उत्पत्तिको पुस्तक 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("UTPATTIKO PUSTAK 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("उत्पत्तिको 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("UTPATTIKO 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("उत्पत्ति 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
		`
		true
# Generated spec: Nepali names/transliterations of Exodus must resolve to
# OSIS "Exod" in mixed and upper case.
describe "Localized book Exod (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Exod (ne)", ->
		`
		expect(p.parse("prastʰanko pustak 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("prastʰānko pustak 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थानको पुस्तक 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("prastʰanko 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("prastʰānko 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थानको 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थान 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PRASTʰANKO PUSTAK 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("PRASTʰĀNKO PUSTAK 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थानको पुस्तक 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("PRASTʰANKO 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("PRASTʰĀNKO 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थानको 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("प्रस्थान 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
		`
		true
# Generated spec: no Nepali localization for Bel; only the OSIS name parses.
describe "Localized book Bel (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bel (ne)", ->
		`
		expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
		`
		true
# Generated spec: Nepali names/transliterations of Leviticus must resolve to
# OSIS "Lev" in mixed and upper case.
describe "Localized book Lev (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lev (ne)", ->
		`
		expect(p.parse("leviharuko pustak 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("leviharūko pustak 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("levīharuko pustak 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("levīharūko pustak 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवीहरूको पुस्तक 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("leviharuko 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("leviharūko 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("levīharuko 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("levīharūko 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवीहरूको 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवि 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवी 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("LEVIHARUKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVIHARŪKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVĪHARUKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVĪHARŪKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवीहरूको पुस्तक 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVIHARUKO 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVIHARŪKO 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVĪHARUKO 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVĪHARŪKO 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवीहरूको 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवि 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("लेवी 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
		`
		true
# Generated spec: Nepali names/transliterations of Numbers must resolve to
# OSIS "Num" in mixed and upper case.
describe "Localized book Num (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Num (ne)", ->
		`
		expect(p.parse("gantiko pustak 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("gantīko pustak 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्तीको पुस्तक 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("gantiko 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("gantīko 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्तीको 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्ती 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
		p.include_apocrypha(false)
		expect(p.parse("GANTIKO PUSTAK 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("GANTĪKO PUSTAK 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्तीको पुस्तक 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("GANTIKO 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("GANTĪKO 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्तीको 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("गन्ती 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
		`
		true
# Generated spec: no Nepali localization for Sirach; only the OSIS name parses.
describe "Localized book Sir (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sir (ne)", ->
		`
		expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
		`
		true
# Generated spec: no Nepali localization for Wisdom; only the OSIS name parses.
describe "Localized book Wis (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Wis (ne)", ->
		`
		expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
		`
		true
# Generated spec: Nepali names/transliterations of Lamentations must resolve
# to OSIS "Lam" in mixed and upper case.
describe "Localized book Lam (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lam (ne)", ->
		`
		expect(p.parse("yarmiyako vilap 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("yarmiyako vilāp 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("yarmiyāko vilap 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("yarmiyāko vilāp 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("यर्मियाको विलाप 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("विलाप 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("YARMIYAKO VILAP 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("YARMIYAKO VILĀP 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("YARMIYĀKO VILAP 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("YARMIYĀKO VILĀP 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("यर्मियाको विलाप 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("विलाप 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
		`
		true
# Generated spec: no Nepali localization for the Epistle of Jeremiah; only
# the OSIS name parses.
describe "Localized book EpJer (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: EpJer (ne)", ->
		`
		expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
		`
		true
# Generated spec: all transliteration permutations of the Nepali title of
# Revelation ("yūhannālāī bhaeko prakāš", with every a/ā, i/ī, u/ū, s/š
# combination) must resolve to OSIS "Rev" in mixed and upper case.
describe "Localized book Rev (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rev (ne)", ->
		`
		expect(p.parse("yuhannalai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannalāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yuhannālāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannalāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("yūhannālāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("यूहन्नालाई भएको प्रकाश 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("प्रकाश 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("YUHANNALAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("यूहन्नालाई भएको प्रकाश 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("प्रकाश 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
		`
		true
# Generated spec: no Nepali localization for the Prayer of Manasseh; only
# the OSIS name parses.
describe "Localized book PrMan (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrMan (ne)", ->
		`
		expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
		`
		true
# Generated spec: Nepali names/transliterations of Deuteronomy must resolve
# to OSIS "Deut" in mixed and upper case.
describe "Localized book Deut (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Deut (ne)", ->
		`
		expect(p.parse("vyavastʰako pustak 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("vyavastʰāko pustak 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्थाको पुस्तक 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("vyavastʰako 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("vyavastʰāko 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्थाको 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यावस्था 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्था 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
		p.include_apocrypha(false)
		expect(p.parse("VYAVASTʰAKO PUSTAK 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("VYAVASTʰĀKO PUSTAK 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्थाको पुस्तक 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("VYAVASTʰAKO 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("VYAVASTʰĀKO 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्थाको 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यावस्था 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("व्यवस्था 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
		`
		true
# Generated spec: Nepali names/transliterations of Joshua must resolve to
# OSIS "Josh" in mixed and upper case.
# (Fix: the describe/it titles had been corrupted to the placeholder
# "<NAME>" by an anonymization pass; the assertions all target "Josh",
# so the titles are restored to match the generator's "Localized book
# Josh (ne)" convention used by every sibling block.)
describe "Localized book Josh (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Josh (ne)", ->
		`
		expect(p.parse("yahosuko pustak 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahosūko pustak 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahošuko pustak 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahošūko pustak 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशूको पुस्तक 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahosuko 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahosūko 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahošuko 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("yahošūko 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशूको 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशू 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("YAHOSUKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOSŪKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOŠUKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOŠŪKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशूको पुस्तक 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOSUKO 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOSŪKO 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOŠUKO 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("YAHOŠŪKO 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशूको 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("यहोशू 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
		`
		true
# Generated spec: Nepali names/transliterations of Judges must resolve to
# OSIS "Judg" in mixed and upper case.
describe "Localized book Judg (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Judg (ne)", ->
		`
		expect(p.parse("nyayakarttaharuko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttaharūko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttāharuko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttāharūko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttaharuko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttaharūko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttāharuko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttāharūko pustak 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्त्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttaharuko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttaharūko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttāharuko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyayakarttāharūko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttaharuko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttaharūko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttāharuko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("nyāyakarttāharūko 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्त्ताहरूको 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्ता 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NYAYAKARTTAHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTAHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTĀHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTĀHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTAHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTAHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTĀHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTĀHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्त्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTAHARUKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTAHARŪKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTĀHARUKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYAYAKARTTĀHARŪKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTAHARUKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTAHARŪKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTĀHARUKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("NYĀYAKARTTĀHARŪKO 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्त्ताहरूको 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("न्यायकर्ता 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
		`
		true
# Generated spec: Nepali names/transliterations of Ruth must resolve to
# OSIS "Ruth" in mixed and upper case.
describe "Localized book Ruth (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ruth (ne)", ->
		`
		expect(p.parse("rutʰko pustak 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("rūtʰko pustak 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथको पुस्तक 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("rutʰko 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("rūtʰko 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथको 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथ 1:1").osis()).toEqual("Ruth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RUTʰKO PUSTAK 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RŪTʰKO PUSTAK 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथको पुस्तक 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTʰKO 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RŪTʰKO 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथको 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("रूथ 1:1").osis()).toEqual("Ruth.1.1")
		`
		true
# Generated spec: no Nepali localization for 1 Esdras; only the OSIS name parses.
describe "Localized book 1Esd (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Esd (ne)", ->
		`
		expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
		`
		true
# Generated spec: no Nepali localization for 2 Esdras; only the OSIS name parses.
describe "Localized book 2Esd (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (ne)", ->
		`
		expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
		`
		true
# Generated spec: Nepali names/transliterations of Isaiah must resolve to
# OSIS "Isa" in mixed and upper case.
describe "Localized book Isa (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (ne)", ->
		`
		expect(p.parse("yasəiyako pustak 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yasəiyāko pustak 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yašəiyako pustak 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yašəiyāko pustak 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैयाको पुस्तक 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yasəiyako 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yasəiyāko 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yašəiyako 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("yašəiyāko 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैयाको 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("येशैया 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैया 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
		p.include_apocrypha(false)
		expect(p.parse("YASƏIYAKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YASƏIYĀKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YAŠƏIYAKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YAŠƏIYĀKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैयाको पुस्तक 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YASƏIYAKO 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YASƏIYĀKO 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YAŠƏIYAKO 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("YAŠƏIYĀKO 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैयाको 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("येशैया 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("यशैया 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
		`
		true
# Generated spec: Nepali names/transliterations of 2 Samuel — with and
# without a dot after the ordinal "2" — must resolve to OSIS "2Sam" in
# mixed and upper case.
describe "Localized book 2Sam (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (ne)", ->
		`
		expect(p.parse("शमूएलको दोस्रो पुस्तक 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. samuelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. samūelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. šamuelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. šamūelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 samuelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 samūelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 šamuelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 šamūelko 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शामुएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शामुएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शमूएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शमूएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("शमूएलको दोस्रो पुस्तक 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. ŠAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. ŠAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 ŠAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 ŠAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शामुएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शामुएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. शमूएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 शमूएल 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
		`
		true
# Generated spec: every Nepali (ne) alias for 1 Samuel must normalize to OSIS "1Sam".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book 1Sam (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Sam (ne)", ->
`
expect(p.parse("शमूएलको पहिलो पुस्तक 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("samuelko pustak 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("samūelko pustak 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("šamuelko pustak 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("šamūelko pustak 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. samuelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. samūelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. šamuelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. šamūelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 samuelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 samūelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 šamuelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 šamūelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शामुएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शामुएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शमूएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शमूएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("शमूएलको पहिलो पुस्तक 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("SAMUELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("SAMŪELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("ŠAMUELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("ŠAMŪELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. ŠAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. ŠAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 ŠAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 ŠAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शामुएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शामुएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शमूएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शमूएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
`
true
# Generated spec: every Nepali (ne) alias for 2 Kings must normalize to OSIS "2Kgs".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book 2Kgs (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (ne)", ->
`
expect(p.parse("राजाहरूको दोस्रो पुस्तक 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. radzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. radzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. radzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. radzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. rādzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. rādzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. rādzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. rādzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 radzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 radzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 radzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 radzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 rādzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 rādzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 rādzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 rādzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. राजा 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 राजा 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("राजाहरूको दोस्रो पुस्तक 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RADZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RADZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RADZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RADZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RĀDZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RĀDZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RĀDZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RĀDZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RADZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RADZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RADZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RADZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RĀDZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RĀDZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RĀDZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RĀDZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. राजा 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 राजा 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
# Generated spec: every Nepali (ne) alias for 1 Kings must normalize to OSIS "1Kgs".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book 1Kgs (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (ne)", ->
`
expect(p.parse("राजाहरूक पहिल पुस्तक 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("radzaharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("radzaharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("radzāharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("radzāharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("rādzaharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("rādzaharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("rādzāharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("rādzāharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. radzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. radzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. radzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. radzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. rādzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. rādzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. rādzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. rādzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 radzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 radzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 radzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 radzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 rādzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 rādzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 rādzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 rādzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. राजा 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 राजा 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("राजाहरूक पहिल पुस्तक 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RADZAHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RADZAHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RADZĀHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RADZĀHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RĀDZAHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RĀDZAHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RĀDZĀHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RĀDZĀHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RADZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RADZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RADZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RADZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RĀDZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RĀDZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RĀDZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RĀDZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RADZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RADZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RADZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RADZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RĀDZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RĀDZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RĀDZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RĀDZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. राजा 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 राजा 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
`
true
# Generated spec: every Nepali (ne) alias for 2 Chronicles must normalize to OSIS "2Chr".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book 2Chr (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Chr (ne)", ->
`
expect(p.parse("इतिहासको दोस्रो पुस्तक 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. itihasko 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. itihāsko 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 itihasko 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 itihāsko 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. इतिहास 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 इतिहास 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("इतिहासको दोस्रो पुस्तक 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. ITIHASKO 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. ITIHĀSKO 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 ITIHASKO 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 ITIHĀSKO 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. इतिहास 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 इतिहास 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
`
true
# Generated spec: every Nepali (ne) alias for 1 Chronicles must normalize to OSIS "1Chr".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book 1Chr (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Chr (ne)", ->
`
expect(p.parse("इतिहासको पहिलो पुस्तक 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("itihasko pustak 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("itihāsko pustak 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. itihasko 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. itihāsko 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 itihasko 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 itihāsko 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. इतिहास 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 इतिहास 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("इतिहासको पहिलो पुस्तक 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("ITIHASKO PUSTAK 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("ITIHĀSKO PUSTAK 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. ITIHASKO 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. ITIHĀSKO 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 ITIHASKO 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 ITIHĀSKO 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. इतिहास 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 इतिहास 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
`
true
# Generated spec: every Nepali (ne) alias for Ezra must normalize to OSIS "Ezra".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book Ezra (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezra (ne)", ->
`
expect(p.parse("एज्राको पुस्तक 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("एज्राको 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("edzrako 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("edzrāko 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("एज्रा 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("एज्राको पुस्तक 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("एज्राको 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EDZRAKO 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EDZRĀKO 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("एज्रा 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
`
true
# Generated spec: every Nepali (ne) alias for Nehemiah must normalize to OSIS "Neh".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book Neh (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Neh (ne)", ->
`
expect(p.parse("nahemyahko pustak 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("nahemyāhko pustak 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याहको पुस्तक 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("nahemyahko 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("nahemyāhko 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याहको 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याह 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("NAHEMYAHKO PUSTAK 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NAHEMYĀHKO PUSTAK 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याहको पुस्तक 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NAHEMYAHKO 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NAHEMYĀHKO 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याहको 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याह 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
`
true
# Generated spec: no Nepali (ne) aliases exist for Greek Esther; only the
# default OSIS abbreviation "GkEsth" is expected to parse.
describe "Localized book GkEsth (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: GkEsth (ne)", ->
`
expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
`
true
# Generated spec: every Nepali (ne) alias for Esther must normalize to OSIS "Esth".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book Esth (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Esth (ne)", ->
`
expect(p.parse("estarko pustak 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तरको पुस्तक 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("estarko 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तरको 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तर 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("ESTARKO PUSTAK 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तरको पुस्तक 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTARKO 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तरको 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तर 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
`
true
# Generated spec: every Nepali (ne) alias for Job must normalize to OSIS "Job".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book Job (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Job (ne)", ->
`
expect(p.parse("अय्यूबको पुस्तक 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("ayyubko pustak 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("ayyūbko pustak 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("अय्यूबको 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("ayyubko 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("ayyūbko 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("अय्यूब 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("अय्यूबको पुस्तक 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("AYYUBKO PUSTAK 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("AYYŪBKO PUSTAK 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("अय्यूबको 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("AYYUBKO 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("AYYŪBKO 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("अय्यूब 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
`
true
# Generated spec: every Nepali (ne) alias for Psalms must normalize to OSIS "Ps".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book Ps (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ps (ne)", ->
`
expect(p.parse("bʰadzansamgrah 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("bʰadzansaṃgrah 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजनसंग्रह 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजनसग्रह 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजन 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("BʰADZANSAMGRAH 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("BʰADZANSAṂGRAH 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजनसंग्रह 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजनसग्रह 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजन 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
`
true
# Generated spec: no Nepali (ne) aliases exist for the Prayer of Azariah; only
# the default OSIS abbreviation "PrAzar" is expected to parse.
describe "Localized book PrAzar (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrAzar (ne)", ->
`
expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
`
true
# Generated spec: every Nepali (ne) alias for Proverbs must normalize to OSIS "Prov".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book Prov (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Prov (ne)", ->
`
expect(p.parse("hitopadesko pustak 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("hitopadeško pustak 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेशको पुस्तक 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("hitopadesko 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("hitopadeško 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेशको 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेश 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("HITOPADESKO PUSTAK 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("HITOPADEŠKO PUSTAK 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेशको पुस्तक 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("HITOPADESKO 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("HITOPADEŠKO 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेशको 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेश 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
`
true
# Generated spec: every Nepali (ne) alias for Ecclesiastes must normalize to OSIS "Eccl".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book Eccl (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eccl (ne)", ->
`
expect(p.parse("upadesakko pustak 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("upadešakko pustak 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशकको पुस्तक 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("upadesakko 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("upadešakko 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशकको 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशक 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("UPADESAKKO PUSTAK 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("UPADEŠAKKO PUSTAK 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशकको पुस्तक 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("UPADESAKKO 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("UPADEŠAKKO 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशकको 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशक 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
`
true
# Generated spec: no Nepali (ne) aliases exist for the Song of the Three Young
# Men; only the default OSIS abbreviation "SgThree" is expected to parse.
describe "Localized book SgThree (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: SgThree (ne)", ->
`
expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
`
true
# Generated spec: every Nepali (ne) alias for Song of Solomon must normalize to
# OSIS "Song". The many Latin-script variants cover each combination of the
# transliteration marks (s/š, t/ṭ, s/ṣ, i/ī, a/ā). The backtick section is
# embedded JavaScript; after include_apocrypha(false) the Latin-script aliases
# are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book Song (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Song (ne)", ->
`
expect(p.parse("sulemanko srestʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko srestʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sresṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šrestʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šrestʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šresṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko srestʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko srestʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sresṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šrestʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šrestʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šresṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("सुलेमानको श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("SULEMANKO SRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("सुलेमानको श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
`
true
# Generated spec: every Nepali (ne) alias for Jeremiah must normalize to OSIS "Jer".
# Fix: the `it` description contained the literal placeholder "<NAME>" (a
# generation/anonymization artifact); every sibling block uses the book's OSIS
# code, so it is restored to "Jer". Test strings and expectations are unchanged.
describe "Localized book Jer (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jer (ne)", ->
`
expect(p.parse("yarmiyako pustak 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("yarmiyāko pustak 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मियाको पुस्तक 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("yarmiyako 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("yarmiyāko 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मियाको 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मिया 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("YARMIYAKO PUSTAK 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("YARMIYĀKO PUSTAK 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मियाको पुस्तक 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("YARMIYAKO 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("YARMIYĀKO 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मियाको 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मिया 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
`
true
# Generated spec: every Nepali (ne) alias for Ezekiel must normalize to OSIS "Ezek".
# The backtick section is embedded JavaScript; after include_apocrypha(false) the
# Latin-script aliases are re-tested upper-cased to confirm case-insensitive matching.
describe "Localized book Ezek (ne)", ->
p = {}
# Fresh parser per test with strict strategies so only explicit book+verse refs match.
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezek (ne)", ->
`
expect(p.parse("idzakielko pustak 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("इजकिएलको पुस्तक 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("idzakielko 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("इजकिएलको 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("इजकिएल 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
// Re-run Latin-script aliases upper-cased with the Apocrypha disabled.
p.include_apocrypha(false)
expect(p.parse("IDZAKIELKO PUSTAK 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("इजकिएलको पुस्तक 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("IDZAKIELKO 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("इजकिएलको 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("इजकिएल 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
`
true
describe "Localized book <NAME> (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: <NAME> (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["daniyalko pustak", "dāniyalko pustak", "दानियलको पुस्तक", "daniyalko", "dāniyalko", "दानियलको", "दानियल", "Dan"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Dan.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["DANIYALKO PUSTAK", "DĀNIYALKO PUSTAK", "दानियलको पुस्तक", "DANIYALKO", "DĀNIYALKO", "दानियलको", "दानियल", "DAN"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Dan.1.1"
		true
describe "Localized book Hos (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hos (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["होशेको पुस्तक", "hose", "hoše", "होशे", "Hos"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Hos.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["होशेको पुस्तक", "HOSE", "HOŠE", "होशे", "HOS"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Hos.1.1"
		true
describe "Localized book Joel (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Joel (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["योएलको पुस्तक", "Joel", "yoel", "योएल"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Joel.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["योएलको पुस्तक", "JOEL", "YOEL", "योएल"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Joel.1.1"
		true
describe "Localized book Amos (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Amos (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["आमोसको पुस्तक", "Amos", "amos", "āmos", "अमोस", "आमोस"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Amos.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		# ("AMOS" appears twice in the generated source; both checks are kept.)
		p.include_apocrypha false
		for book in ["आमोसको पुस्तक", "AMOS", "AMOS", "ĀMOS", "अमोस", "आमोस"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Amos.1.1"
		true
describe "Localized book Obad (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: <NAME> (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["ओबदियाको पुस्तक", "obadiya", "obadiyā", "ओबदिया", "Obad"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Obad.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["ओबदियाको पुस्तक", "OBADIYA", "OBADIYĀ", "ओबदिया", "OBAD"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Obad.1.1"
		true
describe "Localized book <NAME> (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: <NAME> (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		# The literal "<NAME>" entry is preserved from the generated source.
		for book in ["योनाको पुस्तक", "<NAME>", "yona", "yonā", "योना"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Jonah.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["योनाको पुस्तक", "JONAH", "YONA", "YONĀ", "योना"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Jonah.1.1"
		true
describe "Localized book Mic (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["मीकाको पुस्तक", "mika", "mikā", "mīka", "mīkā", "मिका", "मीका", "Mic"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Mic.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["मीकाको पुस्तक", "MIKA", "MIKĀ", "MĪKA", "MĪKĀ", "मिका", "मीका", "MIC"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Mic.1.1"
		true
describe "Localized book Nah (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["नहूमको पुस्तक", "nahum", "nahūm", "नहूम", "Nah"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Nah.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["नहूमको पुस्तक", "NAHUM", "NAHŪM", "नहूम", "NAH"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Nah.1.1"
		true
describe "Localized book Hab (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["हबकूकको पुस्तक", "habakuk", "habakūk", "हबकूक", "Hab"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Hab.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["हबकूकको पुस्तक", "HABAKUK", "HABAKŪK", "हबकूक", "HAB"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Hab.1.1"
		true
describe "Localized book Zeph (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["सपन्याहको पुस्तक", "sapanyah", "sapanyāh", "सपन्याह", "Zeph"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Zeph.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["सपन्याहको पुस्तक", "SAPANYAH", "SAPANYĀH", "सपन्याह", "ZEPH"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Zeph.1.1"
		true
describe "Localized book Hag (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["हाग्गैको पुस्तक", "haggəi", "hāggəi", "हाग्गै", "Hag"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Hag.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["हाग्गैको पुस्तक", "HAGGƏI", "HĀGGƏI", "हाग्गै", "HAG"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Hag.1.1"
		true
describe "Localized book Zech (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["जकरियाको पुस्तक", "jakariya", "jakariyā", "जकरिया", "Zech"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Zech.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["जकरियाको पुस्तक", "JAKARIYA", "JAKARIYĀ", "जकरिया", "ZECH"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Zech.1.1"
		true
describe "Localized book Mal (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mal (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		for book in ["मलाकीको पुस्तक", "malaki", "malakī", "malāki", "malākī", "मलाकी", "मलकी", "Mal"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Mal.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in ["मलाकीको पुस्तक", "MALAKI", "MALAKĪ", "MALĀKI", "MALĀKĪ", "मलाकी", "मलकी", "MAL"]
			expect(p.parse("#{book} 1:1").osis()).toEqual "Mal.1.1"
		true
describe "Localized book Matt (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: <NAME> (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		lower = [
			"mattile lekʰeko susmacar"
			"mattile lekʰeko susmacār"
			"mattile lekʰeko susmācar"
			"mattile lekʰeko susmācār"
			"mattīle lekʰeko susmacar"
			"mattīle lekʰeko susmacār"
			"mattīle lekʰeko susmācar"
			"mattīle lekʰeko susmācār"
			"मत्तीले लेखेको सुसमाचार"
			"मत्तीको सुसमाचार"
			"mattile"
			"mattīle"
			"मत्तीले"
			"मत्ति"
			"मत्ती"
			"Matt"
		]
		for book in lower
			expect(p.parse("#{book} 1:1").osis()).toEqual "Matt.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		upper = [
			"MATTILE LEKʰEKO SUSMACAR"
			"MATTILE LEKʰEKO SUSMACĀR"
			"MATTILE LEKʰEKO SUSMĀCAR"
			"MATTILE LEKʰEKO SUSMĀCĀR"
			"MATTĪLE LEKʰEKO SUSMACAR"
			"MATTĪLE LEKʰEKO SUSMACĀR"
			"MATTĪLE LEKʰEKO SUSMĀCAR"
			"MATTĪLE LEKʰEKO SUSMĀCĀR"
			"मत्तीले लेखेको सुसमाचार"
			"मत्तीको सुसमाचार"
			"MATTILE"
			"MATTĪLE"
			"मत्तीले"
			"मत्ति"
			"मत्ती"
			"MATT"
		]
		for book in upper
			expect(p.parse("#{book} 1:1").osis()).toEqual "Matt.1.1"
		true
describe "Localized book Mark (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		lower = [
			"markusle lekʰeko susmacar"
			"markusle lekʰeko susmacār"
			"markusle lekʰeko susmācar"
			"markusle lekʰeko susmācār"
			"markūsle lekʰeko susmacar"
			"markūsle lekʰeko susmacār"
			"markūsle lekʰeko susmācar"
			"markūsle lekʰeko susmācār"
			"मर्कूसले लेखेको सुसमाचार"
			"मर्कूसको सुसमाचार"
			"markusle"
			"markūsle"
			"मर्कूसले"
			"मर्कुस"
			"मर्कूश"
			"मर्कूस"
			"र्मकूस"
			"र्मकस"
			"Mark"
		]
		for book in lower
			expect(p.parse("#{book} 1:1").osis()).toEqual "Mark.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		upper = [
			"MARKUSLE LEKʰEKO SUSMACAR"
			"MARKUSLE LEKʰEKO SUSMACĀR"
			"MARKUSLE LEKʰEKO SUSMĀCAR"
			"MARKUSLE LEKʰEKO SUSMĀCĀR"
			"MARKŪSLE LEKʰEKO SUSMACAR"
			"MARKŪSLE LEKʰEKO SUSMACĀR"
			"MARKŪSLE LEKʰEKO SUSMĀCAR"
			"MARKŪSLE LEKʰEKO SUSMĀCĀR"
			"मर्कूसले लेखेको सुसमाचार"
			"मर्कूसको सुसमाचार"
			"MARKUSLE"
			"MARKŪSLE"
			"मर्कूसले"
			"मर्कुस"
			"मर्कूश"
			"मर्कूस"
			"र्मकूस"
			"र्मकस"
			"MARK"
		]
		for book in upper
			expect(p.parse("#{book} 1:1").osis()).toEqual "Mark.1.1"
		true
describe "Localized book <NAME> (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: <NAME> (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		lower = [
			"lukale lekʰeko susmacar"
			"lukale lekʰeko susmacār"
			"lukale lekʰeko susmācar"
			"lukale lekʰeko susmācār"
			"lukāle lekʰeko susmacar"
			"lukāle lekʰeko susmacār"
			"lukāle lekʰeko susmācar"
			"lukāle lekʰeko susmācār"
			"lūkale lekʰeko susmacar"
			"lūkale lekʰeko susmacār"
			"lūkale lekʰeko susmācar"
			"lūkale lekʰeko susmācār"
			"lūkāle lekʰeko susmacar"
			"lūkāle lekʰeko susmacār"
			"lūkāle lekʰeko susmācar"
			"lūkāle lekʰeko susmācār"
			"लूकाले लेखेको सुसमाचार"
			"लूकाको सुसमाचार"
			"lukale"
			"lukāle"
			"lūkale"
			"lūkāle"
			"लूकाले"
			"Luke"
			"लुका"
			"लूका"
		]
		for book in lower
			expect(p.parse("#{book} 1:1").osis()).toEqual "Luke.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		upper = [
			"LUKALE LEKʰEKO SUSMACAR"
			"LUKALE LEKʰEKO SUSMACĀR"
			"LUKALE LEKʰEKO SUSMĀCAR"
			"LUKALE LEKʰEKO SUSMĀCĀR"
			"LUKĀLE LEKʰEKO SUSMACAR"
			"LUKĀLE LEKʰEKO SUSMACĀR"
			"LUKĀLE LEKʰEKO SUSMĀCAR"
			"LUKĀLE LEKʰEKO SUSMĀCĀR"
			"LŪKALE LEKʰEKO SUSMACAR"
			"LŪKALE LEKʰEKO SUSMACĀR"
			"LŪKALE LEKʰEKO SUSMĀCAR"
			"LŪKALE LEKʰEKO SUSMĀCĀR"
			"LŪKĀLE LEKʰEKO SUSMACAR"
			"LŪKĀLE LEKʰEKO SUSMACĀR"
			"LŪKĀLE LEKʰEKO SUSMĀCAR"
			"LŪKĀLE LEKʰEKO SUSMĀCĀR"
			"लूकाले लेखेको सुसमाचार"
			"लूकाको सुसमाचार"
			"LUKALE"
			"LUKĀLE"
			"LŪKALE"
			"LŪKĀLE"
			"लूकाले"
			"LUKE"
			"लुका"
			"लूका"
		]
		for book in upper
			expect(p.parse("#{book} 1:1").osis()).toEqual "Luke.1.1"
		true
describe "Localized book <NAME>John (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: <NAME>John (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		lower = [
			"yuhannako pahilo patra"
			"yuhannāko pahilo patra"
			"yūhannako pahilo patra"
			"yūhannāko pahilo patra"
			"यूहन्नाको पहिलो पत्र"
			"1. yuhannako"
			"1. yuhannāko"
			"1. yūhannako"
			"1. yūhannāko"
			"1. यूहन्नाको"
			"1 yuhannako"
			"1 yuhannāko"
			"1 yūhannako"
			"1 yūhannāko"
			"1 यूहन्नाको"
			"1. यूहन्ना"
			"1 यूहन्ना"
			"1John"
		]
		for book in lower
			expect(p.parse("#{book} 1:1").osis()).toEqual "1John.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		upper = [
			"YUHANNAKO PAHILO PATRA"
			"YUHANNĀKO PAHILO PATRA"
			"YŪHANNAKO PAHILO PATRA"
			"YŪHANNĀKO PAHILO PATRA"
			"यूहन्नाको पहिलो पत्र"
			"1. YUHANNAKO"
			"1. YUHANNĀKO"
			"1. YŪHANNAKO"
			"1. YŪHANNĀKO"
			"1. यूहन्नाको"
			"1 YUHANNAKO"
			"1 YUHANNĀKO"
			"1 YŪHANNAKO"
			"1 YŪHANNĀKO"
			"1 यूहन्नाको"
			"1. यूहन्ना"
			"1 यूहन्ना"
			"1JOHN"
		]
		for book in upper
			expect(p.parse("#{book} 1:1").osis()).toEqual "1John.1.1"
		true
describe "Localized book 2John (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		lower = [
			"यूहन्नाको दोस्त्रो पत्र"
			"yuhannako dostro patra"
			"yuhannāko dostro patra"
			"yūhannako dostro patra"
			"yūhannāko dostro patra"
			"यूहन्नाको दोस्रो पत्र"
			"2. yuhannako"
			"2. yuhannāko"
			"2. yūhannako"
			"2. yūhannāko"
			"2. यूहन्नाको"
			"2 yuhannako"
			"2 yuhannāko"
			"2 yūhannako"
			"2 yūhannāko"
			"2 यूहन्नाको"
			"2. यूहन्ना"
			"2 यूहन्ना"
			"2John"
		]
		for book in lower
			expect(p.parse("#{book} 1:1").osis()).toEqual "2John.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		upper = [
			"यूहन्नाको दोस्त्रो पत्र"
			"YUHANNAKO DOSTRO PATRA"
			"YUHANNĀKO DOSTRO PATRA"
			"YŪHANNAKO DOSTRO PATRA"
			"YŪHANNĀKO DOSTRO PATRA"
			"यूहन्नाको दोस्रो पत्र"
			"2. YUHANNAKO"
			"2. YUHANNĀKO"
			"2. YŪHANNAKO"
			"2. YŪHANNĀKO"
			"2. यूहन्नाको"
			"2 YUHANNAKO"
			"2 YUHANNĀKO"
			"2 YŪHANNAKO"
			"2 YŪHANNĀKO"
			"2 यूहन्नाको"
			"2. यूहन्ना"
			"2 यूहन्ना"
			"2JOHN"
		]
		for book in upper
			expect(p.parse("#{book} 1:1").osis()).toEqual "2John.1.1"
		true
describe "Localized book 3John (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (ne)", ->
		# Every localized spelling should resolve to the same OSIS reference.
		lower = [
			"यूहन्नाको तेस्त्रो पत्र"
			"yuhannako testro patra"
			"yuhannāko testro patra"
			"yūhannako testro patra"
			"yūhannāko testro patra"
			"यूहन्नाको तेस्रो पत्र"
			"3. yuhannako"
			"3. yuhannāko"
			"3. yūhannako"
			"3. yūhannāko"
			"3. यूहन्नाको"
			"3 yuhannako"
			"3 yuhannāko"
			"3 yūhannako"
			"3 yūhannāko"
			"3 यूहन्नाको"
			"3. यूहन्ना"
			"3 यूहन्ना"
			"3John"
		]
		for book in lower
			expect(p.parse("#{book} 1:1").osis()).toEqual "3John.1.1"
		# Uppercase spellings must still match once the Apocrypha is disabled.
		p.include_apocrypha false
		upper = [
			"यूहन्नाको तेस्त्रो पत्र"
			"YUHANNAKO TESTRO PATRA"
			"YUHANNĀKO TESTRO PATRA"
			"YŪHANNAKO TESTRO PATRA"
			"YŪHANNĀKO TESTRO PATRA"
			"यूहन्नाको तेस्रो पत्र"
			"3. YUHANNAKO"
			"3. YUHANNĀKO"
			"3. YŪHANNAKO"
			"3. YŪHANNĀKO"
			"3. यूहन्नाको"
			"3 YUHANNAKO"
			"3 YUHANNĀKO"
			"3 YŪHANNAKO"
			"3 YŪHANNĀKO"
			"3 यूहन्नाको"
			"3. यूहन्ना"
			"3 यूहन्ना"
			"3JOHN"
		]
		for book in upper
			expect(p.parse("#{book} 1:1").osis()).toEqual "3John.1.1"
		true
# Generated spec: every supported Nepali (ne) spelling of the Gospel of John —
# Latin transliterations (all macron/vowel-length permutations, with the
# modifier letter ʰ) plus Devanagari forms — must resolve to the OSIS book
# "John". NOTE(review): block appears machine-generated; do not hand-edit the
# fixture strings.
describe "Localized book <NAME> (ne)", ->
	p = {}
	# Fresh parser per example. The option strategies match the rest of this
	# generated suite: ignore books standing alone / in sequences, compact
	# OSIS output to book.chapter ("bc"), and delete captive trailing digits.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: <NAME> (ne)", ->
		`
		// Lower-case spellings, parsed with the Apocrypha enabled.
		expect(p.parse("yuhannale lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yuhannale lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yuhannale lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yuhannale lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yuhannāle lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yuhannāle lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yuhannāle lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yuhannāle lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yūhannale lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yūhannale lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yūhannale lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yūhannale lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yūhannāle lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yūhannāle lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yūhannāle lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yūhannāle lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहन्नाले लेखेको सुसमाचार 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहन्नाको सुसमाचार 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yuhannale 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yuhannāle 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yūhannale 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("yūhannāle 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहन्नाले 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहान्ना 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यहून्ना 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("युहन्ना 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहन्ना 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहना 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
		p.include_apocrypha(false)
		// Upper-case spellings of the same variants, Apocrypha disabled.
		expect(p.parse("YUHANNALE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YUHANNALE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YUHANNALE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YUHANNALE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YUHANNĀLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YUHANNĀLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YUHANNĀLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YUHANNĀLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YŪHANNALE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YŪHANNALE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YŪHANNALE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YŪHANNALE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहन्नाले लेखेको सुसमाचार 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहन्नाको सुसमाचार 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YUHANNALE 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YUHANNĀLE 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YŪHANNALE 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("YŪHANNĀLE 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहन्नाले 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहान्ना 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यहून्ना 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("युहन्ना 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहन्ना 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("यूहना 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
		`
		# Explicit return value so the `it` body does not return the last expect.
		true
# Generated spec: every supported Nepali (ne) spelling of Acts — Latin
# transliterations (macron permutations of "preritharuka kam") plus Devanagari
# forms — must resolve to the OSIS book "Acts".
describe "Localized book Acts (ne)", ->
	p = {}
	# Fresh parser per example; option strategies match the rest of this
	# generated suite (see sibling describe blocks).
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (ne)", ->
		`
		// Lower-case spellings (Apocrypha enabled).
		expect(p.parse("preritharuka kam 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("preritharuka kām 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("preritharukā kam 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("preritharukā kām 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("preritharūka kam 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("preritharūka kām 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("preritharūkā kam 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("preritharūkā kām 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("प्रेरितहरूका काम 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("प्रेरित 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
		p.include_apocrypha(false)
		// Upper-case spellings of the same variants, Apocrypha disabled.
		expect(p.parse("PRERITHARUKA KAM 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("PRERITHARUKA KĀM 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("PRERITHARUKĀ KAM 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("PRERITHARUKĀ KĀM 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("PRERITHARŪKA KAM 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("PRERITHARŪKA KĀM 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("PRERITHARŪKĀ KAM 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("PRERITHARŪKĀ KĀM 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("प्रेरितहरूका काम 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("प्रेरित 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
		`
		# Explicit return value so the `it` body does not return the last expect.
		true
# Generated spec: every supported Nepali (ne) spelling of Romans — the 16
# macron permutations of "romiharulai" (with and without "patra") plus
# Devanagari forms — must resolve to the OSIS book "Rom".
describe "Localized book Rom (ne)", ->
	p = {}
	# Fresh parser per example; option strategies match the rest of this
	# generated suite (see sibling describe blocks).
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (ne)", ->
		`
		// Lower-case spellings (Apocrypha enabled).
		expect(p.parse("रोमीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharulai patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharulaī patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharulāi patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharulāī patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharūlai patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharūlaī patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharūlāi patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharūlāī patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharulai patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharulaī patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharulāi patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharulāī patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharūlai patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharūlaī patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharūlāi patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharūlāī patra 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("रोमीहरूलाई पत्र 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharulai 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharulaī 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharulāi 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharulāī 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharūlai 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharūlaī 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharūlāi 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romiharūlāī 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharulai 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharulaī 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharulāi 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharulāī 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharūlai 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharūlaī 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharūlāi 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("romīharūlāī 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("रोमीहरूलाई 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("रोमी 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
		p.include_apocrypha(false)
		// Upper-case spellings of the same variants, Apocrypha disabled.
		expect(p.parse("रोमीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARULAI PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARULAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARULĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARULĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARŪLAI PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARŪLAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARŪLĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARULAI PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARULAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARULĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARULĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARŪLAI PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARŪLĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("रोमीहरूलाई पत्र 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARULAI 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARULAĪ 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARULĀI 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARULĀĪ 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARŪLAI 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARŪLAĪ 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARŪLĀI 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMIHARŪLĀĪ 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARULAI 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARULAĪ 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARULĀI 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARULĀĪ 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARŪLAI 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARŪLAĪ 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARŪLĀI 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMĪHARŪLĀĪ 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("रोमीहरूलाई 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("रोमी 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
		`
		# Explicit return value so the `it` body does not return the last expect.
		true
# Generated spec: every supported Nepali (ne) spelling of 2 Corinthians — the
# 16 macron permutations of "korintʰiharulai" with "dostro patra" and with the
# "2."/"2 " ordinal prefixes, plus Devanagari forms — must resolve to the OSIS
# book "2Cor".
describe "Localized book 2Cor (ne)", ->
	p = {}
	# Fresh parser per example; option strategies match the rest of this
	# generated suite (see sibling describe blocks).
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Cor (ne)", ->
		`
		// Lower-case spellings (Apocrypha enabled).
		expect(p.parse("कोरिन्थीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰiharulai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰiharulaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰiharulāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰiharulāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰiharūlai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰiharūlaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰiharūlāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰiharūlāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰīharulai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰīharulaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰīharulāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰīharulāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰīharūlai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰīharūlaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰīharūlāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("korintʰīharūlāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("कोरिन्थीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰiharulai 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰiharulaī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰiharulāi 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰiharulāī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰiharūlai 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰiharūlaī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰiharūlāi 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰiharūlāī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰīharulai 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰīharulaī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰīharulāi 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰīharulāī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰīharūlai 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰīharūlaī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰīharūlāi 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. korintʰīharūlāī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰiharulai 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰiharulaī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰiharulāi 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰiharulāī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰiharūlai 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰiharūlaī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰiharūlāi 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰiharūlāī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰīharulai 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰīharulaī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰīharulāi 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰīharulāī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰīharūlai 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰīharūlaī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰīharūlāi 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 korintʰīharūlāī 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
		p.include_apocrypha(false)
		// Upper-case spellings of the same variants, Apocrypha disabled.
		expect(p.parse("कोरिन्थीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰIHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰIHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰIHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰIHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰIHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰIHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰIHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰIHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰĪHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰĪHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰĪHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰĪHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰĪHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰĪHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰĪHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("KORINTʰĪHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("कोरिन्थीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰIHARULAI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰIHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰIHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰIHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰIHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰIHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰĪHARULAI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰĪHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰĪHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰĪHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰIHARULAI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰIHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰIHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰIHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰIHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰIHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰĪHARULAI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰĪHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰĪHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰĪHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
		`
		# Explicit return value so the `it` body does not return the last expect.
		true
# Generated spec: every supported Nepali (ne) spelling of 1 Corinthians — the
# 16 macron permutations of "korintʰiharulai" with "pahilo patra" and with the
# "1."/"1 " ordinal prefixes, plus Devanagari forms — must resolve to the OSIS
# book "1Cor".
describe "Localized book 1Cor (ne)", ->
	p = {}
	# Fresh parser per example; option strategies match the rest of this
	# generated suite (see sibling describe blocks).
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Cor (ne)", ->
		`
		// Lower-case spellings (Apocrypha enabled).
		expect(p.parse("कोरिन्थीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰiharulai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰiharulaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰiharulāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰiharulāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰiharūlai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰiharūlaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰiharūlāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰiharūlāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰīharulai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰīharulaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰīharulāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰīharulāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰīharūlai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰīharūlaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰīharūlāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("korintʰīharūlāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("कोरिन्थीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰiharulai 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰiharulaī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰiharulāi 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰiharulāī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰiharūlai 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰiharūlaī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰiharūlāi 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰiharūlāī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰīharulai 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰīharulaī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰīharulāi 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰīharulāī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰīharūlai 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰīharūlaī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰīharūlāi 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. korintʰīharūlāī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰiharulai 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰiharulaī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰiharulāi 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰiharulāī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰiharūlai 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰiharūlaī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰiharūlāi 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰiharūlāī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰīharulai 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰīharulaī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰīharulāi 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰīharulāī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰīharūlai 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰīharūlaī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰīharūlāi 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 korintʰīharūlāī 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
		p.include_apocrypha(false)
		// Upper-case spellings of the same variants, Apocrypha disabled.
		expect(p.parse("कोरिन्थीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰIHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰIHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰIHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰIHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰIHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰIHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰIHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰIHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰĪHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰĪHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰĪHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰĪHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰĪHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰĪHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰĪHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("KORINTʰĪHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("कोरिन्थीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰIHARULAI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰIHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰIHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰIHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰIHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰIHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰĪHARULAI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰĪHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰĪHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰĪHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰIHARULAI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰIHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰIHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰIHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰIHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰIHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰĪHARULAI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰĪHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰĪHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰĪHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
		`
		# Explicit return value so the `it` body does not return the last expect.
		true
describe "Localized book Gal (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gal (ne)", ->
`
expect(p.parse("गलातीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलाती 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
p.include_apocrypha(false)
expect(p.parse("गलातीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलाती 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
`
true
# Auto-generated localization spec (Nepali, "ne"): every transliteration and
# Devanagari variant of the book name for Ephesians must resolve to OSIS "Eph".
describe "Localized book Eph (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; options make lone books / book sequences
		# non-matching and compact OSIS output to book.chapter ("bc").
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eph (ne)", ->
		`
		expect(p.parse("एफिसीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulai patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulaī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulāi patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulāī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlai patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlaī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlāi patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlāī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulai patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulaī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulāi patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulāī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlai patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlaī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlāi patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlāī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसीहरूलाई पत्र 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulai 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulaī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulāi 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulāī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlai 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlaī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlāi 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlāī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulai 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulaī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulāi 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulāī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlai 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlaī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlāi 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlāī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसीहरूलाई 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसी 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
		// Same variants in upper case, with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("एफिसीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULAI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLAI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULAI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLAI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसीहरूलाई पत्र 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULAI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULAĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULĀI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULĀĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLAI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLAĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLĀI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLĀĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULAI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULAĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULĀI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULĀĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLAI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLAĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLĀI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLĀĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसीहरूलाई 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसी 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
		`
		# NOTE(review): trailing literal appears intended to keep the embedded
		# block's value from becoming the function's return value — confirm.
		true
# Auto-generated localization spec (Nepali, "ne"): every transliteration and
# Devanagari variant of the book name for Philippians must resolve to OSIS "Phil".
# Fix: the description strings contained a garbled "<NAME>" placeholder; restored
# to "Phil" to match the assertions below and the sibling Eph/Col/2Thess blocks.
describe "Localized book Phil (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; options make lone books / book sequences
		# non-matching and compact OSIS output to book.chapter ("bc").
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil (ne)", ->
		`
		expect(p.parse("फिलिप्पीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulai patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulaī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulāi patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulāī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlai patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlaī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlāi patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlāī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulai patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulaī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulāi patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulāī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlai patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlaī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlāi patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlāī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पीहरूलाई पत्र 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulai 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulaī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulāi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulāī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlai 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlaī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlāi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlāī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulai 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulaī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulāi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulāī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlai 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlaī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlāi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlāī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पीहरूलाई 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पी 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
		// Same variants in upper case, with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("फिलिप्पीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULAI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLAI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULAI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLAI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पीहरूलाई पत्र 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULAI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULAĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULĀI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULĀĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLAI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLAĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLĀI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLĀĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULAI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULAĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULĀI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULĀĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLAI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLAĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLĀI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLĀĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पीहरूलाई 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पी 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
		`
		true
# Auto-generated localization spec (Nepali, "ne"): every transliteration and
# Devanagari variant of the book name for Colossians must resolve to OSIS "Col".
describe "Localized book Col (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; options make lone books / book sequences
		# non-matching and compact OSIS output to book.chapter ("bc").
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Col (ne)", ->
		`
		expect(p.parse("कलस्सीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulai patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulaī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulāi patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulāī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlai patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlaī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlāi patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlāī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulai patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulaī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulāi patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulāī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlai patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlaī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlāi patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlāī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सीहरूलाई पत्र 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulai 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulaī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulāi 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulāī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlai 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlaī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlāi 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlāī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulai 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulaī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulāi 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulāī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlai 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlaī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlāi 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlāī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सीहरूलाई 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सी 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
		// Same variants in upper case, with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("कलस्सीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULAI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULĀI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLAI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLĀI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULAI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULĀI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLAI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLĀI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सीहरूलाई पत्र 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULAI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULAĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULĀI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULĀĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLAI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLAĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLĀI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLĀĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULAI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULAĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULĀI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULĀĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLAI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLAĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLĀI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLĀĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सीहरूलाई 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सी 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
		`
		true
# Auto-generated localization spec (Nepali, "ne"): every transliteration and
# Devanagari variant of the book name for 2 Thessalonians — including "2." and
# "2 " ordinal prefixes — must resolve to OSIS "2Thess".
describe "Localized book 2Thess (ne)", ->
	p = {}
	beforeEach ->
		# Fresh parser per test; options make lone books / book sequences
		# non-matching and compact OSIS output to book.chapter ("bc").
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (ne)", ->
		`
		expect(p.parse("थिस्सलोनिकीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharulai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharulaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharulāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharulāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharūlai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharūlaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharūlāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharūlāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharulai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharulaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharulāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharulāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharūlai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharūlaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharūlāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharūlāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("थिस्सलोनिकीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharulai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharulaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharulāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharulāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharūlai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharūlaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharūlāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharūlāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharulai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharulaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharulāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharulāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharūlai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharūlaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharūlāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharūlāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharulai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharulaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharulāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharulāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharūlai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharūlaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharūlāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharūlāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharulai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharulaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharulāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharulāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharūlai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharūlaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharūlāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharūlāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
		// Same variants in upper case, with the Apocrypha disabled.
		p.include_apocrypha(false)
		expect(p.parse("थिस्सलोनिकीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("थिस्सलोनिकीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARULAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARULAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
		`
		true
describe "Localized book 1Thess (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Thess (ne)", ->
`
expect(p.parse("थिस्सलोनिकीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("थिस्सलोनिकीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("थिस्सलोनिकीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("थिस्सलोनिकीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
`
true
# NOTE(review): the generated describe/it titles contained an anonymization
# placeholder ("<NAME>"); restored to "2Tim", which is the OSIS book every
# expectation in this block asserts.
describe "Localized book 2Tim (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Tim (ne)", ->
		# Romanized spellings of "Timothy" differing only in vowel length (macrons).
		lowerNames = ["timotʰilai", "timotʰilaī", "timotʰilāi", "timotʰilāī", "timotʰīlai", "timotʰīlaī", "timotʰīlāi", "timotʰīlāī"]
		upperNames = ["TIMOTʰILAI", "TIMOTʰILAĪ", "TIMOTʰILĀI", "TIMOTʰILĀĪ", "TIMOTʰĪLAI", "TIMOTʰĪLAĪ", "TIMOTʰĪLĀI", "TIMOTʰĪLĀĪ"]
		# One pass over every accepted spelling; `dostroPatra` is the romanized
		# "second letter" suffix, `bookRef` the bare book abbreviation.
		verify = (names, dostroPatra, bookRef) ->
			expect(p.parse("तिमोथीलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Tim.1.1")
			for n in names
				expect(p.parse("#{n} #{dostroPatra} 1:1").osis()).toEqual("2Tim.1.1")
			expect(p.parse("तिमोथीलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Tim.1.1")
			for prefix in ["2.", "2"]
				for n in names
					expect(p.parse("#{prefix} #{n} 1:1").osis()).toEqual("2Tim.1.1")
			for ref in ["2. तिमोथीलाई", "2 तिमोथीलाई", "2. तिमोथी", "2 तिमोथी", bookRef]
				expect(p.parse("#{ref} 1:1").osis()).toEqual("2Tim.1.1")
		verify lowerNames, "dostro patra", "2Tim"
		p.include_apocrypha(false)
		verify upperNames, "DOSTRO PATRA", "2TIM"
		true
# Spec for the Nepali (ne) spellings of 1 Timothy.
describe "Localized book 1Tim (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Tim (ne)", ->
		# Romanized spellings differ only in vowel length (macrons).
		lowerNames = ["timotʰilai", "timotʰilaī", "timotʰilāi", "timotʰilāī", "timotʰīlai", "timotʰīlaī", "timotʰīlāi", "timotʰīlāī"]
		upperNames = ["TIMOTʰILAI", "TIMOTʰILAĪ", "TIMOTʰILĀI", "TIMOTʰILĀĪ", "TIMOTʰĪLAI", "TIMOTʰĪLAĪ", "TIMOTʰĪLĀI", "TIMOTʰĪLĀĪ"]
		# Exercise every accepted spelling once per case (lower, then upper).
		verify = (names, pahiloPatra, bookRef) ->
			expect(p.parse("तिमोथीलाईर् पावलको पहिलो पत्र 1:1").osis()).toEqual("1Tim.1.1")
			for n in names
				expect(p.parse("#{n} #{pahiloPatra} 1:1").osis()).toEqual("1Tim.1.1")
			expect(p.parse("तिमोथीलाई पहिलो पत्र 1:1").osis()).toEqual("1Tim.1.1")
			for prefix in ["1.", "1"]
				for n in names
					expect(p.parse("#{prefix} #{n} 1:1").osis()).toEqual("1Tim.1.1")
			for ref in ["1. तिमोथीलाई", "1 तिमोथीलाई", "1. तिमोथी", "1 तिमोथी", bookRef]
				expect(p.parse("#{ref} 1:1").osis()).toEqual("1Tim.1.1")
		verify lowerNames, "pahilo patra", "1Tim"
		p.include_apocrypha(false)
		verify upperNames, "PAHILO PATRA", "1TIM"
		true
# Spec for the Nepali (ne) spellings of Titus.
describe "Localized book Titus (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Titus (ne)", ->
		lowerNames = ["titaslai", "titaslaī", "titaslāi", "titaslāī", "tītaslai", "tītaslaī", "tītaslāi", "tītaslāī"]
		upperNames = ["TITASLAI", "TITASLAĪ", "TITASLĀI", "TITASLĀĪ", "TĪTASLAI", "TĪTASLAĪ", "TĪTASLĀI", "TĪTASLĀĪ"]
		# One pass per case: long form with "patra", then the bare spellings.
		verify = (names, patra, bookRef) ->
			expect(p.parse("तीतसलाई पावलको पत्र 1:1").osis()).toEqual("Titus.1.1")
			for n in names
				expect(p.parse("#{n} #{patra} 1:1").osis()).toEqual("Titus.1.1")
			expect(p.parse("तीतसलाई पत्र 1:1").osis()).toEqual("Titus.1.1")
			for n in names
				expect(p.parse("#{n} 1:1").osis()).toEqual("Titus.1.1")
			for ref in ["तीतसलाई", bookRef, "तीतस"]
				expect(p.parse("#{ref} 1:1").osis()).toEqual("Titus.1.1")
		verify lowerNames, "patra", "Titus"
		p.include_apocrypha(false)
		verify upperNames, "PATRA", "TITUS"
		true
# Spec for the Nepali (ne) spellings of Philemon.
describe "Localized book Phlm (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phlm (ne)", ->
		lowerNames = ["pʰilemonlai", "pʰilemonlaī", "pʰilemonlāi", "pʰilemonlāī"]
		upperNames = ["PʰILEMONLAI", "PʰILEMONLAĪ", "PʰILEMONLĀI", "PʰILEMONLĀĪ"]
		verify = (names, patra, bookRef) ->
			expect(p.parse("फिलेमोनलाई पावलको पत्र 1:1").osis()).toEqual("Phlm.1.1")
			for n in names
				expect(p.parse("#{n} #{patra} 1:1").osis()).toEqual("Phlm.1.1")
			expect(p.parse("फिलेमोनलाई पत्र 1:1").osis()).toEqual("Phlm.1.1")
			for n in names
				expect(p.parse("#{n} 1:1").osis()).toEqual("Phlm.1.1")
			for ref in ["फिलेमोनलाई", "फिलेमोन", bookRef]
				expect(p.parse("#{ref} 1:1").osis()).toEqual("Phlm.1.1")
		verify lowerNames, "patra", "Phlm"
		p.include_apocrypha(false)
		verify upperNames, "PATRA", "PHLM"
		true
# Spec for the Nepali (ne) spellings of Hebrews.
describe "Localized book Heb (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Heb (ne)", ->
		lowerNames = ["hibruharuko", "hibruharūko", "hibrūharuko", "hibrūharūko"]
		upperNames = ["HIBRUHARUKO", "HIBRUHARŪKO", "HIBRŪHARUKO", "HIBRŪHARŪKO"]
		verify = (names, nimtiPatra, nimti, bookRef) ->
			for n in names
				expect(p.parse("#{n} #{nimtiPatra} 1:1").osis()).toEqual("Heb.1.1")
			expect(p.parse("हिब्रूहरूको निम्ति पत्र 1:1").osis()).toEqual("Heb.1.1")
			expect(p.parse("हिब्रूहरूको निम्ति 1:1").osis()).toEqual("Heb.1.1")
			for n in names
				expect(p.parse("#{n} #{nimti} 1:1").osis()).toEqual("Heb.1.1")
			expect(p.parse("हिब्रू 1:1").osis()).toEqual("Heb.1.1")
			expect(p.parse("#{bookRef} 1:1").osis()).toEqual("Heb.1.1")
		verify lowerNames, "nimti patra", "nimti", "Heb"
		p.include_apocrypha(false)
		verify upperNames, "NIMTI PATRA", "NIMTI", "HEB"
		true
# Spec for the Nepali (ne) spellings of James.
describe "Localized book Jas (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jas (ne)", ->
		lowerNames = ["yakubko", "yakūbko", "yākubko", "yākūbko"]
		upperNames = ["YAKUBKO", "YAKŪBKO", "YĀKUBKO", "YĀKŪBKO"]
		verify = (names, patra, bookRef) ->
			for n in names
				expect(p.parse("#{n} #{patra} 1:1").osis()).toEqual("Jas.1.1")
			expect(p.parse("याकूबको पत्र 1:1").osis()).toEqual("Jas.1.1")
			for n in names
				expect(p.parse("#{n} 1:1").osis()).toEqual("Jas.1.1")
			for ref in ["याकूबको", "याकूब", bookRef]
				expect(p.parse("#{ref} 1:1").osis()).toEqual("Jas.1.1")
		verify lowerNames, "patra", "Jas"
		p.include_apocrypha(false)
		verify upperNames, "PATRA", "JAS"
		true
# Spec for the Nepali (ne) spellings of 2 Peter.
describe "Localized book 2Pet (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Pet (ne)", ->
		verify = (patruskoDostro, patrusko, bookRef) ->
			expect(p.parse("पत्रुसको दोस्त्रो पत्र 1:1").osis()).toEqual("2Pet.1.1")
			expect(p.parse("#{patruskoDostro} 1:1").osis()).toEqual("2Pet.1.1")
			expect(p.parse("पत्रुसको दोस्रो पत्र 1:1").osis()).toEqual("2Pet.1.1")
			for ref in ["2. #{patrusko}", "2. पत्रुसको", "2 #{patrusko}", "2 पत्रुसको", "2. पत्रुस", "2 पत्रुस", bookRef]
				expect(p.parse("#{ref} 1:1").osis()).toEqual("2Pet.1.1")
		verify "patrusko dostro patra", "patrusko", "2Pet"
		p.include_apocrypha(false)
		verify "PATRUSKO DOSTRO PATRA", "PATRUSKO", "2PET"
		true
# Spec for the Nepali (ne) spellings of 1 Peter.
describe "Localized book 1Pet (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Pet (ne)", ->
		verify = (patruskoPahilo, patrusko, bookRef) ->
			expect(p.parse("#{patruskoPahilo} 1:1").osis()).toEqual("1Pet.1.1")
			expect(p.parse("पत्रुसको पहिलो पत्र 1:1").osis()).toEqual("1Pet.1.1")
			for ref in ["1. #{patrusko}", "1. पत्रुसको", "1 #{patrusko}", "1 पत्रुसको", "1. पत्रुस", "1 पत्रुस", bookRef]
				expect(p.parse("#{ref} 1:1").osis()).toEqual("1Pet.1.1")
		verify "patrusko pahilo patra", "patrusko", "1Pet"
		p.include_apocrypha(false)
		verify "PATRUSKO PAHILO PATRA", "PATRUSKO", "1PET"
		true
# NOTE(review): the generated describe/it titles contained an anonymization
# placeholder ("<NAME>"); restored to "Jude", which is the OSIS book every
# expectation in this block asserts.
describe "Localized book Jude (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jude (ne)", ->
		lowerNames = ["yahudako", "yahudāko", "yahūdako", "yahūdāko"]
		upperNames = ["YAHUDAKO", "YAHUDĀKO", "YAHŪDAKO", "YAHŪDĀKO"]
		verify = (names, patra, bookRef) ->
			for n in names
				expect(p.parse("#{n} #{patra} 1:1").osis()).toEqual("Jude.1.1")
			expect(p.parse("यहूदाको पत्र 1:1").osis()).toEqual("Jude.1.1")
			for n in names
				expect(p.parse("#{n} 1:1").osis()).toEqual("Jude.1.1")
			for ref in ["यहूदाको", "यहूदा", bookRef]
				expect(p.parse("#{ref} 1:1").osis()).toEqual("Jude.1.1")
		verify lowerNames, "patra", "Jude"
		p.include_apocrypha(false)
		verify upperNames, "PATRA", "JUDE"
		true
# Tobit has no localized Nepali form; only the OSIS abbreviation is accepted.
describe "Localized book Tob (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Tob (ne)", ->
		expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
		true
# Judith has no localized Nepali form; only the OSIS abbreviation is accepted.
describe "Localized book Jdt (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jdt (ne)", ->
		expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
		true
# Baruch has no localized Nepali form; only the OSIS abbreviation is accepted.
describe "Localized book Bar (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bar (ne)", ->
		expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
		true
# Susanna has no localized Nepali form; only the OSIS abbreviation is accepted.
describe "Localized book Sus (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sus (ne)", ->
		expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
		true
# 2 Maccabees has no localized Nepali form; only the OSIS abbreviation is accepted.
describe "Localized book 2Macc (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Macc (ne)", ->
		expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
		true
# 3 Maccabees has no localized Nepali form; only the OSIS abbreviation is accepted.
describe "Localized book 3Macc (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3Macc (ne)", ->
		expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
		true
# 4 Maccabees has no localized Nepali form; only the OSIS abbreviation is accepted.
describe "Localized book 4Macc (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 4Macc (ne)", ->
		expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
		true
# 1 Maccabees has no localized Nepali form; only the OSIS abbreviation is accepted.
describe "Localized book 1Macc (ne)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Macc (ne)", ->
		expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
		true
# Cross-cutting parser behaviors for the Nepali (ne) locale build.
describe "Miscellaneous tests", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should return the expected language", ->
		expect(p.languages).toEqual(["ne"])
	it "should handle ranges (ne)", ->
		expect(p.parse("Titus 1:1 - 2").osis()).toEqual("Titus.1.1-Titus.1.2")
		expect(p.parse("Matt 1-2").osis()).toEqual("Matt.1-Matt.2")
		expect(p.parse("Phlm 2 - 3").osis()).toEqual("Phlm.1.2-Phlm.1.3")
	it "should handle chapters (ne)", ->
		expect(p.parse("Titus 1:1, chapter 2").osis()).toEqual("Titus.1.1,Titus.2")
		expect(p.parse("Matt 3:4 CHAPTER 6").osis()).toEqual("Matt.3.4,Matt.6")
	it "should handle verses (ne)", ->
		expect(p.parse("Exod 1:1 verse 3").osis()).toEqual("Exod.1.1,Exod.1.3")
		expect(p.parse("Phlm VERSE 6").osis()).toEqual("Phlm.1.6")
	it "should handle 'and' (ne)", ->
		expect(p.parse("Exod 1:1 and 3").osis()).toEqual("Exod.1.1,Exod.1.3")
		expect(p.parse("Phlm 2 AND 6").osis()).toEqual("Phlm.1.2,Phlm.1.6")
	it "should handle titles (ne)", ->
		# "title" resolves to verse 1 of the psalm.
		expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual("Ps.3.1,Ps.4.2,Ps.5.1")
		expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual("Ps.3.1,Ps.4.2,Ps.5.1")
	it "should handle 'ff' (ne)", ->
		# "ff" extends to the end of the chapter or book.
		expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual("Rev.3-Rev.22,Rev.4.2-Rev.4.11")
		expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual("Rev.3-Rev.22,Rev.4.2-Rev.4.11")
	it "should handle translations (ne)", ->
		expect(p.parse("Lev 1 (ERV)").osis_and_translations()).toEqual([["Lev.1", "ERV"]])
		expect(p.parse("lev 1 erv").osis_and_translations()).toEqual([["Lev.1", "ERV"]])
	it "should handle book ranges (ne)", ->
		p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
		# All four romanized spellings of "John" (yuhanna) in a 1-3 John range.
		for yuhanna in ["yuhannako", "yuhannāko", "yūhannako", "yūhannāko"]
			expect(p.parse("1 - 3 #{yuhanna}").osis()).toEqual("1John.1-3John.1")
	it "should handle boundaries (ne)", ->
		p.set_options {book_alone_strategy: "full"}
		expect(p.parse("\u2014Matt\u2014").osis()).toEqual("Matt.1-Matt.28")
		expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual("Matt.1.1")
true
# Load the generated Nepali (ne) parser under test. (The original line was
# garbled by extraction: the previous statement's trailing `true` and this
# require were fused onto one line with stray "|" characters.)
bcv_parser = require("../../js/ne_bcv_parser.js").bcv_parser
describe "Parsing", ->
p = {}
beforeEach ->
p = new bcv_parser
p.options.osis_compaction_strategy = "b"
p.options.sequence_combination_strategy = "combine"
it "should round-trip OSIS references", ->
p.set_options osis_compaction_strategy: "bc"
books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PImos","PI:NAME:<NAME>END_PIad","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PIah","Hab","Zeph","Hag","Zech","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
it "should round-trip OSIS Apocrypha references", ->
p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
p.include_apocrypha true
books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
p.set_options ps151_strategy: "bc"
expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
p.include_apocrypha false
for book in books
bc = book + ".1"
expect(p.parse(bc).osis()).toEqual ""
it "should handle a preceding character", ->
expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
expect(p.parse("1Ps 1").osis()).toEqual ""
expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gen (ne)", ->
`
expect(p.parse("उत्पत्तिको पुस्तक 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("utpattiko pustak 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("उत्पत्तिको 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("utpattiko 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("उत्पत्ति 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
p.include_apocrypha(false)
expect(p.parse("उत्पत्तिको पुस्तक 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("UTPATTIKO PUSTAK 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("उत्पत्तिको 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("UTPATTIKO 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("उत्पत्ति 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
`
true
describe "Localized book Exod (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Exod (ne)", ->
`
expect(p.parse("prastʰanko pustak 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("prastʰānko pustak 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("प्रस्थानको पुस्तक 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("prastʰanko 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("prastʰānko 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("प्रस्थानको 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("प्रस्थान 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
p.include_apocrypha(false)
expect(p.parse("PRASTʰANKO PUSTAK 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("PRASTʰĀNKO PUSTAK 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("प्रस्थानको पुस्तक 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("PRASTʰANKO 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("PRASTʰĀNKO 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("प्रस्थानको 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("प्रस्थान 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
`
true
describe "Localized book Bel (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (ne)", ->
`
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (ne)", ->
`
expect(p.parse("leviharuko pustak 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("leviharūko pustak 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("levīharuko pustak 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("levīharūko pustak 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("लेवीहरूको पुस्तक 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("leviharuko 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("leviharūko 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("levīharuko 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("levīharūko 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("लेवीहरूको 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("लेवि 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("लेवी 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("LEVIHARUKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVIHARŪKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVĪHARUKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVĪHARŪKO PUSTAK 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("लेवीहरूको पुस्तक 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVIHARUKO 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVIHARŪKO 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVĪHARUKO 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVĪHARŪKO 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("लेवीहरूको 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("लेवि 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("लेवी 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (ne)", ->
`
expect(p.parse("gantiko pustak 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("gantīko pustak 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("गन्तीको पुस्तक 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("gantiko 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("gantīko 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("गन्तीको 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("गन्ती 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("GANTIKO PUSTAK 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("GANTĪKO PUSTAK 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("गन्तीको पुस्तक 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("GANTIKO 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("GANTĪKO 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("गन्तीको 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("गन्ती 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
`
true
describe "Localized book Sir (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (ne)", ->
`
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (ne)", ->
`
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (ne)", ->
`
expect(p.parse("yarmiyako vilap 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("yarmiyako vilāp 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("yarmiyāko vilap 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("yarmiyāko vilāp 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("यर्मियाको विलाप 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("विलाप 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("YARMIYAKO VILAP 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("YARMIYAKO VILĀP 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("YARMIYĀKO VILAP 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("YARMIYĀKO VILĀP 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("यर्मियाको विलाप 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("विलाप 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (ne)", ->
`
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (ne)", ->
`
expect(p.parse("yuhannalai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannalāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yuhannālāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannalāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālai bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālai bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālai bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālai bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālaī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālaī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālaī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālaī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālāi bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālāi bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālāi bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālāi bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālāī bʰaeko prakas 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālāī bʰaeko prakaš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālāī bʰaeko prakās 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("yūhannālāī bʰaeko prakāš 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("यूहन्नालाई भएको प्रकाश 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("प्रकाश 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("YUHANNALAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNALĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YUHANNĀLĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNALĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLAI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLAĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLĀI BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKAS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKAŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKĀS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("YŪHANNĀLĀĪ BʰAEKO PRAKĀŠ 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("यूहन्नालाई भएको प्रकाश 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("प्रकाश 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (ne)", ->
`
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (ne)", ->
`
expect(p.parse("vyavastʰako pustak 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("vyavastʰāko pustak 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("व्यवस्थाको पुस्तक 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("vyavastʰako 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("vyavastʰāko 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("व्यवस्थाको 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("व्यावस्था 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("व्यवस्था 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("VYAVASTʰAKO PUSTAK 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("VYAVASTʰĀKO PUSTAK 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("व्यवस्थाको पुस्तक 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("VYAVASTʰAKO 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("VYAVASTʰĀKO 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("व्यवस्थाको 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("व्यावस्था 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("व्यवस्था 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (ne)", ->
`
expect(p.parse("yahosuko pustak 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("yahosūko pustak 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("yahošuko pustak 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("yahošūko pustak 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("यहोशूको पुस्तक 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("yahosuko 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("yahosūko 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("yahošuko 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("yahošūko 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("यहोशूको 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("यहोशू 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("YAHOSUKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("YAHOSŪKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("YAHOŠUKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("YAHOŠŪKO PUSTAK 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("यहोशूको पुस्तक 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("YAHOSUKO 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("YAHOSŪKO 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("YAHOŠUKO 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("YAHOŠŪKO 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("यहोशूको 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("यहोशू 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Judg (ne)", ->
`
expect(p.parse("nyayakarttaharuko pustak 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyayakarttaharūko pustak 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyayakarttāharuko pustak 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyayakarttāharūko pustak 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyāyakarttaharuko pustak 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyāyakarttaharūko pustak 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyāyakarttāharuko pustak 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyāyakarttāharūko pustak 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("न्यायकर्त्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("न्यायकर्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyayakarttaharuko 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyayakarttaharūko 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyayakarttāharuko 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyayakarttāharūko 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyāyakarttaharuko 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyāyakarttaharūko 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyāyakarttāharuko 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("nyāyakarttāharūko 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("न्यायकर्त्ताहरूको 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("न्यायकर्ता 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("NYAYAKARTTAHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYAYAKARTTAHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYAYAKARTTĀHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYAYAKARTTĀHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYĀYAKARTTAHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYĀYAKARTTAHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYĀYAKARTTĀHARUKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYĀYAKARTTĀHARŪKO PUSTAK 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("न्यायकर्त्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("न्यायकर्ताहरूको पुस्तक 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYAYAKARTTAHARUKO 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYAYAKARTTAHARŪKO 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYAYAKARTTĀHARUKO 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYAYAKARTTĀHARŪKO 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYĀYAKARTTAHARUKO 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYĀYAKARTTAHARŪKO 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYĀYAKARTTĀHARUKO 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("NYĀYAKARTTĀHARŪKO 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("न्यायकर्त्ताहरूको 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("न्यायकर्ता 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (ne)", ->
`
expect(p.parse("rutʰko pustak 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("rūtʰko pustak 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("रूथको पुस्तक 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("rutʰko 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("rūtʰko 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("रूथको 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("रूथ 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("RUTʰKO PUSTAK 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RŪTʰKO PUSTAK 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("रूथको पुस्तक 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUTʰKO 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RŪTʰKO 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("रूथको 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("रूथ 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (ne)", ->
`
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
describe "Localized book 2Esd (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Esd (ne)", ->
`
expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
`
true
describe "Localized book Isa (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Isa (ne)", ->
`
expect(p.parse("yasəiyako pustak 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("yasəiyāko pustak 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("yašəiyako pustak 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("yašəiyāko pustak 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("यशैयाको पुस्तक 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("yasəiyako 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("yasəiyāko 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("yašəiyako 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("yašəiyāko 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("यशैयाको 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("येशैया 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("यशैया 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
p.include_apocrypha(false)
expect(p.parse("YASƏIYAKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("YASƏIYĀKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("YAŠƏIYAKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("YAŠƏIYĀKO PUSTAK 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("यशैयाको पुस्तक 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("YASƏIYAKO 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("YASƏIYĀKO 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("YAŠƏIYAKO 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("YAŠƏIYĀKO 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("यशैयाको 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("येशैया 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("यशैया 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
`
true
describe "Localized book 2Sam (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Sam (ne)", ->
`
expect(p.parse("शमूएलको दोस्रो पुस्तक 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. samuelko 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. samūelko 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. šamuelko 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. šamūelko 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 samuelko 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 samūelko 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 šamuelko 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 šamūelko 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. शामुएल 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 शामुएल 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. शमूएल 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 शमूएल 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("शमूएलको दोस्रो पुस्तक 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. ŠAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. ŠAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 ŠAMUELKO 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 ŠAMŪELKO 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 शमूएलको 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. शामुएल 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 शामुएल 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. शमूएल 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 शमूएल 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
`
true
describe "Localized book 1Sam (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Sam (ne)", ->
`
expect(p.parse("शमूएलको पहिलो पुस्तक 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("samuelko pustak 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("samūelko pustak 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("šamuelko pustak 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("šamūelko pustak 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. samuelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. samūelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. šamuelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. šamūelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 samuelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 samūelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 šamuelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 šamūelko 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शामुएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शामुएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शमूएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शमूएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("शमूएलको पहिलो पुस्तक 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("SAMUELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("SAMŪELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("ŠAMUELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("ŠAMŪELKO PUSTAK 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. ŠAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. ŠAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 ŠAMUELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 ŠAMŪELKO 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शमूएलको 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शामुएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शामुएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. शमूएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 शमूएल 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
`
true
describe "Localized book 2Kgs (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (ne)", ->
`
expect(p.parse("राजाहरूको दोस्रो पुस्तक 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. radzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. radzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. radzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. radzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. rādzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. rādzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. rādzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. rādzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 radzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 radzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 radzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 radzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 rādzaharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 rādzaharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 rādzāharuko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 rādzāharūko 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. राजा 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 राजा 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("राजाहरूको दोस्रो पुस्तक 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RADZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RADZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RADZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RADZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RĀDZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RĀDZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RĀDZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. RĀDZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RADZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RADZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RADZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RADZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RĀDZAHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RĀDZAHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RĀDZĀHARUKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 RĀDZĀHARŪKO 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 राजाहरूको 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. राजा 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 राजा 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
describe "Localized book 1Kgs (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (ne)", ->
`
expect(p.parse("राजाहरूक पहिल पुस्तक 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("radzaharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("radzaharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("radzāharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("radzāharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("rādzaharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("rādzaharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("rādzāharuko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("rādzāharūko pustak 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. radzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. radzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. radzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. radzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. rādzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. rādzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. rādzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. rādzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 radzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 radzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 radzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 radzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 rādzaharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 rādzaharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 rādzāharuko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 rādzāharūko 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. राजा 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 राजा 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("राजाहरूक पहिल पुस्तक 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RADZAHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RADZAHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RADZĀHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RADZĀHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RĀDZAHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RĀDZAHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RĀDZĀHARUKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("RĀDZĀHARŪKO PUSTAK 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RADZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RADZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RADZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RADZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RĀDZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RĀDZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RĀDZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. RĀDZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RADZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RADZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RADZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RADZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RĀDZAHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RĀDZAHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RĀDZĀHARUKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 RĀDZĀHARŪKO 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 राजाहरूको 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. राजा 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 राजा 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
`
true
describe "Localized book 2Chr (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Chr (ne)", ->
`
expect(p.parse("इतिहासको दोस्रो पुस्तक 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. itihasko 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. itihāsko 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 itihasko 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 itihāsko 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. इतिहास 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 इतिहास 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("इतिहासको दोस्रो पुस्तक 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. ITIHASKO 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. ITIHĀSKO 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 ITIHASKO 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 ITIHĀSKO 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 इतिहासको 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. इतिहास 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 इतिहास 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
`
true
describe "Localized book 1Chr (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Chr (ne)", ->
`
expect(p.parse("इतिहासको पहिलो पुस्तक 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("itihasko pustak 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("itihāsko pustak 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. itihasko 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. itihāsko 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 itihasko 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 itihāsko 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. इतिहास 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 इतिहास 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("इतिहासको पहिलो पुस्तक 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("ITIHASKO PUSTAK 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("ITIHĀSKO PUSTAK 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. ITIHASKO 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. ITIHĀSKO 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 ITIHASKO 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 ITIHĀSKO 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 इतिहासको 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. इतिहास 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 इतिहास 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
`
true
describe "Localized book Ezra (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezra (ne)", ->
`
expect(p.parse("एज्राको पुस्तक 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("एज्राको 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("edzrako 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("edzrāko 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("एज्रा 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
p.include_apocrypha(false)
expect(p.parse("एज्राको पुस्तक 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("एज्राको 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EDZRAKO 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EDZRĀKO 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("एज्रा 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
`
true
describe "Localized book Neh (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Neh (ne)", ->
`
expect(p.parse("nahemyahko pustak 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("nahemyāhko pustak 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याहको पुस्तक 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("nahemyahko 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("nahemyāhko 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याहको 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याह 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
p.include_apocrypha(false)
expect(p.parse("NAHEMYAHKO PUSTAK 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NAHEMYĀHKO PUSTAK 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याहको पुस्तक 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NAHEMYAHKO 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NAHEMYĀHKO 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याहको 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("नहेम्याह 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
`
true
describe "Localized book GkEsth (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: GkEsth (ne)", ->
`
expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
`
true
describe "Localized book Esth (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Esth (ne)", ->
`
expect(p.parse("estarko pustak 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तरको पुस्तक 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("estarko 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तरको 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तर 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
p.include_apocrypha(false)
expect(p.parse("ESTARKO PUSTAK 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तरको पुस्तक 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTARKO 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तरको 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("एस्तर 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
`
true
describe "Localized book Job (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Job (ne)", ->
`
expect(p.parse("अय्यूबको पुस्तक 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("ayyubko pustak 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("ayyūbko pustak 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("अय्यूबको 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("ayyubko 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("ayyūbko 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("अय्यूब 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
p.include_apocrypha(false)
expect(p.parse("अय्यूबको पुस्तक 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("AYYUBKO PUSTAK 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("AYYŪBKO PUSTAK 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("अय्यूबको 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("AYYUBKO 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("AYYŪBKO 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("अय्यूब 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
`
true
describe "Localized book Ps (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ps (ne)", ->
`
expect(p.parse("bʰadzansamgrah 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("bʰadzansaṃgrah 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजनसंग्रह 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजनसग्रह 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजन 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
p.include_apocrypha(false)
expect(p.parse("BʰADZANSAMGRAH 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("BʰADZANSAṂGRAH 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजनसंग्रह 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजनसग्रह 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("भजन 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
`
true
describe "Localized book PrAzar (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrAzar (ne)", ->
`
expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
`
true
describe "Localized book Prov (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Prov (ne)", ->
`
expect(p.parse("hitopadesko pustak 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("hitopadeško pustak 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेशको पुस्तक 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("hitopadesko 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("hitopadeško 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेशको 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेश 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
p.include_apocrypha(false)
expect(p.parse("HITOPADESKO PUSTAK 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("HITOPADEŠKO PUSTAK 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेशको पुस्तक 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("HITOPADESKO 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("HITOPADEŠKO 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेशको 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("हितोपदेश 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
`
true
describe "Localized book Eccl (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eccl (ne)", ->
`
expect(p.parse("upadesakko pustak 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("upadešakko pustak 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशकको पुस्तक 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("upadesakko 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("upadešakko 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशकको 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशक 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
p.include_apocrypha(false)
expect(p.parse("UPADESAKKO PUSTAK 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("UPADEŠAKKO PUSTAK 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशकको पुस्तक 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("UPADESAKKO 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("UPADEŠAKKO 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशकको 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("उपदेशक 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
`
true
describe "Localized book SgThree (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: SgThree (ne)", ->
`
expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
`
true
describe "Localized book Song (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Song (ne)", ->
`
expect(p.parse("sulemanko srestʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko srestʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sresṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko sreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šrestʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šrestʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šresṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemanko šreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko srestʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko srestʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sresṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko sreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šrestʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šrestʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šresṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šresṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šreṣtʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šreṣtʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šreṣṭʰagit 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("sulemānko šreṣṭʰagīt 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("सुलेमानको श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
p.include_apocrypha(false)
expect(p.parse("SULEMANKO SRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO SREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMANKO ŠREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO SREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠRESTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠRESTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠRESṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠRESṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠREṢTʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠREṢTʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠREṢṬʰAGIT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SULEMĀNKO ŠREṢṬʰAGĪT 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("सुलेमानको श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("श्रेष्ठगीत 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
`
true
describe "Localized book Jer (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (ne)", ->
`
expect(p.parse("yarmiyako pustak 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("yarmiyāko pustak 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मियाको पुस्तक 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("yarmiyako 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("yarmiyāko 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मियाको 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मिया 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
p.include_apocrypha(false)
expect(p.parse("YARMIYAKO PUSTAK 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("YARMIYĀKO PUSTAK 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मियाको पुस्तक 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("YARMIYAKO 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("YARMIYĀKO 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मियाको 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("यर्मिया 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
`
true
# Nepali (ne) aliases for Ezekiel: every form below must parse to OSIS "Ezek".
describe "Localized book Ezek (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Ezek (ne)", ->
    `
    expect(p.parse("idzakielko pustak 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("इजकिएलको पुस्तक 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("idzakielko 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("इजकिएलको 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("इजकिएल 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("IDZAKIELKO PUSTAK 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("इजकिएलको पुस्तक 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("IDZAKIELKO 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("इजकिएलको 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("इजकिएल 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
    `
    true
# Nepali (ne) aliases for Daniel: every form below must parse to OSIS "Dan".
# Describe/it titles restored from anonymization artifacts
# ("PI:NAME:<NAME>END_PI"); the expected OSIS ids identify the book as Dan.
describe "Localized book Dan (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Dan (ne)", ->
    `
    expect(p.parse("daniyalko pustak 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("dāniyalko pustak 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("दानियलको पुस्तक 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("daniyalko 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("dāniyalko 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("दानियलको 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("दानियल 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("DANIYALKO PUSTAK 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("DĀNIYALKO PUSTAK 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("दानियलको पुस्तक 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("DANIYALKO 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("DĀNIYALKO 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("दानियलको 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("दानियल 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
    `
    true
# Nepali (ne) aliases for Hosea: every form below must parse to OSIS "Hos".
describe "Localized book Hos (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Hos (ne)", ->
    `
    expect(p.parse("होशेको पुस्तक 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("hose 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("hoše 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("होशे 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("होशेको पुस्तक 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("HOSE 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("HOŠE 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("होशे 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
    `
    true
# Nepali (ne) aliases for Joel: every form below must parse to OSIS "Joel".
describe "Localized book Joel (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Joel (ne)", ->
    `
    expect(p.parse("योएलको पुस्तक 1:1").osis()).toEqual("Joel.1.1")
    expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
    expect(p.parse("yoel 1:1").osis()).toEqual("Joel.1.1")
    expect(p.parse("योएल 1:1").osis()).toEqual("Joel.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("योएलको पुस्तक 1:1").osis()).toEqual("Joel.1.1")
    expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
    expect(p.parse("YOEL 1:1").osis()).toEqual("Joel.1.1")
    expect(p.parse("योएल 1:1").osis()).toEqual("Joel.1.1")
    `
    true
# Nepali (ne) aliases for Amos: every form below must parse to OSIS "Amos".
describe "Localized book Amos (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Amos (ne)", ->
    `
    expect(p.parse("आमोसको पुस्तक 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("amos 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("āmos 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("अमोस 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("आमोस 1:1").osis()).toEqual("Amos.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("आमोसको पुस्तक 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("ĀMOS 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("अमोस 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("आमोस 1:1").osis()).toEqual("Amos.1.1")
    `
    true
# Nepali (ne) aliases for Obadiah: every form below must parse to OSIS "Obad".
describe "Localized book Obad (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  # Title restored from an anonymization artifact ("PI:NAME:<NAME>END_PI");
  # it now matches the enclosing describe's book id.
  it "should handle book: Obad (ne)", ->
    `
    expect(p.parse("ओबदियाको पुस्तक 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("obadiya 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("obadiyā 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("ओबदिया 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("ओबदियाको पुस्तक 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("OBADIYA 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("OBADIYĀ 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("ओबदिया 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
    `
    true
# Nepali (ne) aliases for Jonah: every form below must parse to OSIS "Jonah".
# Titles and one test input were corrupted by anonymization artifacts
# ("PI:NAME:<NAME>END_PI"); the uppercase counterpart "JONAH 1:1" below shows
# the corrupted lowercase input must be "Jonah 1:1".
describe "Localized book Jonah (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Jonah (ne)", ->
    `
    expect(p.parse("योनाको पुस्तक 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("yona 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("yonā 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("योना 1:1").osis()).toEqual("Jonah.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("योनाको पुस्तक 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("YONA 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("YONĀ 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("योना 1:1").osis()).toEqual("Jonah.1.1")
    `
    true
# Nepali (ne) aliases for Micah: every form below must parse to OSIS "Mic".
describe "Localized book Mic (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Mic (ne)", ->
    `
    expect(p.parse("मीकाको पुस्तक 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("mika 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("mikā 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("mīka 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("mīkā 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("मिका 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("मीका 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("मीकाको पुस्तक 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("MIKA 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("MIKĀ 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("MĪKA 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("MĪKĀ 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("मिका 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("मीका 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
    `
    true
# Nepali (ne) aliases for Nahum: every form below must parse to OSIS "Nah".
describe "Localized book Nah (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Nah (ne)", ->
    `
    expect(p.parse("नहूमको पुस्तक 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("nahum 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("nahūm 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("नहूम 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("नहूमको पुस्तक 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("NAHUM 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("NAHŪM 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("नहूम 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
    `
    true
# Nepali (ne) aliases for Habakkuk: every form below must parse to OSIS "Hab".
describe "Localized book Hab (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Hab (ne)", ->
    `
    expect(p.parse("हबकूकको पुस्तक 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("habakuk 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("habakūk 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("हबकूक 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("हबकूकको पुस्तक 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("HABAKUK 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("HABAKŪK 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("हबकूक 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
    `
    true
# Nepali (ne) aliases for Zephaniah: every form below must parse to OSIS "Zeph".
describe "Localized book Zeph (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Zeph (ne)", ->
    `
    expect(p.parse("सपन्याहको पुस्तक 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("sapanyah 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("sapanyāh 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("सपन्याह 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("सपन्याहको पुस्तक 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("SAPANYAH 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("SAPANYĀH 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("सपन्याह 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
    `
    true
# Nepali (ne) aliases for Haggai: every form below must parse to OSIS "Hag".
describe "Localized book Hag (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Hag (ne)", ->
    `
    expect(p.parse("हाग्गैको पुस्तक 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("haggəi 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("hāggəi 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("हाग्गै 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("हाग्गैको पुस्तक 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("HAGGƏI 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("HĀGGƏI 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("हाग्गै 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
    `
    true
# Nepali (ne) aliases for Zechariah: every form below must parse to OSIS "Zech".
describe "Localized book Zech (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Zech (ne)", ->
    `
    expect(p.parse("जकरियाको पुस्तक 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("jakariya 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("jakariyā 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("जकरिया 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("जकरियाको पुस्तक 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("JAKARIYA 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("JAKARIYĀ 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("जकरिया 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
    `
    true
# Nepali (ne) aliases for Malachi: every form below must parse to OSIS "Mal".
describe "Localized book Mal (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Mal (ne)", ->
    `
    expect(p.parse("मलाकीको पुस्तक 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("malaki 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("malakī 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("malāki 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("malākī 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("मलाकी 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("मलकी 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("मलाकीको पुस्तक 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("MALAKI 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("MALAKĪ 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("MALĀKI 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("MALĀKĪ 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("मलाकी 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("मलकी 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
    `
    true
# Nepali (ne) aliases for Matthew: every form below must parse to OSIS "Matt".
describe "Localized book Matt (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  # Title restored from an anonymization artifact ("PI:NAME:<NAME>END_PI");
  # it now matches the enclosing describe's book id.
  it "should handle book: Matt (ne)", ->
    `
    expect(p.parse("mattile lekʰeko susmacar 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("mattile lekʰeko susmacār 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("mattile lekʰeko susmācar 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("mattile lekʰeko susmācār 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("mattīle lekʰeko susmacar 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("mattīle lekʰeko susmacār 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("mattīle lekʰeko susmācar 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("mattīle lekʰeko susmācār 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("मत्तीले लेखेको सुसमाचार 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("मत्तीको सुसमाचार 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("mattile 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("mattīle 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("मत्तीले 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("मत्ति 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("मत्ती 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("MATTILE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATTILE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATTILE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATTILE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATTĪLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATTĪLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATTĪLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATTĪLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("मत्तीले लेखेको सुसमाचार 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("मत्तीको सुसमाचार 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATTILE 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATTĪLE 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("मत्तीले 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("मत्ति 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("मत्ती 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
    `
    true
# Nepali (ne) aliases for Mark: every form below must parse to OSIS "Mark".
describe "Localized book Mark (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Mark (ne)", ->
    `
    expect(p.parse("markusle lekʰeko susmacar 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("markusle lekʰeko susmacār 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("markusle lekʰeko susmācar 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("markusle lekʰeko susmācār 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("markūsle lekʰeko susmacar 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("markūsle lekʰeko susmacār 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("markūsle lekʰeko susmācar 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("markūsle lekʰeko susmācār 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कूसले लेखेको सुसमाचार 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कूसको सुसमाचार 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("markusle 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("markūsle 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कूसले 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कुस 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कूश 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कूस 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("र्मकूस 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("र्मकस 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("MARKUSLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARKUSLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARKUSLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARKUSLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARKŪSLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARKŪSLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARKŪSLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARKŪSLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कूसले लेखेको सुसमाचार 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कूसको सुसमाचार 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARKUSLE 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARKŪSLE 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कूसले 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कुस 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कूश 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("मर्कूस 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("र्मकूस 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("र्मकस 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
    `
    true
# Nepali (ne) aliases for Luke: every form below must parse to OSIS "Luke".
# Describe/it titles restored from anonymization artifacts
# ("PI:NAME:<NAME>END_PI"); the expected OSIS ids identify the book as Luke.
describe "Localized book Luke (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Luke (ne)", ->
    `
    expect(p.parse("lukale lekʰeko susmacar 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lukale lekʰeko susmacār 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lukale lekʰeko susmācar 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lukale lekʰeko susmācār 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lukāle lekʰeko susmacar 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lukāle lekʰeko susmacār 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lukāle lekʰeko susmācar 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lukāle lekʰeko susmācār 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lūkale lekʰeko susmacar 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lūkale lekʰeko susmacār 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lūkale lekʰeko susmācar 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lūkale lekʰeko susmācār 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lūkāle lekʰeko susmacar 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lūkāle lekʰeko susmacār 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lūkāle lekʰeko susmācar 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lūkāle lekʰeko susmācār 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("लूकाले लेखेको सुसमाचार 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("लूकाको सुसमाचार 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lukale 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lukāle 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lūkale 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("lūkāle 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("लूकाले 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("लुका 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("लूका 1:1").osis()).toEqual("Luke.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("LUKALE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUKALE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUKALE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUKALE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUKĀLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUKĀLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUKĀLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUKĀLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LŪKALE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LŪKALE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LŪKALE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LŪKALE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LŪKĀLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LŪKĀLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LŪKĀLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LŪKĀLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("लूकाले लेखेको सुसमाचार 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("लूकाको सुसमाचार 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUKALE 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUKĀLE 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LŪKALE 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LŪKĀLE 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("लूकाले 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("लुका 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("लूका 1:1").osis()).toEqual("Luke.1.1")
    `
    true
# Nepali (ne) aliases for 1 John: every form below must parse to OSIS "1John".
# Describe/it titles restored from anonymization artifacts
# ("PI:NAME:<NAME>END_PIJohn"); the expected OSIS ids identify the book as 1John.
describe "Localized book 1John (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1John (ne)", ->
    `
    expect(p.parse("yuhannako pahilo patra 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("yuhannāko pahilo patra 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("yūhannako pahilo patra 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("yūhannāko pahilo patra 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("यूहन्नाको पहिलो पत्र 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. yuhannako 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. yuhannāko 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. yūhannako 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. yūhannāko 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. यूहन्नाको 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 yuhannako 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 yuhannāko 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 yūhannako 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 yūhannāko 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 यूहन्नाको 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. यूहन्ना 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 यूहन्ना 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("YUHANNAKO PAHILO PATRA 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("YUHANNĀKO PAHILO PATRA 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("YŪHANNAKO PAHILO PATRA 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("YŪHANNĀKO PAHILO PATRA 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("यूहन्नाको पहिलो पत्र 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. YUHANNAKO 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. YUHANNĀKO 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. YŪHANNAKO 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. YŪHANNĀKO 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. यूहन्नाको 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 YUHANNAKO 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 YUHANNĀKO 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 YŪHANNAKO 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 YŪHANNĀKO 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 यूहन्नाको 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. यूहन्ना 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 यूहन्ना 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
    `
    true
# Nepali (ne) aliases for 2 John: every form below must parse to OSIS "2John".
describe "Localized book 2John (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2John (ne)", ->
    `
    expect(p.parse("यूहन्नाको दोस्त्रो पत्र 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("yuhannako dostro patra 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("yuhannāko dostro patra 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("yūhannako dostro patra 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("yūhannāko dostro patra 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("यूहन्नाको दोस्रो पत्र 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. yuhannako 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. yuhannāko 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. yūhannako 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. yūhannāko 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. यूहन्नाको 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 yuhannako 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 yuhannāko 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 yūhannako 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 yūhannāko 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 यूहन्नाको 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. यूहन्ना 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 यूहन्ना 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("यूहन्नाको दोस्त्रो पत्र 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("YUHANNAKO DOSTRO PATRA 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("YUHANNĀKO DOSTRO PATRA 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("YŪHANNAKO DOSTRO PATRA 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("YŪHANNĀKO DOSTRO PATRA 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("यूहन्नाको दोस्रो पत्र 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. YUHANNAKO 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. YUHANNĀKO 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. YŪHANNAKO 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. YŪHANNĀKO 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. यूहन्नाको 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 YUHANNAKO 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 YUHANNĀKO 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 YŪHANNAKO 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 YŪHANNĀKO 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 यूहन्नाको 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2. यूहन्ना 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2 यूहन्ना 1:1").osis()).toEqual("2John.1.1")
    expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
    `
    true
# Nepali (ne) aliases for 3 John: every form below must parse to OSIS "3John".
describe "Localized book 3John (ne)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 3John (ne)", ->
    `
    expect(p.parse("यूहन्नाको तेस्त्रो पत्र 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("yuhannako testro patra 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("yuhannāko testro patra 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("yūhannako testro patra 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("yūhannāko testro patra 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("यूहन्नाको तेस्रो पत्र 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. yuhannako 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. yuhannāko 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. yūhannako 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. yūhannāko 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. यूहन्नाको 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 yuhannako 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 yuhannāko 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 yūhannako 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 yūhannāko 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 यूहन्नाको 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. यूहन्ना 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 यूहन्ना 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
    // Re-run the same aliases in uppercase with the apocrypha disabled.
    p.include_apocrypha(false)
    expect(p.parse("यूहन्नाको तेस्त्रो पत्र 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("YUHANNAKO TESTRO PATRA 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("YUHANNĀKO TESTRO PATRA 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("YŪHANNAKO TESTRO PATRA 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("YŪHANNĀKO TESTRO PATRA 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("यूहन्नाको तेस्रो पत्र 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. YUHANNAKO 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. YUHANNĀKO 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. YŪHANNAKO 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. YŪHANNĀKO 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. यूहन्नाको 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 YUHANNAKO 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 YUHANNĀKO 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 YŪHANNAKO 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 YŪHANNĀKO 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 यूहन्नाको 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3. यूहन्ना 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3 यूहन्ना 1:1").osis()).toEqual("3John.1.1")
    expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
    `
    true
# Verifies that the parser recognizes localized names for the book of John
# in Nepali (ne), in romanized (with/without macron diacritics), Devanagari,
# and English forms, both before and after disabling the Apocrypha.
# NOTE: the original generated description strings contained a leaked
# template placeholder ("PI:NAME:<NAME>END_PI"); corrected to "John" to
# match the sibling blocks' "Localized book <OSIS id> (<locale>)" pattern.
describe "Localized book John (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: John (ne)", ->
`
expect(p.parse("yuhannale lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannale lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannale lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannale lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannāle lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannāle lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannāle lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannāle lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannale lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannale lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannale lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannale lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannāle lekʰeko susmacar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannāle lekʰeko susmacār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannāle lekʰeko susmācar 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannāle lekʰeko susmācār 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाले लेखेको सुसमाचार 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाको सुसमाचार 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannale 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yuhannāle 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannale 1:1").osis()).toEqual("John.1.1")
expect(p.parse("yūhannāle 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाले 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहान्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यहून्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("युहन्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
// Same names must still resolve with the Apocrypha excluded (uppercase forms).
p.include_apocrypha(false)
expect(p.parse("YUHANNALE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNALE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNALE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNALE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNĀLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNĀLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNĀLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNĀLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNALE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNALE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNALE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNALE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMACAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMACĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMĀCAR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNĀLE LEKʰEKO SUSMĀCĀR 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाले लेखेको सुसमाचार 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाको सुसमाचार 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNALE 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YUHANNĀLE 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNALE 1:1").osis()).toEqual("John.1.1")
expect(p.parse("YŪHANNĀLE 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्नाले 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहान्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यहून्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("युहन्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहन्ना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("यूहना 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
`
true
# Verifies that the parser recognizes localized names for the book of Acts
# in Nepali (ne): romanized variants (with/without macrons), Devanagari, and
# English, both with the Apocrypha included and excluded.
describe "Localized book Acts (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Acts (ne)", ->
`
expect(p.parse("preritharuka kam 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharuka kām 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharukā kam 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharukā kām 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharūka kam 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharūka kām 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharūkā kam 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("preritharūkā kām 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("प्रेरितहरूका काम 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("प्रेरित 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
// Same names must still resolve with the Apocrypha excluded (uppercase forms).
p.include_apocrypha(false)
expect(p.parse("PRERITHARUKA KAM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARUKA KĀM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARUKĀ KAM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARUKĀ KĀM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARŪKA KAM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARŪKA KĀM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARŪKĀ KAM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("PRERITHARŪKĀ KĀM 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("प्रेरितहरूका काम 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("प्रेरित 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
`
true
# Verifies that the parser recognizes localized names for the book of Romans
# in Nepali (ne): all macron permutations of the romanized name, Devanagari
# long and short forms, and English, with the Apocrypha included and excluded.
describe "Localized book Rom (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rom (ne)", ->
`
expect(p.parse("रोमीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulai patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulaī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulāi patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulāī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlai patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlaī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlāi patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlāī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulai patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulaī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulāi patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulāī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlai patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlaī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlāi patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlāī patra 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमीहरूलाई पत्र 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulai 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulaī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulāi 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharulāī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlai 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlaī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlāi 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romiharūlāī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulai 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulaī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulāi 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharulāī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlai 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlaī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlāi 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("romīharūlāī 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमीहरूलाई 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमी 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
// Same names must still resolve with the Apocrypha excluded (uppercase forms).
p.include_apocrypha(false)
expect(p.parse("रोमीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULAI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLAI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULAI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLAI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLĀI PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमीहरूलाई पत्र 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULAI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULAĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULĀI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARULĀĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLAI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLAĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLĀI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMIHARŪLĀĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULAI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULAĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULĀI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARULĀĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLAI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLAĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLĀI 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMĪHARŪLĀĪ 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमीहरूलाई 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("रोमी 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
`
true
# Verifies that the parser recognizes localized names for 2 Corinthians in
# Nepali (ne): macron permutations of the romanized name with "dostro patra",
# Devanagari forms, and numbered prefixes ("2." / "2"), with the Apocrypha
# included and excluded.
describe "Localized book 2Cor (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Cor (ne)", ->
`
expect(p.parse("कोरिन्थीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharulai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharulaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharulāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharulāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharūlai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharūlaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharūlāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰiharūlāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharulai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharulaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharulāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharulāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharūlai dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharūlaī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharūlāi dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("korintʰīharūlāī dostro patra 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("कोरिन्थीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharulai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharulaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharulāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharulāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharūlai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharūlaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharūlāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰiharūlāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharulai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharulaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharulāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharulāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharūlai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharūlaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharūlāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. korintʰīharūlāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharulai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharulaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharulāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharulāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharūlai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharūlaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharūlāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰiharūlāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharulai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharulaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharulāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharulāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharūlai 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharūlaī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharūlāi 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 korintʰīharūlāī 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
// Same names must still resolve with the Apocrypha excluded (uppercase forms).
p.include_apocrypha(false)
expect(p.parse("कोरिन्थीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰIHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("कोरिन्थीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARULAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARULAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARULAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARULAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARULAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARULĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARŪLAI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 कोरिन्थीहरूलाई 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 कोरिन्थी 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
`
true
# Verifies that the parser recognizes localized names for 1 Corinthians in
# Nepali (ne): macron permutations of the romanized name with "pahilo patra",
# Devanagari forms, and numbered prefixes ("1." / "1"), with the Apocrypha
# included and excluded.
describe "Localized book 1Cor (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Cor (ne)", ->
`
expect(p.parse("कोरिन्थीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharulai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharulaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharulāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharulāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharūlai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharūlaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharūlāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰiharūlāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharulai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharulaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharulāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharulāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharūlai pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharūlaī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharūlāi pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("korintʰīharūlāī pahilo patra 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("कोरिन्थीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharulai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharulaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharulāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharulāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharūlai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharūlaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharūlāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰiharūlāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharulai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharulaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharulāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharulāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharūlai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharūlaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharūlāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. korintʰīharūlāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharulai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharulaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharulāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharulāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharūlai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharūlaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharūlāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰiharūlāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharulai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharulaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharulāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharulāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharūlai 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharūlaī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharūlāi 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 korintʰīharūlāī 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
// Same names must still resolve with the Apocrypha excluded (uppercase forms).
p.include_apocrypha(false)
expect(p.parse("कोरिन्थीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰIHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("KORINTʰĪHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("कोरिन्थीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARULAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARULAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARULAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰIHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARULAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARULAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARULĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARULĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARŪLAI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARŪLAĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARŪLĀI 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTʰĪHARŪLĀĪ 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 कोरिन्थीहरूलाई 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 कोरिन्थी 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
`
true
describe "Localized book Gal (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gal (ne)", ->
`
expect(p.parse("गलातीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlai patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlaī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāi patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāī patra 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatiharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galatīharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātiharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharulāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlai 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlaī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāi 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("galātīharūlāī 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलाती 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
p.include_apocrypha(false)
expect(p.parse("गलातीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀI PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई पत्र 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATIHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATĪHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTIHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARULĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLAĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀI 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALĀTĪHARŪLĀĪ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलातीहरूलाई 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("गलाती 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
`
true
# Exercises the Nepali (ne) localizations of Ephesians: the Devanagari book
# names and every romanized-transliteration vowel permutation (i/ī, u/ū, a/ā
# in "epʰisīharūlāī"), with and without the "patra" suffix, in lowercase and
# uppercase, both before and after disabling the apocrypha option. Every
# spelling must resolve to the OSIS reference "Eph.1.1".
describe "Localized book Eph (ne)", ->
	p = {}
	# Rebuild the parser before each spec with the strict option set used
	# throughout this generated suite (ignore standalone books/sequences,
	# book-chapter compaction, delete captive end digits).
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eph (ne)", ->
		`
		expect(p.parse("एफिसीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulai patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulaī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulāi patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulāī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlai patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlaī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlāi patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlāī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulai patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulaī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulāi patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulāī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlai patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlaī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlāi patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlāī patra 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसीहरूलाई पत्र 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulai 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulaī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulāi 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharulāī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlai 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlaī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlāi 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisiharūlāī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulai 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulaī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulāi 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharulāī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlai 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlaī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlāi 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("epʰisīharūlāī 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसीहरूलाई 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसी 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
		p.include_apocrypha(false)
		expect(p.parse("एफिसीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULAI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLAI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULAI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLAI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLĀI PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसीहरूलाई पत्र 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULAI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULAĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULĀI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARULĀĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLAI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLAĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLĀI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISIHARŪLĀĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULAI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULAĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULĀI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARULĀĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLAI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLAĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLĀI 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPʰISĪHARŪLĀĪ 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसीहरूलाई 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("एफिसी 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
		`
		true
# Exercises the Nepali (ne) localizations of Philippians; every spelling must
# resolve to the OSIS reference "Phil.1.1".
# Fix: the describe/it labels contained an anonymization placeholder
# ("PI:NAME:<NAME>END_PI"); all assertions in this block target OSIS "Phil",
# so the labels are restored to "Phil" to match the sibling suites.
describe "Localized book Phil (ne)", ->
	p = {}
	# Rebuild the parser before each spec with the suite's shared strict options.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil (ne)", ->
		`
		expect(p.parse("फिलिप्पीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulai patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulaī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulāi patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulāī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlai patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlaī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlāi patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlāī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulai patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulaī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulāi patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulāī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlai patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlaī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlāi patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlāī patra 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पीहरूलाई पत्र 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulai 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulaī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulāi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharulāī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlai 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlaī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlāi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippiharūlāī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulai 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulaī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulāi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharulāī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlai 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlaī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlāi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("pʰilippīharūlāī 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पीहरूलाई 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पी 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
		p.include_apocrypha(false)
		expect(p.parse("फिलिप्पीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULAI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLAI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULAI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLAI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLĀI PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पीहरूलाई पत्र 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULAI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULAĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULĀI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARULĀĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLAI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLAĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLĀI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPIHARŪLĀĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULAI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULAĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULĀI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARULĀĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLAI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLAĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLĀI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PʰILIPPĪHARŪLĀĪ 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पीहरूलाई 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("फिलिप्पी 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
		`
		true
# Exercises the Nepali (ne) localizations of Colossians: Devanagari book
# names and every romanized vowel permutation (i/ī, u/ū, a/ā in
# "kalassīharūlāī"), with and without the "patra" suffix, in lowercase and
# uppercase, both before and after disabling the apocrypha option. Every
# spelling must resolve to the OSIS reference "Col.1.1".
describe "Localized book Col (ne)", ->
	p = {}
	# Rebuild the parser before each spec with the suite's shared strict options.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Col (ne)", ->
		`
		expect(p.parse("कलस्सीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulai patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulaī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulāi patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulāī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlai patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlaī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlāi patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlāī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulai patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulaī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulāi patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulāī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlai patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlaī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlāi patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlāī patra 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सीहरूलाई पत्र 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulai 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulaī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulāi 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharulāī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlai 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlaī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlāi 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassiharūlāī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulai 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulaī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulāi 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharulāī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlai 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlaī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlāi 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("kalassīharūlāī 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सीहरूलाई 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सी 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
		p.include_apocrypha(false)
		expect(p.parse("कलस्सीहरूलाई पावलको पत्र 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULAI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULĀI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLAI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLĀI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULAI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULĀI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLAI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLAĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLĀI PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLĀĪ PATRA 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सीहरूलाई पत्र 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULAI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULAĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULĀI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARULĀĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLAI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLAĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLĀI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSIHARŪLĀĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULAI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULAĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULĀI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARULĀĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLAI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLAĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLĀI 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KALASSĪHARŪLĀĪ 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सीहरूलाई 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("कलस्सी 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
		`
		true
# Exercises the Nepali (ne) localizations of 2 Thessalonians: the long-form
# Devanagari titles, every romanized vowel permutation of
# "tʰissalonikīharūlāī" with the "dostro patra" (second letter) suffix, and
# the "2."/"2 "-prefixed short forms, in lowercase and uppercase, both
# before and after disabling the apocrypha option. Every spelling must
# resolve to the OSIS reference "2Thess.1.1".
describe "Localized book 2Thess (ne)", ->
	p = {}
	# Rebuild the parser before each spec with the suite's shared strict options.
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (ne)", ->
		`
		expect(p.parse("थिस्सलोनिकीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharulai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharulaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharulāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharulāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharūlai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharūlaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharūlāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikiharūlāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharulai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharulaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharulāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharulāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharūlai dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharūlaī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharūlāi dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("tʰissalonikīharūlāī dostro patra 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("थिस्सलोनिकीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharulai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharulaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharulāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharulāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharūlai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharūlaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharūlāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikiharūlāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharulai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharulaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharulāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharulāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharūlai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharūlaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharūlāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. tʰissalonikīharūlāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharulai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharulaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharulāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharulāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharūlai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharūlaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharūlāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikiharūlāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharulai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharulaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharulāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharulāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharūlai 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharūlaī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharūlāi 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 tʰissalonikīharūlāī 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("थिस्सलोनिकीहरूलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKIHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARULAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARULAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARULĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARULĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARŪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARŪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARŪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TʰISSALONIKĪHARŪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("थिस्सलोनिकीहरूलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARULAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARULAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 थिस्सलोनिकी 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
		`
		true
describe "Localized book 1Thess (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Thess (ne)", ->
`
expect(p.parse("थिस्सलोनिकीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharulāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikiharūlāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharulāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlai pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlaī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlāi pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("tʰissalonikīharūlāī pahilo patra 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("थिस्सलोनिकीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikiharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. tʰissalonikīharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikiharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharulāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlai 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlaī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlāi 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 tʰissalonikīharūlāī 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("थिस्सलोनिकीहरूलाई पावलको पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKIHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARULĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLAI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("TʰISSALONIKĪHARŪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("थिस्सलोनिकीहरूलाई पहिलो पत्र 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKIHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARULĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLAI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLAĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLĀI 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TʰISSALONIKĪHARŪLĀĪ 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकीहरूलाई 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 थिस्सलोनिकी 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (ne)", ->
`
expect(p.parse("तिमोथीलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("timotʰilai dostro patra 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("timotʰilaī dostro patra 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("timotʰilāi dostro patra 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("timotʰilāī dostro patra 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("timotʰīlai dostro patra 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("timotʰīlaī dostro patra 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("timotʰīlāi dostro patra 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("timotʰīlāī dostro patra 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("तिमोथीलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. timotʰilai 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. timotʰilaī 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. timotʰilāi 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. timotʰilāī 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. timotʰīlai 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. timotʰīlaī 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. timotʰīlāi 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. timotʰīlāī 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 timotʰilai 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 timotʰilaī 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 timotʰilāi 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 timotʰilāī 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 timotʰīlai 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 timotʰīlaī 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 timotʰīlāi 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 timotʰīlāī 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. तिमोथीलाई 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 तिमोथीलाई 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. तिमोथी 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 तिमोथी 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("तिमोथीलाई पावलको दोस्रो पत्र 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TIMOTʰILAI DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TIMOTʰILAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TIMOTʰILĀI DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TIMOTʰILĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TIMOTʰĪLAI DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TIMOTʰĪLAĪ DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TIMOTʰĪLĀI DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TIMOTʰĪLĀĪ DOSTRO PATRA 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("तिमोथीलाई दोस्त्रो पत्र 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTʰILAI 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTʰILAĪ 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTʰILĀI 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTʰILĀĪ 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTʰĪLAI 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTʰĪLAĪ 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTʰĪLĀI 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTʰĪLĀĪ 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTʰILAI 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTʰILAĪ 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTʰILĀI 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTʰILĀĪ 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTʰĪLAI 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTʰĪLAĪ 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTʰĪLĀI 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTʰĪLĀĪ 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. तिमोथीलाई 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 तिमोथीलाई 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. तिमोथी 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 तिमोथी 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
`
true
describe "Localized book 1Tim (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Tim (ne)", ->
`
expect(p.parse("तिमोथीलाईर् पावलको पहिलो पत्र 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("timotʰilai pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("timotʰilaī pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("timotʰilāi pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("timotʰilāī pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("timotʰīlai pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("timotʰīlaī pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("timotʰīlāi pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("timotʰīlāī pahilo patra 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("तिमोथीलाई पहिलो पत्र 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. timotʰilai 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. timotʰilaī 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. timotʰilāi 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. timotʰilāī 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. timotʰīlai 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. timotʰīlaī 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. timotʰīlāi 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. timotʰīlāī 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 timotʰilai 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 timotʰilaī 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 timotʰilāi 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 timotʰilāī 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 timotʰīlai 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 timotʰīlaī 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 timotʰīlāi 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 timotʰīlāī 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. तिमोथीलाई 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 तिमोथीलाई 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. तिमोथी 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 तिमोथी 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("तिमोथीलाईर् पावलको पहिलो पत्र 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("TIMOTʰILAI PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("TIMOTʰILAĪ PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("TIMOTʰILĀI PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("TIMOTʰILĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("TIMOTʰĪLAI PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("TIMOTʰĪLAĪ PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("TIMOTʰĪLĀI PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("TIMOTʰĪLĀĪ PAHILO PATRA 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("तिमोथीलाई पहिलो पत्र 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTʰILAI 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTʰILAĪ 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTʰILĀI 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTʰILĀĪ 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTʰĪLAI 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTʰĪLAĪ 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTʰĪLĀI 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTʰĪLĀĪ 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTʰILAI 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTʰILAĪ 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTʰILĀI 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTʰILĀĪ 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTʰĪLAI 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTʰĪLAĪ 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTʰĪLĀI 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTʰĪLĀĪ 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. तिमोथीलाई 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 तिमोथीलाई 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. तिमोथी 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 तिमोथी 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
`
true
describe "Localized book Titus (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Titus (ne)", ->
`
expect(p.parse("तीतसलाई पावलको पत्र 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("titaslai patra 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("titaslaī patra 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("titaslāi patra 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("titaslāī patra 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("tītaslai patra 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("tītaslaī patra 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("tītaslāi patra 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("tītaslāī patra 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("तीतसलाई पत्र 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("titaslai 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("titaslaī 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("titaslāi 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("titaslāī 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("tītaslai 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("tītaslaī 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("tītaslāi 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("tītaslāī 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("तीतसलाई 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("तीतस 1:1").osis()).toEqual("Titus.1.1")
p.include_apocrypha(false)
expect(p.parse("तीतसलाई पावलको पत्र 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITASLAI PATRA 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITASLAĪ PATRA 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITASLĀI PATRA 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITASLĀĪ PATRA 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TĪTASLAI PATRA 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TĪTASLAĪ PATRA 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TĪTASLĀI PATRA 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TĪTASLĀĪ PATRA 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("तीतसलाई पत्र 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITASLAI 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITASLAĪ 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITASLĀI 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITASLĀĪ 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TĪTASLAI 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TĪTASLAĪ 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TĪTASLĀI 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TĪTASLĀĪ 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("तीतसलाई 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("तीतस 1:1").osis()).toEqual("Titus.1.1")
`
true
describe "Localized book Phlm (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phlm (ne)", ->
`
expect(p.parse("फिलेमोनलाई पावलको पत्र 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("pʰilemonlai patra 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("pʰilemonlaī patra 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("pʰilemonlāi patra 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("pʰilemonlāī patra 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("फिलेमोनलाई पत्र 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("pʰilemonlai 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("pʰilemonlaī 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("pʰilemonlāi 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("pʰilemonlāī 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("फिलेमोनलाई 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("फिलेमोन 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
p.include_apocrypha(false)
expect(p.parse("फिलेमोनलाई पावलको पत्र 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PʰILEMONLAI PATRA 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PʰILEMONLAĪ PATRA 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PʰILEMONLĀI PATRA 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PʰILEMONLĀĪ PATRA 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("फिलेमोनलाई पत्र 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PʰILEMONLAI 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PʰILEMONLAĪ 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PʰILEMONLĀI 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PʰILEMONLĀĪ 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("फिलेमोनलाई 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("फिलेमोन 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
`
true
describe "Localized book Heb (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Heb (ne)", ->
`
expect(p.parse("hibruharuko nimti patra 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("hibruharūko nimti patra 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("hibrūharuko nimti patra 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("hibrūharūko nimti patra 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("हिब्रूहरूको निम्ति पत्र 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("हिब्रूहरूको निम्ति 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("hibruharuko nimti 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("hibruharūko nimti 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("hibrūharuko nimti 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("hibrūharūko nimti 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("हिब्रू 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
p.include_apocrypha(false)
expect(p.parse("HIBRUHARUKO NIMTI PATRA 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HIBRUHARŪKO NIMTI PATRA 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HIBRŪHARUKO NIMTI PATRA 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HIBRŪHARŪKO NIMTI PATRA 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("हिब्रूहरूको निम्ति पत्र 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("हिब्रूहरूको निम्ति 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HIBRUHARUKO NIMTI 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HIBRUHARŪKO NIMTI 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HIBRŪHARUKO NIMTI 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HIBRŪHARŪKO NIMTI 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("हिब्रू 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
`
true
describe "Localized book Jas (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jas (ne)", ->
`
expect(p.parse("yakubko patra 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("yakūbko patra 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("yākubko patra 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("yākūbko patra 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("याकूबको पत्र 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("yakubko 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("yakūbko 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("yākubko 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("yākūbko 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("याकूबको 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("याकूब 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
p.include_apocrypha(false)
expect(p.parse("YAKUBKO PATRA 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("YAKŪBKO PATRA 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("YĀKUBKO PATRA 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("YĀKŪBKO PATRA 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("याकूबको पत्र 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("YAKUBKO 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("YAKŪBKO 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("YĀKUBKO 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("YĀKŪBKO 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("याकूबको 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("याकूब 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
`
true
describe "Localized book 2Pet (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Pet (ne)", ->
`
expect(p.parse("पत्रुसको दोस्त्रो पत्र 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("patrusko dostro patra 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("पत्रुसको दोस्रो पत्र 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. patrusko 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. पत्रुसको 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 patrusko 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 पत्रुसको 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. पत्रुस 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 पत्रुस 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("पत्रुसको दोस्त्रो पत्र 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("PATRUSKO DOSTRO PATRA 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("पत्रुसको दोस्रो पत्र 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PATRUSKO 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. पत्रुसको 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PATRUSKO 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 पत्रुसको 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. पत्रुस 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 पत्रुस 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
`
true
describe "Localized book 1Pet (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Pet (ne)", ->
`
expect(p.parse("patrusko pahilo patra 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("पत्रुसको पहिलो पत्र 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. patrusko 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. पत्रुसको 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 patrusko 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 पत्रुसको 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. पत्रुस 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 पत्रुस 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("PATRUSKO PAHILO PATRA 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("पत्रुसको पहिलो पत्र 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PATRUSKO 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. पत्रुसको 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PATRUSKO 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 पत्रुसको 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. पत्रुस 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 पत्रुस 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (ne)", ->
`
expect(p.parse("yahudako patra 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("yahudāko patra 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("yahūdako patra 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("yahūdāko patra 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("यहूदाको पत्र 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("yahudako 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("yahudāko 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("yahūdako 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("yahūdāko 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("यहूदाको 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("यहूदा 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
p.include_apocrypha(false)
expect(p.parse("YAHUDAKO PATRA 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("YAHUDĀKO PATRA 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("YAHŪDAKO PATRA 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("YAHŪDĀKO PATRA 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("यहूदाको पत्र 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("YAHUDAKO 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("YAHUDĀKO 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("YAHŪDAKO 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("YAHŪDĀKO 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("यहूदाको 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("यहूदा 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
`
true
describe "Localized book Tob (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Tob (ne)", ->
`
expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
`
true
describe "Localized book Jdt (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jdt (ne)", ->
`
expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
`
true
describe "Localized book Bar (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bar (ne)", ->
`
expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
`
true
describe "Localized book Sus (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sus (ne)", ->
`
expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
`
true
describe "Localized book 2Macc (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Macc (ne)", ->
`
expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
`
true
describe "Localized book 3Macc (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3Macc (ne)", ->
`
expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
`
true
describe "Localized book 4Macc (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 4Macc (ne)", ->
`
expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
`
true
describe "Localized book 1Macc (ne)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Macc (ne)", ->
`
expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
`
true
describe "Miscellaneous tests", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should return the expected language", ->
expect(p.languages).toEqual ["ne"]
it "should handle ranges (ne)", ->
expect(p.parse("Titus 1:1 - 2").osis()).toEqual "Titus.1.1-Titus.1.2"
expect(p.parse("Matt 1-2").osis()).toEqual "Matt.1-Matt.2"
expect(p.parse("Phlm 2 - 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
it "should handle chapters (ne)", ->
expect(p.parse("Titus 1:1, chapter 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 CHAPTER 6").osis()).toEqual "Matt.3.4,Matt.6"
it "should handle verses (ne)", ->
expect(p.parse("Exod 1:1 verse 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VERSE 6").osis()).toEqual "Phlm.1.6"
it "should handle 'and' (ne)", ->
expect(p.parse("Exod 1:1 and 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 AND 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
it "should handle titles (ne)", ->
expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
it "should handle 'ff' (ne)", ->
expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
it "should handle translations (ne)", ->
expect(p.parse("Lev 1 (ERV)").osis_and_translations()).toEqual [["Lev.1", "ERV"]]
expect(p.parse("lev 1 erv").osis_and_translations()).toEqual [["Lev.1", "ERV"]]
it "should handle book ranges (ne)", ->
p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
expect(p.parse("1 - 3 yuhannako").osis()).toEqual "1John.1-3John.1"
expect(p.parse("1 - 3 yuhannāko").osis()).toEqual "1John.1-3John.1"
expect(p.parse("1 - 3 yūhannako").osis()).toEqual "1John.1-3John.1"
expect(p.parse("1 - 3 yūhannāko").osis()).toEqual "1John.1-3John.1"
it "should handle boundaries (ne)", ->
p.set_options {book_alone_strategy: "full"}
expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
|
[
{
"context": ".createHash('sha1')\n user = req.body\n password = sha1.update(user.password)\n crypto.randomBytes 12, (ex, chro",
"end": 577,
"score": 0.9412283897399902,
"start": 566,
"tag": "PASSWORD",
"value": "sha1.update"
},
{
"context": "')\n user = req.body\n password = sh... | src/server/frontend.coffee | streed/hydraconfig | 0 | log4js = require 'log4js'
app = require('./app').app
crypto = require 'crypto'
Q = require 'q'
_ = require 'underscore'
LOG = log4js.getLogger 'frontend'
checkAuth = (req, res, next) ->
if req.user
return next()
res.redirect "/login"
app.get '/', (req, res) ->
if req.user
res.redirect '/configs'
else
res.render 'index'
app.get '/register', (req, res) ->
res.render 'register',
error: req.query.error
message: req.query.message
app.post '/register', (req, res) ->
sha1 = crypto.createHash('sha1')
user = req.body
password = sha1.update(user.password)
crypto.randomBytes 12, (ex, chroot) ->
app.db.User.create(
email: user.email
password: password.digest('hex')
firstName: user.firstName
lastName: user.lastName
zkChroot: app.zoo.zkBase + chroot.toString('hex') + "/"
).success( (user) ->
crypto.randomBytes 12, (ex, buf) ->
clientId = buf.toString 'hex'
crypto.randomBytes 24, (ex, buf2) ->
clientSecret = buf2.toString 'hex'
app.db.OauthClient.create({
clientId: clientId
clientSecret: clientSecret
type: "internal"
}).success (oauthClient) ->
oauthClient.setUser(user).success (client) ->
app.zoo.create user.zkChroot.slice(0, -1), new Buffer(JSON.stringify({"total": 0, "configs": {}})), (err, stat) ->
if err
LOG.error err
if stat
res.redirect '/login'
else
LOG.error stat
).error( (errors) ->
if errors.code == 'ER_DUP_ENTRY'
res.redirect '/register?error=Could not Register&message=Are you already registered?'
)
app.get '/login', (req, res) ->
if req.user
res.redirect '/configs'
res.render 'login',
error: req.query.error
message: req.query.message
app.post '/login', app.passport.authenticate('local', {successRedirect: '/configs', failureRedirect: '/login?error=Could not Login&message=Email or Password were invalid.'})
app.get '/logout', checkAuth, (req, res) ->
app.db.AccessToken.destroy({accessToken: req.session.accessToken, session: true}).success () ->
req.logout()
req.session.accessToken = null
res.redirect '/login'
app.get '/configs', checkAuth, (req, res) ->
res.render 'configs',
error: req.query.error
message: req.query.message
app.post '/configs', checkAuth, (req, res) ->
if not /^[a-z0-9]+(-[a-z0-9]+)*$/ig.test req.body.configName
res.redirect '/configs?error=' + req.body.configName + '&message=Config name must follow the following format: ^[a-z0-9]+(-[a-z0-9]+)*$'
else
LOG.info "Checking if " + req.user.zkChroot + req.body.configName + " exists"
app.zoo.exists req.user.zkChroot + req.body.configName, (err, stat) ->
if err
LOG.error err.stack
if stat
LOG.error req.body.configName + " exists"
else
LOG.info "Creating " + req.user.zkChroot + req.body.configName + "'"
data = JSON.stringify
name: req.body.configName
conf: []
app.zoo.create req.user.zkChroot + req.body.configName, new Buffer(data), (err, path) ->
if err
LOG.error err.stack
if path
LOG.info "Created new config '/configs/" + req.body.configName + "'"
res.redirect '/configs/' + req.body.configName
else
LOG.error "Could not create the path...for some reason."
res.redirect '/configs?error=' + req.body.configName + '&message=Could not create or exists already'
app.get '/configs/:config', checkAuth, (req, res) ->
if /^[a-z0-9]+(-[a-z0-9]+)*$/ig.test req.params.config
res.render 'view',
config: req.params.config
else
res.status(500).end()
app.get '/access', checkAuth, (req, res) ->
res.render 'access',
user: req.user
| 83451 | log4js = require 'log4js'
app = require('./app').app
crypto = require 'crypto'
Q = require 'q'
_ = require 'underscore'
LOG = log4js.getLogger 'frontend'
checkAuth = (req, res, next) ->
if req.user
return next()
res.redirect "/login"
app.get '/', (req, res) ->
if req.user
res.redirect '/configs'
else
res.render 'index'
app.get '/register', (req, res) ->
res.render 'register',
error: req.query.error
message: req.query.message
app.post '/register', (req, res) ->
sha1 = crypto.createHash('sha1')
user = req.body
password = <PASSWORD>(user.<PASSWORD>)
crypto.randomBytes 12, (ex, chroot) ->
app.db.User.create(
email: user.email
password: <PASSWORD>')
firstName: user.firstName
lastName: user.lastName
zkChroot: app.zoo.zkBase + chroot.toString('hex') + "/"
).success( (user) ->
crypto.randomBytes 12, (ex, buf) ->
clientId = buf.toString 'hex'
crypto.randomBytes 24, (ex, buf2) ->
clientSecret = buf2.toString 'hex'
app.db.OauthClient.create({
clientId: clientId
clientSecret: clientSecret
type: "internal"
}).success (oauthClient) ->
oauthClient.setUser(user).success (client) ->
app.zoo.create user.zkChroot.slice(0, -1), new Buffer(JSON.stringify({"total": 0, "configs": {}})), (err, stat) ->
if err
LOG.error err
if stat
res.redirect '/login'
else
LOG.error stat
).error( (errors) ->
if errors.code == 'ER_DUP_ENTRY'
res.redirect '/register?error=Could not Register&message=Are you already registered?'
)
app.get '/login', (req, res) ->
if req.user
res.redirect '/configs'
res.render 'login',
error: req.query.error
message: req.query.message
app.post '/login', app.passport.authenticate('local', {successRedirect: '/configs', failureRedirect: '/login?error=Could not Login&message=Email or Password were invalid.'})
app.get '/logout', checkAuth, (req, res) ->
app.db.AccessToken.destroy({accessToken: req.session.accessToken, session: true}).success () ->
req.logout()
req.session.accessToken = null
res.redirect '/login'
app.get '/configs', checkAuth, (req, res) ->
res.render 'configs',
error: req.query.error
message: req.query.message
app.post '/configs', checkAuth, (req, res) ->
if not /^[a-z0-9]+(-[a-z0-9]+)*$/ig.test req.body.configName
res.redirect '/configs?error=' + req.body.configName + '&message=Config name must follow the following format: ^[a-z0-9]+(-[a-z0-9]+)*$'
else
LOG.info "Checking if " + req.user.zkChroot + req.body.configName + " exists"
app.zoo.exists req.user.zkChroot + req.body.configName, (err, stat) ->
if err
LOG.error err.stack
if stat
LOG.error req.body.configName + " exists"
else
LOG.info "Creating " + req.user.zkChroot + req.body.configName + "'"
data = JSON.stringify
name: req.body.configName
conf: []
app.zoo.create req.user.zkChroot + req.body.configName, new Buffer(data), (err, path) ->
if err
LOG.error err.stack
if path
LOG.info "Created new config '/configs/" + req.body.configName + "'"
res.redirect '/configs/' + req.body.configName
else
LOG.error "Could not create the path...for some reason."
res.redirect '/configs?error=' + req.body.configName + '&message=Could not create or exists already'
app.get '/configs/:config', checkAuth, (req, res) ->
if /^[a-z0-9]+(-[a-z0-9]+)*$/ig.test req.params.config
res.render 'view',
config: req.params.config
else
res.status(500).end()
app.get '/access', checkAuth, (req, res) ->
res.render 'access',
user: req.user
| true | log4js = require 'log4js'
app = require('./app').app
crypto = require 'crypto'
Q = require 'q'
_ = require 'underscore'
LOG = log4js.getLogger 'frontend'
checkAuth = (req, res, next) ->
if req.user
return next()
res.redirect "/login"
app.get '/', (req, res) ->
if req.user
res.redirect '/configs'
else
res.render 'index'
app.get '/register', (req, res) ->
res.render 'register',
error: req.query.error
message: req.query.message
app.post '/register', (req, res) ->
sha1 = crypto.createHash('sha1')
user = req.body
password = PI:PASSWORD:<PASSWORD>END_PI(user.PI:PASSWORD:<PASSWORD>END_PI)
crypto.randomBytes 12, (ex, chroot) ->
app.db.User.create(
email: user.email
password: PI:PASSWORD:<PASSWORD>END_PI')
firstName: user.firstName
lastName: user.lastName
zkChroot: app.zoo.zkBase + chroot.toString('hex') + "/"
).success( (user) ->
crypto.randomBytes 12, (ex, buf) ->
clientId = buf.toString 'hex'
crypto.randomBytes 24, (ex, buf2) ->
clientSecret = buf2.toString 'hex'
app.db.OauthClient.create({
clientId: clientId
clientSecret: clientSecret
type: "internal"
}).success (oauthClient) ->
oauthClient.setUser(user).success (client) ->
app.zoo.create user.zkChroot.slice(0, -1), new Buffer(JSON.stringify({"total": 0, "configs": {}})), (err, stat) ->
if err
LOG.error err
if stat
res.redirect '/login'
else
LOG.error stat
).error( (errors) ->
if errors.code == 'ER_DUP_ENTRY'
res.redirect '/register?error=Could not Register&message=Are you already registered?'
)
app.get '/login', (req, res) ->
if req.user
res.redirect '/configs'
res.render 'login',
error: req.query.error
message: req.query.message
app.post '/login', app.passport.authenticate('local', {successRedirect: '/configs', failureRedirect: '/login?error=Could not Login&message=Email or Password were invalid.'})
app.get '/logout', checkAuth, (req, res) ->
app.db.AccessToken.destroy({accessToken: req.session.accessToken, session: true}).success () ->
req.logout()
req.session.accessToken = null
res.redirect '/login'
app.get '/configs', checkAuth, (req, res) ->
res.render 'configs',
error: req.query.error
message: req.query.message
app.post '/configs', checkAuth, (req, res) ->
if not /^[a-z0-9]+(-[a-z0-9]+)*$/ig.test req.body.configName
res.redirect '/configs?error=' + req.body.configName + '&message=Config name must follow the following format: ^[a-z0-9]+(-[a-z0-9]+)*$'
else
LOG.info "Checking if " + req.user.zkChroot + req.body.configName + " exists"
app.zoo.exists req.user.zkChroot + req.body.configName, (err, stat) ->
if err
LOG.error err.stack
if stat
LOG.error req.body.configName + " exists"
else
LOG.info "Creating " + req.user.zkChroot + req.body.configName + "'"
data = JSON.stringify
name: req.body.configName
conf: []
app.zoo.create req.user.zkChroot + req.body.configName, new Buffer(data), (err, path) ->
if err
LOG.error err.stack
if path
LOG.info "Created new config '/configs/" + req.body.configName + "'"
res.redirect '/configs/' + req.body.configName
else
LOG.error "Could not create the path...for some reason."
res.redirect '/configs?error=' + req.body.configName + '&message=Could not create or exists already'
app.get '/configs/:config', checkAuth, (req, res) ->
if /^[a-z0-9]+(-[a-z0-9]+)*$/ig.test req.params.config
res.render 'view',
config: req.params.config
else
res.status(500).end()
app.get '/access', checkAuth, (req, res) ->
res.render 'access',
user: req.user
|
[
{
"context": "erleaf.com\"\n\t\t\t\t\t\tuser: \"sharelatex\"\n\t\t\t\t\t\tpass: \"password\"\n\t\t\t\toverleaf:\n\t\t\t\t\thost: @v1Url\n\t\t\t\"uuid\":v4:=>@",
"end": 2297,
"score": 0.9995548725128174,
"start": 2289,
"tag": "PASSWORD",
"value": "password"
}
] | test/unit/coffee/Templates/TemplatesManagerTests.coffee | shyoshyo/web-sharelatex | 1 | SandboxedModule = require('sandboxed-module')
assert = require('assert')
chai = require('chai')
sinon = require('sinon')
sinonChai = require('sinon-chai')
should = require('chai').should()
chai.use(sinonChai)
modulePath = '../../../../app/js/Features/Templates/TemplatesManager'
describe 'TemplatesManager', ->
beforeEach ->
@project_id = "project-id"
@brandVariationId = "brand-variation-id"
@compiler = "pdflatex"
@imageName = "TL2017"
@mainFile = "main.tex"
@templateId = "template-id"
@templateName = "template name"
@templateVersionId = "template-version-id"
@user_id = "user-id"
@dumpPath = "#{@dumpFolder}/#{@uuid}"
@callback = sinon.stub()
@request = sinon.stub().returns {
pipe:->
on:->
response: statusCode: 200
}
@fs = {
unlink : sinon.stub()
createWriteStream : sinon.stub().returns(on: sinon.stub().yields())
}
@ProjectUploadManager = {createProjectFromZipArchiveWithName : sinon.stub().callsArgWith(3, null, {_id:@project_id})}
@dumpFolder = "dump/path"
@ProjectOptionsHandler = {
setCompiler:sinon.stub().callsArgWith(2)
setImageName:sinon.stub().callsArgWith(2)
setBrandVariationId:sinon.stub().callsArgWith(2)
}
@uuid = "1234"
@ProjectRootDocManager = {
setRootDocFromName: sinon.stub().callsArgWith(2)
}
@ProjectDetailsHandler =
getProjectDescription:sinon.stub()
fixProjectName: sinon.stub().returns(@templateName)
@Project =
update: sinon.stub().callsArgWith(3, null)
@TemplatesManager = SandboxedModule.require modulePath, requires:
'../../../js/Features/Uploads/ProjectUploadManager':@ProjectUploadManager
'../../../js/Features/Project/ProjectOptionsHandler':@ProjectOptionsHandler
'../../../js/Features/Project/ProjectRootDocManager':@ProjectRootDocManager
'../../../js/Features/Project/ProjectDetailsHandler':@ProjectDetailsHandler
'../../../js/Features/Authentication/AuthenticationController': @AuthenticationController = {getLoggedInUserId: sinon.stub()}
'./TemplatesPublisher':@TemplatesPublisher
"logger-sharelatex":
log:->
err:->
"settings-sharelatex":
path:
dumpFolder:@dumpFolder
siteUrl: @siteUrl = "http://localhost:3000"
apis:
v1:
url: @v1Url="http://overleaf.com"
user: "sharelatex"
pass: "password"
overleaf:
host: @v1Url
"uuid":v4:=>@uuid
"request": @request
"fs":@fs
"../../../js/models/Project": {Project: @Project}
@zipUrl = "%2Ftemplates%2F52fb86a81ae1e566597a25f6%2Fv%2F4%2Fzip&templateName=Moderncv%20Banking&compiler=pdflatex"
describe 'createProjectFromV1Template', ->
describe "when all options passed", ->
beforeEach ->
@TemplatesManager.createProjectFromV1Template @brandVariationId, @compiler, @mainFile, @templateId, @templateName, @templateVersionId, @user_id, @imageName, @callback
it "should fetch zip from v1 based on template id", ->
@request.should.have.been.calledWith "#{@v1Url}/api/v1/sharelatex/templates/#{@templateVersionId}"
it "should save temporary file", ->
@fs.createWriteStream.should.have.been.calledWith @dumpPath
it "should create project", ->
@ProjectUploadManager.createProjectFromZipArchiveWithName.should.have.been.calledWithMatch @user_id, @templateName, @dumpPath
it "should unlink file", ->
@fs.unlink.should.have.been.calledWith @dumpPath
it "should set project options when passed", ->
@ProjectOptionsHandler.setCompiler.should.have.been.calledWithMatch @project_id, @compiler
@ProjectOptionsHandler.setImageName.should.have.been.calledWithMatch @project_id, @imageName
@ProjectRootDocManager.setRootDocFromName.should.have.been.calledWithMatch @project_id, @mainFile
@ProjectOptionsHandler.setBrandVariationId.should.have.been.calledWithMatch @project_id, @brandVariationId
it "should update project", ->
@Project.update.should.have.been.calledWithMatch { _id: @project_id }, { fromV1TemplateId: @templateId, fromV1TemplateVersionId: @templateVersionId }
describe "when some options not set", ->
beforeEach ->
@TemplatesManager.createProjectFromV1Template null, null, null, @templateId, @templateName, @templateVersionId, @user_id, null, @callback
it "should not set missing project options", ->
@ProjectOptionsHandler.setCompiler.called.should.equal false
@ProjectRootDocManager.setRootDocFromName.called.should.equal false
@ProjectOptionsHandler.setBrandVariationId.called.should.equal false
@ProjectOptionsHandler.setImageName.should.have.been.calledWithMatch @project_id, "wl_texlive:2018.1"
| 168460 | SandboxedModule = require('sandboxed-module')
assert = require('assert')
chai = require('chai')
sinon = require('sinon')
sinonChai = require('sinon-chai')
should = require('chai').should()
chai.use(sinonChai)
modulePath = '../../../../app/js/Features/Templates/TemplatesManager'
describe 'TemplatesManager', ->
beforeEach ->
@project_id = "project-id"
@brandVariationId = "brand-variation-id"
@compiler = "pdflatex"
@imageName = "TL2017"
@mainFile = "main.tex"
@templateId = "template-id"
@templateName = "template name"
@templateVersionId = "template-version-id"
@user_id = "user-id"
@dumpPath = "#{@dumpFolder}/#{@uuid}"
@callback = sinon.stub()
@request = sinon.stub().returns {
pipe:->
on:->
response: statusCode: 200
}
@fs = {
unlink : sinon.stub()
createWriteStream : sinon.stub().returns(on: sinon.stub().yields())
}
@ProjectUploadManager = {createProjectFromZipArchiveWithName : sinon.stub().callsArgWith(3, null, {_id:@project_id})}
@dumpFolder = "dump/path"
@ProjectOptionsHandler = {
setCompiler:sinon.stub().callsArgWith(2)
setImageName:sinon.stub().callsArgWith(2)
setBrandVariationId:sinon.stub().callsArgWith(2)
}
@uuid = "1234"
@ProjectRootDocManager = {
setRootDocFromName: sinon.stub().callsArgWith(2)
}
@ProjectDetailsHandler =
getProjectDescription:sinon.stub()
fixProjectName: sinon.stub().returns(@templateName)
@Project =
update: sinon.stub().callsArgWith(3, null)
@TemplatesManager = SandboxedModule.require modulePath, requires:
'../../../js/Features/Uploads/ProjectUploadManager':@ProjectUploadManager
'../../../js/Features/Project/ProjectOptionsHandler':@ProjectOptionsHandler
'../../../js/Features/Project/ProjectRootDocManager':@ProjectRootDocManager
'../../../js/Features/Project/ProjectDetailsHandler':@ProjectDetailsHandler
'../../../js/Features/Authentication/AuthenticationController': @AuthenticationController = {getLoggedInUserId: sinon.stub()}
'./TemplatesPublisher':@TemplatesPublisher
"logger-sharelatex":
log:->
err:->
"settings-sharelatex":
path:
dumpFolder:@dumpFolder
siteUrl: @siteUrl = "http://localhost:3000"
apis:
v1:
url: @v1Url="http://overleaf.com"
user: "sharelatex"
pass: "<PASSWORD>"
overleaf:
host: @v1Url
"uuid":v4:=>@uuid
"request": @request
"fs":@fs
"../../../js/models/Project": {Project: @Project}
@zipUrl = "%2Ftemplates%2F52fb86a81ae1e566597a25f6%2Fv%2F4%2Fzip&templateName=Moderncv%20Banking&compiler=pdflatex"
describe 'createProjectFromV1Template', ->
describe "when all options passed", ->
beforeEach ->
@TemplatesManager.createProjectFromV1Template @brandVariationId, @compiler, @mainFile, @templateId, @templateName, @templateVersionId, @user_id, @imageName, @callback
it "should fetch zip from v1 based on template id", ->
@request.should.have.been.calledWith "#{@v1Url}/api/v1/sharelatex/templates/#{@templateVersionId}"
it "should save temporary file", ->
@fs.createWriteStream.should.have.been.calledWith @dumpPath
it "should create project", ->
@ProjectUploadManager.createProjectFromZipArchiveWithName.should.have.been.calledWithMatch @user_id, @templateName, @dumpPath
it "should unlink file", ->
@fs.unlink.should.have.been.calledWith @dumpPath
it "should set project options when passed", ->
@ProjectOptionsHandler.setCompiler.should.have.been.calledWithMatch @project_id, @compiler
@ProjectOptionsHandler.setImageName.should.have.been.calledWithMatch @project_id, @imageName
@ProjectRootDocManager.setRootDocFromName.should.have.been.calledWithMatch @project_id, @mainFile
@ProjectOptionsHandler.setBrandVariationId.should.have.been.calledWithMatch @project_id, @brandVariationId
it "should update project", ->
@Project.update.should.have.been.calledWithMatch { _id: @project_id }, { fromV1TemplateId: @templateId, fromV1TemplateVersionId: @templateVersionId }
describe "when some options not set", ->
beforeEach ->
@TemplatesManager.createProjectFromV1Template null, null, null, @templateId, @templateName, @templateVersionId, @user_id, null, @callback
it "should not set missing project options", ->
@ProjectOptionsHandler.setCompiler.called.should.equal false
@ProjectRootDocManager.setRootDocFromName.called.should.equal false
@ProjectOptionsHandler.setBrandVariationId.called.should.equal false
@ProjectOptionsHandler.setImageName.should.have.been.calledWithMatch @project_id, "wl_texlive:2018.1"
| true | SandboxedModule = require('sandboxed-module')
assert = require('assert')
chai = require('chai')
sinon = require('sinon')
sinonChai = require('sinon-chai')
should = require('chai').should()
chai.use(sinonChai)
modulePath = '../../../../app/js/Features/Templates/TemplatesManager'
describe 'TemplatesManager', ->
beforeEach ->
@project_id = "project-id"
@brandVariationId = "brand-variation-id"
@compiler = "pdflatex"
@imageName = "TL2017"
@mainFile = "main.tex"
@templateId = "template-id"
@templateName = "template name"
@templateVersionId = "template-version-id"
@user_id = "user-id"
@dumpPath = "#{@dumpFolder}/#{@uuid}"
@callback = sinon.stub()
@request = sinon.stub().returns {
pipe:->
on:->
response: statusCode: 200
}
@fs = {
unlink : sinon.stub()
createWriteStream : sinon.stub().returns(on: sinon.stub().yields())
}
@ProjectUploadManager = {createProjectFromZipArchiveWithName : sinon.stub().callsArgWith(3, null, {_id:@project_id})}
@dumpFolder = "dump/path"
@ProjectOptionsHandler = {
setCompiler:sinon.stub().callsArgWith(2)
setImageName:sinon.stub().callsArgWith(2)
setBrandVariationId:sinon.stub().callsArgWith(2)
}
@uuid = "1234"
@ProjectRootDocManager = {
setRootDocFromName: sinon.stub().callsArgWith(2)
}
@ProjectDetailsHandler =
getProjectDescription:sinon.stub()
fixProjectName: sinon.stub().returns(@templateName)
@Project =
update: sinon.stub().callsArgWith(3, null)
@TemplatesManager = SandboxedModule.require modulePath, requires:
'../../../js/Features/Uploads/ProjectUploadManager':@ProjectUploadManager
'../../../js/Features/Project/ProjectOptionsHandler':@ProjectOptionsHandler
'../../../js/Features/Project/ProjectRootDocManager':@ProjectRootDocManager
'../../../js/Features/Project/ProjectDetailsHandler':@ProjectDetailsHandler
'../../../js/Features/Authentication/AuthenticationController': @AuthenticationController = {getLoggedInUserId: sinon.stub()}
'./TemplatesPublisher':@TemplatesPublisher
"logger-sharelatex":
log:->
err:->
"settings-sharelatex":
path:
dumpFolder:@dumpFolder
siteUrl: @siteUrl = "http://localhost:3000"
apis:
v1:
url: @v1Url="http://overleaf.com"
user: "sharelatex"
pass: "PI:PASSWORD:<PASSWORD>END_PI"
overleaf:
host: @v1Url
"uuid":v4:=>@uuid
"request": @request
"fs":@fs
"../../../js/models/Project": {Project: @Project}
@zipUrl = "%2Ftemplates%2F52fb86a81ae1e566597a25f6%2Fv%2F4%2Fzip&templateName=Moderncv%20Banking&compiler=pdflatex"
describe 'createProjectFromV1Template', ->
describe "when all options passed", ->
beforeEach ->
@TemplatesManager.createProjectFromV1Template @brandVariationId, @compiler, @mainFile, @templateId, @templateName, @templateVersionId, @user_id, @imageName, @callback
it "should fetch zip from v1 based on template id", ->
@request.should.have.been.calledWith "#{@v1Url}/api/v1/sharelatex/templates/#{@templateVersionId}"
it "should save temporary file", ->
@fs.createWriteStream.should.have.been.calledWith @dumpPath
it "should create project", ->
@ProjectUploadManager.createProjectFromZipArchiveWithName.should.have.been.calledWithMatch @user_id, @templateName, @dumpPath
it "should unlink file", ->
@fs.unlink.should.have.been.calledWith @dumpPath
it "should set project options when passed", ->
@ProjectOptionsHandler.setCompiler.should.have.been.calledWithMatch @project_id, @compiler
@ProjectOptionsHandler.setImageName.should.have.been.calledWithMatch @project_id, @imageName
@ProjectRootDocManager.setRootDocFromName.should.have.been.calledWithMatch @project_id, @mainFile
@ProjectOptionsHandler.setBrandVariationId.should.have.been.calledWithMatch @project_id, @brandVariationId
it "should update project", ->
@Project.update.should.have.been.calledWithMatch { _id: @project_id }, { fromV1TemplateId: @templateId, fromV1TemplateVersionId: @templateVersionId }
describe "when some options not set", ->
beforeEach ->
@TemplatesManager.createProjectFromV1Template null, null, null, @templateId, @templateName, @templateVersionId, @user_id, null, @callback
it "should not set missing project options", ->
@ProjectOptionsHandler.setCompiler.called.should.equal false
@ProjectRootDocManager.setRootDocFromName.called.should.equal false
@ProjectOptionsHandler.setBrandVariationId.called.should.equal false
@ProjectOptionsHandler.setImageName.should.have.been.calledWithMatch @project_id, "wl_texlive:2018.1"
|
[
{
"context": "#\n knockback-validation.js 0.18.5\n (c) 2011-2013 Kevin Malakoff.\n Knockback.Observable is freely distributable u",
"end": 67,
"score": 0.99981290102005,
"start": 53,
"tag": "NAME",
"value": "Kevin Malakoff"
},
{
"context": " for full license details:\n https://... | src/validation/validation.coffee | npmcomponent/kmalakoff-knockback | 1 | ###
knockback-validation.js 0.18.5
(c) 2011-2013 Kevin Malakoff.
Knockback.Observable is freely distributable under the MIT license.
See the following for full license details:
https://github.com/kmalakoff/knockback/blob/master/LICENSE
###
# internal helper
callOrGet = (value) ->
value = _unwrapObservable(value)
return if typeof(value) is 'function' then value.apply(null, Array.prototype.slice.call(arguments, 1)) else value
# Helpers for validating forms, inputs, and values.
# @example A Named Form
# <form name="myForm">
# <input name="input1", data-bind="value: input1" required>
# <input type="url" name="input2", data-bind="value: input2">
# </form>
# Because there is a form name, it will add the following property to your ViewModel (wrapped in an observable):
# $myForm: {
# input1: {required: boolean, valid: boolean, invalid: boolean},
# input2: {url: boolean, valid: boolean, invalid: boolean},
# valid: boolean,
# invalid: boolean
# }
# @example A Unnamed Form
# <form>
# <input name="input1", data-bind="value: input1" required>
# <input type="url" name="input2", data-bind="value: input2">
# </form>
# Because there is not a form name, it will extend the following on your ViewModel (each wrapped separately in an observable):
# {
# $input1: {required: boolean, valid: boolean, invalid: boolean},
# $input2: {url: boolean, valid: boolean, invalid: boolean}
# }
#
# @method .valueValidator(value, bindings, validation_options={})
# Used to create an observable that wraps all of the validators for a value and also generates helpers for $valid, $error_count, $enabled, $disabled, and $active_error.
# @note Called using `kb.valueValidator` (not kb.Validation.valueValidator)
# @param [Observable] value the value to validate
# @param [Object] bindings the named validators to use to validate the value
# @param [Object] validation_options the validation options
# @option validation_options [Boolean|Function] disable the test for disabling validations
# @option validation_options [Boolean|Function] enable the test for enabling validations
# @option validation_options [String|Array] priorities the priority order of the validators (used to set $active_error in the case of multiple being active simulateously)
# @return [ko.computed] a single observable storing an Object with all of the validators and generated helpers
#
# @method .inputValidator(view_model, el, validation_options={})
# Used to create an observable that wraps all of the validators for an HTML input element using `kb.valueValidator`. See kb.valueValidator for shared options.
# In addition, you can add custom bindings by including a `validations` Object in your data-bind statement where each property has a function(value) that returns true if there are errors.
# It will automatically generate validators from the input for the following attributes:
# * type: for url, email, and number
# * required: must have a length or a value
# @note Called using `kb.inputValidator` (not kb.Validation.inputValidator)
# @return [ko.computed] a single observable storing an Object with all of the validators and generated helpers
# @example Binding an input using Knockback inject.
# <input type="url" name="name" data-bind="value: name, inject: kb.inputValidator" required>
# Adds the following to your ViewModel:
# $name: kb.observable({required: Boolean, url: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# @example Binding an input with custom validations using Knockback inject.
# <input type="url" name="name" data-bind="value: name, inject: kb.inputValidator, validations: {unique: nameTaken}" required>
# Adds the following to your ViewModel:
# $name: kb.observable({required: Boolean, url: Boolean, unique: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# @example Binding an input with validation options using Knockback inject.
# <input type="url" name="name" data-bind="value: name, inject: kb.inputValidator, validation_options: {disable: disable, priorities: 'url'}" required>
# Adds the following to your ViewModel:
# $name: kb.observable({required: Boolean, url: Boolean, unique: Boolean, $valid: Boolean, $error_count: Number, $enabled: Boolean, $disabled: Boolean, $active_error: String})
#
# @method .formValidator(view_model, el)
# Used to create an observable that wraps all of the validators for all the inputs on an HTML form element using `kb.inputValidator`. See kb.inputValidator for per input options.
# In addition, the formValidator aggregates the following helpers for its inputs: $valid, $error_count, $enabled, and $disabled. Also, if you provide a name attribute for the form, it will attach all of the inputs to a $name property on your view model.
# @note Called using `kb.formValidator` (not kb.Validation.formValidator)
# @return [Object] an Object with all of the validators and generated helpers
# @example Binding a form by name using Knockback inject.
# <form name='my_form' data-bind="inject: kb.formValidator, validation_options: {priorities: ['required', 'url']}">
# <input type="text" name="name" data-bind="value: name" required>
# <input type="url" name="site" data-bind="value: site" required>
# </form>
# Adds the following to your ViewModel:
# $my_form: {
# name: kb.observable({required: Boolean, $valid: Boolean, $error_count: Number, $active_error: String}),
# site: kb.observable({required: Boolean, url: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# }
# @example Binding a form without a name using Knockback inject.
# <form data-bind="inject: kb.formValidator, validation_options: {priorities: ['required', 'url']}">
# <input type="text" name="name" data-bind="value: name" required>
# <input type="url" name="site" data-bind="value: site" required>
# </form>
# Extends your ViewModel with the following Object:
# {
# $name: kb.observable({required: Boolean, $valid: Boolean, $error_count: Number, $active_error: String}),
# $site: kb.observable({required: Boolean, url: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# }
# @method .hasChangedFn(model)
# A validation helper that can be used to wait for a change before enabling validations.
# @note Called using `kb.hasChangedFn` (not kb.Validation.hasChangedFn)
# @return [Function] Validator function bound with model
# @example Enabling validations after a change has been made to a model.
# <form class="form-horizontal" data-bind="inject: kb.formValidator, validation_options: {enable: kb.hasChangedFn(model)}">
# @method .minLengthFn(length)
# A validator that will be invalid until the length of the value is below a minimum value.
# @note Called using `kb.minLengthFn` (not kb.Validation.minLengthFn)
# @return [Function] Validator function bound with min length
# @example Validations will be invalid until the name is at least 4 characters long.
# <input type="text" name="name" data-bind="value: name, validations: {length: kb.minLengthFn(4)}">
# @method .uniqueValueFn(model, key, collection)
# Checks for a unique attribute value by key in a collection
# @note Called using `kb.uniqueValueFn` (not kb.Validation.uniqueValueFn)
# @return [Function] Validator function bound with model, attribute key, and collection
# @example Validations will be invalid until the name attribute is unique in the collection.
# <input type="text" name="name" data-bind="value: name, validations: {unique: kb.uniqueValueFn(model, 'name', some_collection)}">
# @method .untilTrueFn(stand_in, fn, model)
# Used to combine conditions.
# @note Called using `kb.untilTrueFn` (not kb.Validation.untilTrueFn)
# @return [Function] Validator function bound with stand_in value before condition is met, validator function, and optionally model (will reset if the model changes).
# @example Filter the minimum length test of name until it has be valid (that way, won't report invalid while typing in a new input).
# <input type="text" name="name" data-bind="value: name, validations: {length_filtered: kb.untilFalseFn(false, kb.minLengthFn(4), model)}">
# @method .untilFalseFn(stand_in, fn, model)
# Used to combine conditions.
# @note Called using `kb.untilFalseFn` (not kb.Validation.untilFalseFn)
# @return [Function] Validator function bound with stand_in value before condition is met, validator function, and optionally model (will reset if the model changes).
class kb.Validation
#############################
# Aliases
#############################
kb.valueValidator = (value, bindings, validation_options={}) ->
(validation_options and not (typeof(validation_options) is 'function')) or (validation_options = {})
return ko.dependentObservable(->
results = {$error_count: 0}
current_value = _unwrapObservable(value)
not ('disable' of validation_options) or (disabled = callOrGet(validation_options.disable))
not ('enable' of validation_options) or (disabled = not callOrGet(validation_options.enable))
priorities = validation_options.priorities or []
_.isArray(priorities) or (priorities = [priorities]) # ensure priorities is an array
# then add the rest
active_index = priorities.length + 1
for identifier, validator of bindings
results[identifier] = not disabled and callOrGet(validator, current_value) # update validity
if results[identifier]
results.$error_count++
# check priorities
(identifier_index = _.indexOf(priorities, identifier)>=0) or (identifier_index = priorities.length)
if results.$active_error and identifier_index < active_index
results.$active_error = identifier; active_index = identifier_index
else
results.$active_error or (results.$active_error = identifier; active_index = identifier_index)
# add the inverse and ensure a boolean
results.$enabled = not disabled
results.$disable = !!disabled
results.$valid = results.$error_count is 0
return results
)
kb.inputValidator = (view_model, el, validation_options={}) ->
(validation_options and not (typeof(validation_options) is 'function')) or (validation_options = {})
validators = kb.valid
$input_el = $(el)
input_name = null if (input_name = $input_el.attr('name')) and not _.isString(input_name)
# only set up form elements with a value bindings
return null unless (bindings = $input_el.attr('data-bind'))
options = (new Function("sc", "with(sc[0]) { return { #{bindings} } }"))([view_model])
return null if not (options and options.value)
(not options.validation_options) or (_.defaults(options.validation_options, validation_options); validation_options = options.validation_options)
# collect the types to identifier
bindings = {}
(not validators[type = $input_el.attr('type')]) or (bindings[type] = validators[type])
(not $input_el.attr('required')) or (bindings.required = validators.required)
if options.validations
bindings[identifier] = validator for identifier, validator of options.validations
result = kb.valueValidator(options.value, bindings, validation_options)
# if there is a name, add to the view_model with $scoping
(not input_name and not validation_options.no_attach) or (view_model["$#{input_name}"] = result)
return result
kb.formValidator = (view_model, el) ->
results = {}
validators = []
$root_el = $(el)
form_name = null if (form_name = $root_el.attr('name')) and not _.isString(form_name)
if (bindings = $root_el.attr('data-bind'))
options = (new Function("sc", "with(sc[0]) { return { #{bindings} } }"))([view_model])
validation_options = options.validation_options
validation_options or= {}
validation_options.no_attach = !!form_name
# build up the results
for input_el in $root_el.find('input')
continue unless (name = $(input_el).attr('name')) # need named inputs to set up an object
validator = kb.inputValidator(view_model, input_el, validation_options)
not validator or validators.push(results[name] = validator)
# collect stats, error count and valid
results.$error_count = ko.dependentObservable(->
error_count = 0
for validator in validators
error_count += validator().$error_count
return error_count
)
results.$valid = ko.dependentObservable(-> return results.$error_count() is 0)
# enabled and disabled
results.$enabled = ko.dependentObservable(->
enabled = true
for validator in validators
enabled &= validator().$enabled
return enabled
)
results.$disabled = ko.dependentObservable(-> return not results.$enabled())
# if there is a name, add to the view_model with $scoping
view_model["$#{form_name}"] = results if form_name
return results | 76928 | ###
knockback-validation.js 0.18.5
(c) 2011-2013 <NAME>.
Knockback.Observable is freely distributable under the MIT license.
See the following for full license details:
https://github.com/kmalakoff/knockback/blob/master/LICENSE
###
# internal helper
callOrGet = (value) ->
value = _unwrapObservable(value)
return if typeof(value) is 'function' then value.apply(null, Array.prototype.slice.call(arguments, 1)) else value
# Helpers for validating forms, inputs, and values.
# @example A Named Form
# <form name="myForm">
# <input name="input1", data-bind="value: input1" required>
# <input type="url" name="input2", data-bind="value: input2">
# </form>
# Because there is a form name, it will add the following property to your ViewModel (wrapped in an observable):
# $myForm: {
# input1: {required: boolean, valid: boolean, invalid: boolean},
# input2: {url: boolean, valid: boolean, invalid: boolean},
# valid: boolean,
# invalid: boolean
# }
# @example A Unnamed Form
# <form>
# <input name="input1", data-bind="value: input1" required>
# <input type="url" name="input2", data-bind="value: input2">
# </form>
# Because there is not a form name, it will extend the following on your ViewModel (each wrapped separately in an observable):
# {
# $input1: {required: boolean, valid: boolean, invalid: boolean},
# $input2: {url: boolean, valid: boolean, invalid: boolean}
# }
#
# @method .valueValidator(value, bindings, validation_options={})
# Used to create an observable that wraps all of the validators for a value and also generates helpers for $valid, $error_count, $enabled, $disabled, and $active_error.
# @note Called using `kb.valueValidator` (not kb.Validation.valueValidator)
# @param [Observable] value the value to validate
# @param [Object] bindings the named validators to use to validate the value
# @param [Object] validation_options the validation options
# @option validation_options [Boolean|Function] disable the test for disabling validations
# @option validation_options [Boolean|Function] enable the test for enabling validations
# @option validation_options [String|Array] priorities the priority order of the validators (used to set $active_error in the case of multiple being active simulateously)
# @return [ko.computed] a single observable storing an Object with all of the validators and generated helpers
#
# @method .inputValidator(view_model, el, validation_options={})
# Used to create an observable that wraps all of the validators for an HTML input element using `kb.valueValidator`. See kb.valueValidator for shared options.
# In addition, you can add custom bindings by including a `validations` Object in your data-bind statement where each property has a function(value) that returns true if there are errors.
# It will automatically generate validators from the input for the following attributes:
# * type: for url, email, and number
# * required: must have a length or a value
# @note Called using `kb.inputValidator` (not kb.Validation.inputValidator)
# @return [ko.computed] a single observable storing an Object with all of the validators and generated helpers
# @example Binding an input using Knockback inject.
# <input type="url" name="name" data-bind="value: name, inject: kb.inputValidator" required>
# Adds the following to your ViewModel:
# $name: kb.observable({required: Boolean, url: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# @example Binding an input with custom validations using Knockback inject.
# <input type="url" name="name" data-bind="value: name, inject: kb.inputValidator, validations: {unique: nameTaken}" required>
# Adds the following to your ViewModel:
# $name: kb.observable({required: Boolean, url: Boolean, unique: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# @example Binding an input with validation options using Knockback inject.
# <input type="url" name="name" data-bind="value: name, inject: kb.inputValidator, validation_options: {disable: disable, priorities: 'url'}" required>
# Adds the following to your ViewModel:
# $name: kb.observable({required: Boolean, url: Boolean, unique: Boolean, $valid: Boolean, $error_count: Number, $enabled: Boolean, $disabled: Boolean, $active_error: String})
#
# @method .formValidator(view_model, el)
# Used to create an observable that wraps all of the validators for all the inputs on an HTML form element using `kb.inputValidator`. See kb.inputValidator for per input options.
# In addition, the formValidator aggregates the following helpers for its inputs: $valid, $error_count, $enabled, and $disabled. Also, if you provide a name attribute for the form, it will attach all of the inputs to a $name property on your view model.
# @note Called using `kb.formValidator` (not kb.Validation.formValidator)
# @return [Object] an Object with all of the validators and generated helpers
# @example Binding a form by name using Knockback inject.
# <form name='my_form' data-bind="inject: kb.formValidator, validation_options: {priorities: ['required', 'url']}">
# <input type="text" name="name" data-bind="value: name" required>
# <input type="url" name="site" data-bind="value: site" required>
# </form>
# Adds the following to your ViewModel:
# $my_form: {
# name: kb.observable({required: Boolean, $valid: Boolean, $error_count: Number, $active_error: String}),
# site: kb.observable({required: Boolean, url: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# }
# @example Binding a form without a name using Knockback inject.
# <form data-bind="inject: kb.formValidator, validation_options: {priorities: ['required', 'url']}">
# <input type="text" name="name" data-bind="value: name" required>
# <input type="url" name="site" data-bind="value: site" required>
# </form>
# Extends your ViewModel with the following Object:
# {
# $name: kb.observable({required: Boolean, $valid: Boolean, $error_count: Number, $active_error: String}),
# $site: kb.observable({required: Boolean, url: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# }
# @method .hasChangedFn(model)
# A validation helper that can be used to wait for a change before enabling validations.
# @note Called using `kb.hasChangedFn` (not kb.Validation.hasChangedFn)
# @return [Function] Validator function bound with model
# @example Enabling validations after a change has been made to a model.
# <form class="form-horizontal" data-bind="inject: kb.formValidator, validation_options: {enable: kb.hasChangedFn(model)}">
# @method .minLengthFn(length)
# A validator that will be invalid until the length of the value is below a minimum value.
# @note Called using `kb.minLengthFn` (not kb.Validation.minLengthFn)
# @return [Function] Validator function bound with min length
# @example Validations will be invalid until the name is at least 4 characters long.
# <input type="text" name="name" data-bind="value: name, validations: {length: kb.minLengthFn(4)}">
# @method .uniqueValueFn(model, key, collection)
# Checks for a unique attribute value by key in a collection
# @note Called using `kb.uniqueValueFn` (not kb.Validation.uniqueValueFn)
# @return [Function] Validator function bound with model, attribute key, and collection
# @example Validations will be invalid until the name attribute is unique in the collection.
# <input type="text" name="name" data-bind="value: name, validations: {unique: kb.uniqueValueFn(model, 'name', some_collection)}">
# @method .untilTrueFn(stand_in, fn, model)
# Used to combine conditions.
# @note Called using `kb.untilTrueFn` (not kb.Validation.untilTrueFn)
# @return [Function] Validator function bound with stand_in value before condition is met, validator function, and optionally model (will reset if the model changes).
# @example Filter the minimum length test of name until it has be valid (that way, won't report invalid while typing in a new input).
# <input type="text" name="name" data-bind="value: name, validations: {length_filtered: kb.untilFalseFn(false, kb.minLengthFn(4), model)}">
# @method .untilFalseFn(stand_in, fn, model)
# Used to combine conditions.
# @note Called using `kb.untilFalseFn` (not kb.Validation.untilFalseFn)
# @return [Function] Validator function bound with stand_in value before condition is met, validator function, and optionally model (will reset if the model changes).
class kb.Validation
#############################
# Aliases
#############################
kb.valueValidator = (value, bindings, validation_options={}) ->
(validation_options and not (typeof(validation_options) is 'function')) or (validation_options = {})
return ko.dependentObservable(->
results = {$error_count: 0}
current_value = _unwrapObservable(value)
not ('disable' of validation_options) or (disabled = callOrGet(validation_options.disable))
not ('enable' of validation_options) or (disabled = not callOrGet(validation_options.enable))
priorities = validation_options.priorities or []
_.isArray(priorities) or (priorities = [priorities]) # ensure priorities is an array
# then add the rest
active_index = priorities.length + 1
for identifier, validator of bindings
results[identifier] = not disabled and callOrGet(validator, current_value) # update validity
if results[identifier]
results.$error_count++
# check priorities
(identifier_index = _.indexOf(priorities, identifier)>=0) or (identifier_index = priorities.length)
if results.$active_error and identifier_index < active_index
results.$active_error = identifier; active_index = identifier_index
else
results.$active_error or (results.$active_error = identifier; active_index = identifier_index)
# add the inverse and ensure a boolean
results.$enabled = not disabled
results.$disable = !!disabled
results.$valid = results.$error_count is 0
return results
)
kb.inputValidator = (view_model, el, validation_options={}) ->
(validation_options and not (typeof(validation_options) is 'function')) or (validation_options = {})
validators = kb.valid
$input_el = $(el)
input_name = null if (input_name = $input_el.attr('name')) and not _.isString(input_name)
# only set up form elements with a value bindings
return null unless (bindings = $input_el.attr('data-bind'))
options = (new Function("sc", "with(sc[0]) { return { #{bindings} } }"))([view_model])
return null if not (options and options.value)
(not options.validation_options) or (_.defaults(options.validation_options, validation_options); validation_options = options.validation_options)
# collect the types to identifier
bindings = {}
(not validators[type = $input_el.attr('type')]) or (bindings[type] = validators[type])
(not $input_el.attr('required')) or (bindings.required = validators.required)
if options.validations
bindings[identifier] = validator for identifier, validator of options.validations
result = kb.valueValidator(options.value, bindings, validation_options)
# if there is a name, add to the view_model with $scoping
(not input_name and not validation_options.no_attach) or (view_model["$#{input_name}"] = result)
return result
kb.formValidator = (view_model, el) ->
results = {}
validators = []
$root_el = $(el)
form_name = null if (form_name = $root_el.attr('name')) and not _.isString(form_name)
if (bindings = $root_el.attr('data-bind'))
options = (new Function("sc", "with(sc[0]) { return { #{bindings} } }"))([view_model])
validation_options = options.validation_options
validation_options or= {}
validation_options.no_attach = !!form_name
# build up the results
for input_el in $root_el.find('input')
continue unless (name = $(input_el).attr('name')) # need named inputs to set up an object
validator = kb.inputValidator(view_model, input_el, validation_options)
not validator or validators.push(results[name] = validator)
# collect stats, error count and valid
results.$error_count = ko.dependentObservable(->
error_count = 0
for validator in validators
error_count += validator().$error_count
return error_count
)
results.$valid = ko.dependentObservable(-> return results.$error_count() is 0)
# enabled and disabled
results.$enabled = ko.dependentObservable(->
enabled = true
for validator in validators
enabled &= validator().$enabled
return enabled
)
results.$disabled = ko.dependentObservable(-> return not results.$enabled())
# if there is a name, add to the view_model with $scoping
view_model["$#{form_name}"] = results if form_name
return results | true | ###
knockback-validation.js 0.18.5
(c) 2011-2013 PI:NAME:<NAME>END_PI.
Knockback.Observable is freely distributable under the MIT license.
See the following for full license details:
https://github.com/kmalakoff/knockback/blob/master/LICENSE
###
# internal helper
callOrGet = (value) ->
value = _unwrapObservable(value)
return if typeof(value) is 'function' then value.apply(null, Array.prototype.slice.call(arguments, 1)) else value
# Helpers for validating forms, inputs, and values.
# @example A Named Form
# <form name="myForm">
# <input name="input1", data-bind="value: input1" required>
# <input type="url" name="input2", data-bind="value: input2">
# </form>
# Because there is a form name, it will add the following property to your ViewModel (wrapped in an observable):
# $myForm: {
# input1: {required: boolean, valid: boolean, invalid: boolean},
# input2: {url: boolean, valid: boolean, invalid: boolean},
# valid: boolean,
# invalid: boolean
# }
# @example A Unnamed Form
# <form>
# <input name="input1", data-bind="value: input1" required>
# <input type="url" name="input2", data-bind="value: input2">
# </form>
# Because there is not a form name, it will extend the following on your ViewModel (each wrapped separately in an observable):
# {
# $input1: {required: boolean, valid: boolean, invalid: boolean},
# $input2: {url: boolean, valid: boolean, invalid: boolean}
# }
#
# @method .valueValidator(value, bindings, validation_options={})
# Used to create an observable that wraps all of the validators for a value and also generates helpers for $valid, $error_count, $enabled, $disabled, and $active_error.
# @note Called using `kb.valueValidator` (not kb.Validation.valueValidator)
# @param [Observable] value the value to validate
# @param [Object] bindings the named validators to use to validate the value
# @param [Object] validation_options the validation options
# @option validation_options [Boolean|Function] disable the test for disabling validations
# @option validation_options [Boolean|Function] enable the test for enabling validations
# @option validation_options [String|Array] priorities the priority order of the validators (used to set $active_error in the case of multiple being active simulateously)
# @return [ko.computed] a single observable storing an Object with all of the validators and generated helpers
#
# @method .inputValidator(view_model, el, validation_options={})
# Used to create an observable that wraps all of the validators for an HTML input element using `kb.valueValidator`. See kb.valueValidator for shared options.
# In addition, you can add custom bindings by including a `validations` Object in your data-bind statement where each property has a function(value) that returns true if there are errors.
# It will automatically generate validators from the input for the following attributes:
# * type: for url, email, and number
# * required: must have a length or a value
# @note Called using `kb.inputValidator` (not kb.Validation.inputValidator)
# @return [ko.computed] a single observable storing an Object with all of the validators and generated helpers
# @example Binding an input using Knockback inject.
# <input type="url" name="name" data-bind="value: name, inject: kb.inputValidator" required>
# Adds the following to your ViewModel:
# $name: kb.observable({required: Boolean, url: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# @example Binding an input with custom validations using Knockback inject.
# <input type="url" name="name" data-bind="value: name, inject: kb.inputValidator, validations: {unique: nameTaken}" required>
# Adds the following to your ViewModel:
# $name: kb.observable({required: Boolean, url: Boolean, unique: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# @example Binding an input with validation options using Knockback inject.
# <input type="url" name="name" data-bind="value: name, inject: kb.inputValidator, validation_options: {disable: disable, priorities: 'url'}" required>
# Adds the following to your ViewModel:
# $name: kb.observable({required: Boolean, url: Boolean, unique: Boolean, $valid: Boolean, $error_count: Number, $enabled: Boolean, $disabled: Boolean, $active_error: String})
#
# @method .formValidator(view_model, el)
# Used to create an observable that wraps all of the validators for all the inputs on an HTML form element using `kb.inputValidator`. See kb.inputValidator for per input options.
# In addition, the formValidator aggregates the following helpers for its inputs: $valid, $error_count, $enabled, and $disabled. Also, if you provide a name attribute for the form, it will attach all of the inputs to a $name property on your view model.
# @note Called using `kb.formValidator` (not kb.Validation.formValidator)
# @return [Object] an Object with all of the validators and generated helpers
# @example Binding a form by name using Knockback inject.
# <form name='my_form' data-bind="inject: kb.formValidator, validation_options: {priorities: ['required', 'url']}">
# <input type="text" name="name" data-bind="value: name" required>
# <input type="url" name="site" data-bind="value: site" required>
# </form>
# Adds the following to your ViewModel:
# $my_form: {
# name: kb.observable({required: Boolean, $valid: Boolean, $error_count: Number, $active_error: String}),
# site: kb.observable({required: Boolean, url: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# }
# @example Binding a form without a name using Knockback inject.
# <form data-bind="inject: kb.formValidator, validation_options: {priorities: ['required', 'url']}">
# <input type="text" name="name" data-bind="value: name" required>
# <input type="url" name="site" data-bind="value: site" required>
# </form>
# Extends your ViewModel with the following Object:
# {
# $name: kb.observable({required: Boolean, $valid: Boolean, $error_count: Number, $active_error: String}),
# $site: kb.observable({required: Boolean, url: Boolean, $valid: Boolean, $error_count: Number, $active_error: String})
# }
# @method .hasChangedFn(model)
# A validation helper that can be used to wait for a change before enabling validations.
# @note Called using `kb.hasChangedFn` (not kb.Validation.hasChangedFn)
# @return [Function] Validator function bound with model
# @example Enabling validations after a change has been made to a model.
# <form class="form-horizontal" data-bind="inject: kb.formValidator, validation_options: {enable: kb.hasChangedFn(model)}">
# @method .minLengthFn(length)
# A validator that will be invalid until the length of the value is below a minimum value.
# @note Called using `kb.minLengthFn` (not kb.Validation.minLengthFn)
# @return [Function] Validator function bound with min length
# @example Validations will be invalid until the name is at least 4 characters long.
# <input type="text" name="name" data-bind="value: name, validations: {length: kb.minLengthFn(4)}">
# @method .uniqueValueFn(model, key, collection)
# Checks for a unique attribute value by key in a collection
# @note Called using `kb.uniqueValueFn` (not kb.Validation.uniqueValueFn)
# @return [Function] Validator function bound with model, attribute key, and collection
# @example Validations will be invalid until the name attribute is unique in the collection.
# <input type="text" name="name" data-bind="value: name, validations: {unique: kb.uniqueValueFn(model, 'name', some_collection)}">
# @method .untilTrueFn(stand_in, fn, model)
# Used to combine conditions.
# @note Called using `kb.untilTrueFn` (not kb.Validation.untilTrueFn)
# @return [Function] Validator function bound with stand_in value before condition is met, validator function, and optionally model (will reset if the model changes).
# @example Filter the minimum length test of name until it has be valid (that way, won't report invalid while typing in a new input).
# <input type="text" name="name" data-bind="value: name, validations: {length_filtered: kb.untilFalseFn(false, kb.minLengthFn(4), model)}">
# @method .untilFalseFn(stand_in, fn, model)
# Used to combine conditions.
# @note Called using `kb.untilFalseFn` (not kb.Validation.untilFalseFn)
# @return [Function] Validator function bound with stand_in value before condition is met, validator function, and optionally model (will reset if the model changes).
class kb.Validation
#############################
# Aliases
#############################
kb.valueValidator = (value, bindings, validation_options={}) ->
(validation_options and not (typeof(validation_options) is 'function')) or (validation_options = {})
return ko.dependentObservable(->
results = {$error_count: 0}
current_value = _unwrapObservable(value)
not ('disable' of validation_options) or (disabled = callOrGet(validation_options.disable))
not ('enable' of validation_options) or (disabled = not callOrGet(validation_options.enable))
priorities = validation_options.priorities or []
_.isArray(priorities) or (priorities = [priorities]) # ensure priorities is an array
# then add the rest
active_index = priorities.length + 1
for identifier, validator of bindings
results[identifier] = not disabled and callOrGet(validator, current_value) # update validity
if results[identifier]
results.$error_count++
# check priorities
(identifier_index = _.indexOf(priorities, identifier)>=0) or (identifier_index = priorities.length)
if results.$active_error and identifier_index < active_index
results.$active_error = identifier; active_index = identifier_index
else
results.$active_error or (results.$active_error = identifier; active_index = identifier_index)
# add the inverse and ensure a boolean
results.$enabled = not disabled
results.$disable = !!disabled
results.$valid = results.$error_count is 0
return results
)
kb.inputValidator = (view_model, el, validation_options={}) ->
(validation_options and not (typeof(validation_options) is 'function')) or (validation_options = {})
validators = kb.valid
$input_el = $(el)
input_name = null if (input_name = $input_el.attr('name')) and not _.isString(input_name)
# only set up form elements with a value bindings
return null unless (bindings = $input_el.attr('data-bind'))
options = (new Function("sc", "with(sc[0]) { return { #{bindings} } }"))([view_model])
return null if not (options and options.value)
(not options.validation_options) or (_.defaults(options.validation_options, validation_options); validation_options = options.validation_options)
# collect the types to identifier
bindings = {}
(not validators[type = $input_el.attr('type')]) or (bindings[type] = validators[type])
(not $input_el.attr('required')) or (bindings.required = validators.required)
if options.validations
bindings[identifier] = validator for identifier, validator of options.validations
result = kb.valueValidator(options.value, bindings, validation_options)
# if there is a name, add to the view_model with $scoping
(not input_name and not validation_options.no_attach) or (view_model["$#{input_name}"] = result)
return result
kb.formValidator = (view_model, el) ->
results = {}
validators = []
$root_el = $(el)
form_name = null if (form_name = $root_el.attr('name')) and not _.isString(form_name)
if (bindings = $root_el.attr('data-bind'))
options = (new Function("sc", "with(sc[0]) { return { #{bindings} } }"))([view_model])
validation_options = options.validation_options
validation_options or= {}
validation_options.no_attach = !!form_name
# build up the results
for input_el in $root_el.find('input')
continue unless (name = $(input_el).attr('name')) # need named inputs to set up an object
validator = kb.inputValidator(view_model, input_el, validation_options)
not validator or validators.push(results[name] = validator)
# collect stats, error count and valid
results.$error_count = ko.dependentObservable(->
error_count = 0
for validator in validators
error_count += validator().$error_count
return error_count
)
results.$valid = ko.dependentObservable(-> return results.$error_count() is 0)
# enabled and disabled
results.$enabled = ko.dependentObservable(->
enabled = true
for validator in validators
enabled &= validator().$enabled
return enabled
)
results.$disabled = ko.dependentObservable(-> return not results.$enabled())
# if there is a name, add to the view_model with $scoping
view_model["$#{form_name}"] = results if form_name
return results |
[
{
"context": "r\"\n and: \"i\"\n back: \"enrere\"\n changePassword: \"Canviar contrasenya\"\n choosePassword: \"Escollir contrasenya\"\n click",
"end": 147,
"score": 0.9975948929786682,
"start": 128,
"tag": "PASSWORD",
"value": "Canviar contrasenya"
},
{
"context": "assword: \"C... | t9n/ca.coffee | coWorkr-InSights/meteor-accounts-t9n | 80 | #Language: Catalan
#Translators: ixdi
ca =
t9Name: 'Català'
add: "afegir"
and: "i"
back: "enrere"
changePassword: "Canviar contrasenya"
choosePassword: "Escollir contrasenya"
clickAgree: "Al fer clic a Subscriure aproves la"
configure: "Disposició"
createAccount: "Crear compte"
currentPassword: "Contrasenya actual"
dontHaveAnAccount: "No tens compte?"
email: "Correu"
emailAddress: "Adreça de correu"
emailResetLink: "Restablir correu"
forgotPassword: "Has oblidat la contrasenya?"
ifYouAlreadyHaveAnAccount: "Si ja tens un compte"
newPassword: "Nova contrasenya"
newPasswordAgain: "Nova contrasenya (repetir)"
optional: "Opcional"
OR: "O"
password: "Contrasenya"
passwordAgain: "Contrasenya (repetir)"
privacyPolicy: "Política de Privacitat"
remove: "eliminar"
resetYourPassword: "Restablir la teva contrasenya"
setPassword: "Definir contrasenya"
sign: "Entra"
signIn: "Entra"
signin: "entra"
signOut: "Surt"
signUp: "Subscriure's"
signupCode: "Còdi de subscripció"
signUpWithYourEmailAddress: "Subscriure-te amb el correu"
terms: "Termes d'ús"
updateYourPassword: "Actualitzar la teva contrasenya"
username: "Usuari"
usernameOrEmail: "Usuari o correu"
with: "amb"
maxAllowedLength: "Longitud màxima permesa"
minRequiredLength: "Longitud mínima requerida"
resendVerificationEmail: "Envia el correu de nou"
resendVerificationEmailLink_pre: "Correu de verificació perdut?"
resendVerificationEmailLink_link: "Envia de nou"
info:
emailSent: "Correu enviat"
emailVerified: "Correu verificat"
passwordChanged: "Contrasenya canviada"
passwordReset: "Restablir contrasenya"
error:
emailRequired: "Es requereix el correu."
minChar: "7 caràcters mínim."
pwdsDontMatch: "Les contrasenyes no coincideixen"
pwOneDigit: "mínim un dígit."
pwOneLetter: "mínim una lletra."
signInRequired: "Has d'iniciar sessió per a fer això."
signupCodeIncorrect: "El còdi de subscripció no coincideix."
signupCodeRequired: "Es requereix el còdi de subscripció."
usernameIsEmail: "L'usuari no pot ser el correu."
usernameRequired: "Es requereix un usuari."
accounts:
#---- accounts-base
#"@" + domain + " email required":
#"A login handler should return a result or undefined":
"Email already exists.": "El correu ja existeix."
"Email doesn't match the criteria.": "El correu no coincideix amb els criteris."
"Invalid login token": "Token d'entrada invàlid"
"Login forbidden": "No es permet entrar en aquests moments"
#"Service " + options.service + " already configured":
"Service unknown": "Servei desconegut"
"Unrecognized options for login request": "Opcions desconegudes per la petició d'entrada"
"User validation failed": "No s'ha pogut validar l'usuari"
"Username already exists.": "L'usuari ja existeix."
"You are not logged in.": "No has iniciat sessió"
"You've been logged out by the server. Please log in again.": "Has estat desconnectat pel servidor. Si us plau, entra de nou."
"Your session has expired. Please log in again.": "La teva sessió ha expirat. Si us plau, entra de nou."
"Already verified": "Ja està verificat"
#---- accounts-oauth
"No matching login attempt found": "No s'ha trobat un intent de login vàlid"
#---- accounts-password-client
"Password is old. Please reset your password.": "La contrasenya és antiga, si us plau, restableix una contrasenya nova"
#---- accounts-password
"Incorrect password": "Contrasenya invàlida"
"Invalid email": "Correu invàlid"
"Must be logged in": "Has d'iniciar sessió"
"Need to set a username or email": "Has d'especificar un usuari o un correu"
"old password format": "Format de contrasenya antic"
"Password may not be empty": "La contrasenya no pot ser buida"
"Signups forbidden": "Subscripció no permesa en aquest moment"
"Token expired": "Token expirat"
"Token has invalid email address": "El token conté un correu invàlid"
"User has no password set": "Usuari no té contrasenya"
"User not found": "Usuari no trobat"
"Verify email link expired": "L'enllaç per a verificar el correu ha expirat"
"Verify email link is for unknown address": "L'enllaç per a verificar el correu conté una adreça desconeguda"
"At least 1 digit, 1 lowercase and 1 uppercase": "Al menys 1 dígit, 1 lletra minúscula i 1 majúscula"
"Please verify your email first. Check the email and follow the link!": "Si us plau, verifica el teu correu primer. Comprova el correu i segueix l'enllaç que conté!"
"A new email has been sent to you. If the email doesn't show up in your inbox, be sure to check your spam folder.": "Un nou correu ha estat enviat a la teva bústia. Si no reps el correu assegura't de comprovar la bústia de correu no desitjat."
#---- match
"Match failed": "Comprovació fallida"
#---- Misc...
"Unknown error": "Error desconegut"
T9n?.map "ca", ca
module?.exports = ca
| 4732 | #Language: Catalan
#Translators: ixdi
ca =
t9Name: 'Català'
add: "afegir"
and: "i"
back: "enrere"
changePassword: "<PASSWORD>"
choosePassword: "<PASSWORD>"
clickAgree: "Al fer clic a Subscriure aproves la"
configure: "Disposició"
createAccount: "Crear compte"
currentPassword: "<PASSWORD>"
dontHaveAnAccount: "No tens compte?"
email: "Correu"
emailAddress: "Adreça de correu"
emailResetLink: "Restablir correu"
forgotPassword: "<PASSWORD>?"
ifYouAlreadyHaveAnAccount: "Si ja tens un compte"
newPassword: "<PASSWORD>"
newPasswordAgain: "<PASSWORD> (<PASSWORD>)"
optional: "Opcional"
OR: "O"
password: "<PASSWORD>"
passwordAgain: "<PASSWORD>)"
privacyPolicy: "Política de Privacitat"
remove: "eliminar"
resetYourPassword: "<PASSWORD>"
setPassword: "<PASSWORD>"
sign: "Entra"
signIn: "Entra"
signin: "entra"
signOut: "Surt"
signUp: "Subscriure's"
signupCode: "Còdi de subscripció"
signUpWithYourEmailAddress: "Subscriure-te amb el correu"
terms: "Termes d'ús"
updateYourPassword: "<PASSWORD>"
username: "Usuari"
usernameOrEmail: "Usuari o correu"
with: "amb"
maxAllowedLength: "Longitud màxima permesa"
minRequiredLength: "Longitud mínima requerida"
resendVerificationEmail: "Envia el correu de nou"
resendVerificationEmailLink_pre: "Correu de verificació perdut?"
resendVerificationEmailLink_link: "Envia de nou"
info:
emailSent: "Correu enviat"
emailVerified: "Correu verificat"
passwordChanged: "<PASSWORD>"
passwordReset: "<PASSWORD>"
error:
emailRequired: "Es requereix el correu."
minChar: "7 caràcters mínim."
pwdsDontMatch: "Les contrasenyes no coincideixen"
pwOneDigit: "mínim un dígit."
pwOneLetter: "mínim una lletra."
signInRequired: "Has d'iniciar sessió per a fer això."
signupCodeIncorrect: "El còdi de subscripció no coincideix."
signupCodeRequired: "Es requereix el còdi de subscripció."
usernameIsEmail: "L'usuari no pot ser el correu."
usernameRequired: "Es requereix un usuari."
accounts:
#---- accounts-base
#"@" + domain + " email required":
#"A login handler should return a result or undefined":
"Email already exists.": "El correu ja existeix."
"Email doesn't match the criteria.": "El correu no coincideix amb els criteris."
"Invalid login token": "Token d'entrada invàlid"
"Login forbidden": "No es permet entrar en aquests moments"
#"Service " + options.service + " already configured":
"Service unknown": "Servei desconegut"
"Unrecognized options for login request": "Opcions desconegudes per la petició d'entrada"
"User validation failed": "No s'ha pogut validar l'usuari"
"Username already exists.": "L'usuari ja existeix."
"You are not logged in.": "No has iniciat sessió"
"You've been logged out by the server. Please log in again.": "Has estat desconnectat pel servidor. Si us plau, entra de nou."
"Your session has expired. Please log in again.": "La teva sessió ha expirat. Si us plau, entra de nou."
"Already verified": "Ja està verificat"
#---- accounts-oauth
"No matching login attempt found": "No s'ha trobat un intent de login vàlid"
#---- accounts-password-client
"Password is old. Please reset your password.": "La contrasenya és antiga, si us plau, restableix una contrasenya nova"
#---- accounts-password
"Incorrect password": "<PASSWORD>"
"Invalid email": "Correu invàlid"
"Must be logged in": "Has d'iniciar sessió"
"Need to set a username or email": "Has d'especificar un usuari o un correu"
"old password format": "Format de contrasenya antic"
"Password may not be empty": "La contrasenya no pot ser buida"
"Signups forbidden": "Subscripció no permesa en aquest moment"
"Token expired": "Token expirat"
"Token has invalid email address": "El token conté un correu invàlid"
"User has no password set": "Usuari no té contrasenya"
"User not found": "Usuari no trobat"
"Verify email link expired": "L'enllaç per a verificar el correu ha expirat"
"Verify email link is for unknown address": "L'enllaç per a verificar el correu conté una adreça desconeguda"
"At least 1 digit, 1 lowercase and 1 uppercase": "Al menys 1 dígit, 1 lletra minúscula i 1 majúscula"
"Please verify your email first. Check the email and follow the link!": "Si us plau, verifica el teu correu primer. Comprova el correu i segueix l'enllaç que conté!"
"A new email has been sent to you. If the email doesn't show up in your inbox, be sure to check your spam folder.": "Un nou correu ha estat enviat a la teva bústia. Si no reps el correu assegura't de comprovar la bústia de correu no desitjat."
#---- match
"Match failed": "Comprovació fallida"
#---- Misc...
"Unknown error": "Error desconegut"
T9n?.map "ca", ca
module?.exports = ca
| true | #Language: Catalan
#Translators: ixdi
ca =
t9Name: 'Català'
add: "afegir"
and: "i"
back: "enrere"
changePassword: "PI:PASSWORD:<PASSWORD>END_PI"
choosePassword: "PI:PASSWORD:<PASSWORD>END_PI"
clickAgree: "Al fer clic a Subscriure aproves la"
configure: "Disposició"
createAccount: "Crear compte"
currentPassword: "PI:PASSWORD:<PASSWORD>END_PI"
dontHaveAnAccount: "No tens compte?"
email: "Correu"
emailAddress: "Adreça de correu"
emailResetLink: "Restablir correu"
forgotPassword: "PI:PASSWORD:<PASSWORD>END_PI?"
ifYouAlreadyHaveAnAccount: "Si ja tens un compte"
newPassword: "PI:PASSWORD:<PASSWORD>END_PI"
newPasswordAgain: "PI:PASSWORD:<PASSWORD>END_PI (PI:PASSWORD:<PASSWORD>END_PI)"
optional: "Opcional"
OR: "O"
password: "PI:PASSWORD:<PASSWORD>END_PI"
passwordAgain: "PI:PASSWORD:<PASSWORD>END_PI)"
privacyPolicy: "Política de Privacitat"
remove: "eliminar"
resetYourPassword: "PI:PASSWORD:<PASSWORD>END_PI"
setPassword: "PI:PASSWORD:<PASSWORD>END_PI"
sign: "Entra"
signIn: "Entra"
signin: "entra"
signOut: "Surt"
signUp: "Subscriure's"
signupCode: "Còdi de subscripció"
signUpWithYourEmailAddress: "Subscriure-te amb el correu"
terms: "Termes d'ús"
updateYourPassword: "PI:PASSWORD:<PASSWORD>END_PI"
username: "Usuari"
usernameOrEmail: "Usuari o correu"
with: "amb"
maxAllowedLength: "Longitud màxima permesa"
minRequiredLength: "Longitud mínima requerida"
resendVerificationEmail: "Envia el correu de nou"
resendVerificationEmailLink_pre: "Correu de verificació perdut?"
resendVerificationEmailLink_link: "Envia de nou"
info:
emailSent: "Correu enviat"
emailVerified: "Correu verificat"
passwordChanged: "PI:PASSWORD:<PASSWORD>END_PI"
passwordReset: "PI:PASSWORD:<PASSWORD>END_PI"
error:
emailRequired: "Es requereix el correu."
minChar: "7 caràcters mínim."
pwdsDontMatch: "Les contrasenyes no coincideixen"
pwOneDigit: "mínim un dígit."
pwOneLetter: "mínim una lletra."
signInRequired: "Has d'iniciar sessió per a fer això."
signupCodeIncorrect: "El còdi de subscripció no coincideix."
signupCodeRequired: "Es requereix el còdi de subscripció."
usernameIsEmail: "L'usuari no pot ser el correu."
usernameRequired: "Es requereix un usuari."
accounts:
#---- accounts-base
#"@" + domain + " email required":
#"A login handler should return a result or undefined":
"Email already exists.": "El correu ja existeix."
"Email doesn't match the criteria.": "El correu no coincideix amb els criteris."
"Invalid login token": "Token d'entrada invàlid"
"Login forbidden": "No es permet entrar en aquests moments"
#"Service " + options.service + " already configured":
"Service unknown": "Servei desconegut"
"Unrecognized options for login request": "Opcions desconegudes per la petició d'entrada"
"User validation failed": "No s'ha pogut validar l'usuari"
"Username already exists.": "L'usuari ja existeix."
"You are not logged in.": "No has iniciat sessió"
"You've been logged out by the server. Please log in again.": "Has estat desconnectat pel servidor. Si us plau, entra de nou."
"Your session has expired. Please log in again.": "La teva sessió ha expirat. Si us plau, entra de nou."
"Already verified": "Ja està verificat"
#---- accounts-oauth
"No matching login attempt found": "No s'ha trobat un intent de login vàlid"
#---- accounts-password-client
"Password is old. Please reset your password.": "La contrasenya és antiga, si us plau, restableix una contrasenya nova"
#---- accounts-password
"Incorrect password": "PI:PASSWORD:<PASSWORD>END_PI"
"Invalid email": "Correu invàlid"
"Must be logged in": "Has d'iniciar sessió"
"Need to set a username or email": "Has d'especificar un usuari o un correu"
"old password format": "Format de contrasenya antic"
"Password may not be empty": "La contrasenya no pot ser buida"
"Signups forbidden": "Subscripció no permesa en aquest moment"
"Token expired": "Token expirat"
"Token has invalid email address": "El token conté un correu invàlid"
"User has no password set": "Usuari no té contrasenya"
"User not found": "Usuari no trobat"
"Verify email link expired": "L'enllaç per a verificar el correu ha expirat"
"Verify email link is for unknown address": "L'enllaç per a verificar el correu conté una adreça desconeguda"
"At least 1 digit, 1 lowercase and 1 uppercase": "Al menys 1 dígit, 1 lletra minúscula i 1 majúscula"
"Please verify your email first. Check the email and follow the link!": "Si us plau, verifica el teu correu primer. Comprova el correu i segueix l'enllaç que conté!"
"A new email has been sent to you. If the email doesn't show up in your inbox, be sure to check your spam folder.": "Un nou correu ha estat enviat a la teva bústia. Si no reps el correu assegura't de comprovar la bústia de correu no desitjat."
#---- match
"Match failed": "Comprovació fallida"
#---- Misc...
"Unknown error": "Error desconegut"
T9n?.map "ca", ca
module?.exports = ca
|
[
{
"context": "y need to go over public channels\nhttpAuthUser = \"sharelatex\"\nhttpAuthPass = \"rAp8aFvtk77m20PG6Kedzt3iOOrWKJ3p",
"end": 171,
"score": 0.9902275800704956,
"start": 161,
"tag": "USERNAME",
"value": "sharelatex"
},
{
"context": "nnels\nhttpAuthUser = \"sharelatex\"\nh... | settings.coffee | kingzevin/web-microservice | 0 | Path = require('path')
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUser = "sharelatex"
httpAuthPass = "rAp8aFvtk77m20PG6Kedzt3iOOrWKJ3pL5eiaQsP6s"
httpAuthUsers = {}
httpAuthUsers[httpAuthUser] = httpAuthPass
parse = (option)->
if option?
try
opt = JSON.parse(option)
return opt
catch err
console.error "problem parsing #{option}, invalid JSON"
return undefined
DATA_DIR = '/var/lib/sharelatex/data'
TMP_DIR = '/var/lib/sharelatex/tmp'
settings =
brandPrefix: ""
allowAnonymousReadAndWriteSharing:
process.env['SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING'] == 'true'
# Databases
# ---------
# ShareLaTeX's main persistant data store is MongoDB (http://www.mongodb.org/)
# Documentation about the URL connection string format can be found at:
#
# http://docs.mongodb.org/manual/reference/connection-string/
#
# The following works out of the box with Mongo's default settings:
mongo:
url : process.env["SHARELATEX_MONGO_URL"] or 'mongodb://dockerhost/sharelatex'
# Redis is used in ShareLaTeX for high volume queries, like real-time
# editing, and session management.
#
# The following config will work with Redis's default settings:
redis:
web: redisConfig =
host: process.env["SHARELATEX_REDIS_HOST"] or "dockerhost"
port: process.env["SHARELATEX_REDIS_PORT"] or "6379"
password: process.env["SHARELATEX_REDIS_PASS"] or ""
key_schema:
# document-updater
blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
docLines: ({doc_id}) -> "doclines:#{doc_id}"
docOps: ({doc_id}) -> "DocOps:#{doc_id}"
docVersion: ({doc_id}) -> "DocVersion:#{doc_id}"
docHash: ({doc_id}) -> "DocHash:#{doc_id}"
projectKey: ({doc_id}) -> "ProjectId:#{doc_id}"
docsInProject: ({project_id}) -> "DocsIn:#{project_id}"
ranges: ({doc_id}) -> "Ranges:#{doc_id}"
# document-updater:realtime
pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}"
# document-updater:history
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
# document-updater:lock
blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
# track-changes:lock
historyLock: ({doc_id}) -> "HistoryLock:#{doc_id}"
historyIndexLock: ({project_id}) -> "HistoryIndexLock:#{project_id}"
# track-chanegs:history
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
# realtime
clientsInProject: ({project_id}) -> "clients_in_project:#{project_id}"
connectedUser: ({project_id, client_id})-> "connected_user:#{project_id}:#{client_id}"
fairy: redisConfig
# track-changes and document-updater
realtime: redisConfig
documentupdater: redisConfig
lock: redisConfig
history: redisConfig
websessions: redisConfig
# The compile server (the clsi) uses a SQL database to cache files and
# meta-data. sqllite is the default, and the load is low enough that this will
# be fine in production (we use sqllite at sharelatex.com).
#
# If you want to configure a different database, see the Sequelize documentation
# for available options:
#
# https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage
#
mysql:
clsi:
database: "clsi"
username: "clsi"
password: ""
dialect: "sqlite"
storage: Path.join(DATA_DIR, "db.sqlite")
# File storage
# ------------
# ShareLaTeX can store binary files like images either locally or in Amazon
# S3. The default is locally:
filestore:
backend: "fs"
stores:
user_files: Path.join(DATA_DIR, "user_files")
template_files: Path.join(DATA_DIR, "template_files")
# To use Amazon S3 as a storage backend, comment out the above config, and
# uncomment the following, filling in your key, secret, and bucket name:
#
# filestore:
# backend: "s3"
# stores:
# user_files: "BUCKET_NAME"
# s3:
# key: "AWS_KEY"
# secret: "AWS_SECRET"
#
trackchanges:
continueOnError: true
# Local disk caching
# ------------------
path:
# If we ever need to write something to disk (e.g. incoming requests
# that need processing but may be too big for memory), then write
# them to disk here:
dumpFolder: Path.join(TMP_DIR, "dumpFolder")
# Where to write uploads before they are processed
uploadFolder: Path.join(TMP_DIR, "uploads")
# Where to write the project to disk before running LaTeX on it
compilesDir: Path.join(DATA_DIR, "compiles")
# Where to cache downloaded URLs for the CLSI
clsiCacheDir: Path.join(DATA_DIR, "cache")
# Server Config
# -------------
# Where your instance of ShareLaTeX can be found publicly. This is used
# when emails are sent out and in generated links:
siteUrl: siteUrl = process.env["SHARELATEX_SITE_URL"] or 'http://localhost'
# The name this is used to describe your ShareLaTeX Installation
appName: process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX (Community Edition)"
restrictInvitesToExistingAccounts: process.env["SHARELATEX_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS"] == 'true'
nav:
title: process.env["SHARELATEX_NAV_TITLE"] or process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX Community Edition"
# The email address which users will be directed to as the main point of
# contact for this installation of ShareLaTeX.
adminEmail: process.env["SHARELATEX_ADMIN_EMAIL"] or "placeholder@example.com"
# If provided, a sessionSecret is used to sign cookies so that they cannot be
# spoofed. This is recommended.
security:
sessionSecret: process.env["SHARELATEX_SESSION_SECRET"] or process.env["CRYPTO_RANDOM"]
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUsers: httpAuthUsers
# Should javascript assets be served minified or not.
useMinifiedJs: true
# Should static assets be sent with a header to tell the browser to cache
# them. This should be false in development where changes are being made,
# but should be set to true in production.
cacheStaticAssets: true
# If you are running ShareLaTeX over https, set this to true to send the
# cookie with a secure flag (recommended).
secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
# If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
# then set this to true to allow it to correctly detect the forwarded IP
# address and http/https protocol information.
behindProxy: process.env["SHARELATEX_BEHIND_PROXY"] or false
i18n:
subdomainLang:
www: {lngCode:process.env["SHARELATEX_SITE_LANGUAGE"] or "en", url: siteUrl}
defaultLng: process.env["SHARELATEX_SITE_LANGUAGE"] or "en"
apis:
web:
# zevin
url: process.env['WEB_URL'] or "http://#{process.env['WEB_API_HOST'] or process.env['WEB_HOST'] or "localhost"}:#{process.env['WEB_API_PORT'] or process.env['WEB_PORT'] or 3000}"
user: httpAuthUser
pass: httpAuthPass
project_history:
enabled: false
references:{}
notifications:undefined
defaultFeatures:
collaborators: -1
dropbox: true
versioning: true
compileTimeout: 180
compileGroup: "standard"
trackChanges: true
templates: true
references: true
## OPTIONAL CONFIGERABLE SETTINGS
if process.env["SHARELATEX_LEFT_FOOTER"]?
try
settings.nav.left_footer = JSON.parse(process.env["SHARELATEX_LEFT_FOOTER"])
catch e
console.error("could not parse SHARELATEX_LEFT_FOOTER, not valid JSON")
if process.env["SHARELATEX_RIGHT_FOOTER"]?
settings.nav.right_footer = process.env["SHARELATEX_RIGHT_FOOTER"]
try
settings.nav.right_footer = JSON.parse(process.env["SHARELATEX_RIGHT_FOOTER"])
catch e
console.error("could not parse SHARELATEX_RIGHT_FOOTER, not valid JSON")
if process.env["SHARELATEX_HEADER_IMAGE_URL"]?
settings.nav.custom_logo = process.env["SHARELATEX_HEADER_IMAGE_URL"]
if process.env["SHARELATEX_HEADER_NAV_LINKS"]?
console.error """
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# WARNING: SHARELATEX_HEADER_NAV_LINKS is no longer supported
# See https://github.com/sharelatex/sharelatex/wiki/Configuring-Headers,-Footers-&-Logo
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
"""
if process.env["SHARELATEX_HEADER_EXTRAS"]?
try
settings.nav.header_extras = JSON.parse(process.env["SHARELATEX_HEADER_EXTRAS"])
catch e
console.error("could not parse SHARELATEX_HEADER_EXTRAS, not valid JSON")
# Sending Email
# -------------
#
# You must configure a mail server to be able to send invite emails from
# ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
# documentation for available options:
#
# http://www.nodemailer.com/docs/transports
if process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]?
settings.email =
fromAddress: process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]
replyTo: process.env["SHARELATEX_EMAIL_REPLY_TO"] or ""
driver: process.env["SHARELATEX_EMAIL_DRIVER"]
parameters:
#AWS Creds
AWSAccessKeyID: process.env["SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID"]
AWSSecretKey: process.env["SHARELATEX_EMAIL_AWS_SES_SECRET_KEY"]
#SMTP Creds
host: process.env["SHARELATEX_EMAIL_SMTP_HOST"]
port: process.env["SHARELATEX_EMAIL_SMTP_PORT"],
secure: parse(process.env["SHARELATEX_EMAIL_SMTP_SECURE"])
ignoreTLS: parse(process.env["SHARELATEX_EMAIL_SMTP_IGNORE_TLS"])
textEncoding: process.env["SHARELATEX_EMAIL_TEXT_ENCODING"]
templates:
customFooter: process.env["SHARELATEX_CUSTOM_EMAIL_FOOTER"]
if process.env["SHARELATEX_EMAIL_SMTP_USER"]? or process.env["SHARELATEX_EMAIL_SMTP_PASS"]?
settings.email.parameters.auth =
user: process.env["SHARELATEX_EMAIL_SMTP_USER"]
pass: process.env["SHARELATEX_EMAIL_SMTP_PASS"]
if process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"]?
settings.email.parameters.tls =
rejectUnauthorized: parse(process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"])
# i18n
if process.env["SHARELATEX_LANG_DOMAIN_MAPPING"]?
settings.i18n.subdomainLang = parse(process.env["SHARELATEX_LANG_DOMAIN_MAPPING"])
# Password Settings
# -----------
# These restrict the passwords users can use when registering
# opts are from http://antelle.github.io/passfield
if process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"]
settings.passwordStrengthOptions =
pattern: process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or "aA$3"
length: {min:process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or 8, max: process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"] or 150}
#######################
# ShareLaTeX Server Pro
#######################
if parse(process.env["SHARELATEX_IS_SERVER_PRO"]) == true
settings.bypassPercentageRollouts = true
settings.apis.references =
url: "http://localhost:3040"
# LDAP - SERVER PRO ONLY
# ----------
if process.env["SHARELATEX_LDAP_HOST"]
console.error """
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# WARNING: The LDAP configuration format has changed in version 0.5.1
# See https://github.com/sharelatex/sharelatex/wiki/Server-Pro:-LDAP-Config
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
"""
if process.env["SHARELATEX_LDAP_URL"]
settings.externalAuth = true
settings.ldap =
emailAtt: process.env["SHARELATEX_LDAP_EMAIL_ATT"]
nameAtt: process.env["SHARELATEX_LDAP_NAME_ATT"]
lastNameAtt: process.env["SHARELATEX_LDAP_LAST_NAME_ATT"]
updateUserDetailsOnLogin: process.env["SHARELATEX_LDAP_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
placeholder: process.env["SHARELATEX_LDAP_PLACEHOLDER"]
server:
url: process.env["SHARELATEX_LDAP_URL"]
bindDn: process.env["SHARELATEX_LDAP_BIND_DN"]
bindCredentials: process.env["SHARELATEX_LDAP_BIND_CREDENTIALS"]
bindProperty: process.env["SHARELATEX_LDAP_BIND_PROPERTY"]
searchBase: process.env["SHARELATEX_LDAP_SEARCH_BASE"]
searchScope: process.env["SHARELATEX_LDAP_SEARCH_SCOPE"]
searchFilter: process.env["SHARELATEX_LDAP_SEARCH_FILTER"]
searchAttributes: (
if _ldap_search_attribs = process.env["SHARELATEX_LDAP_SEARCH_ATTRIBUTES"]
try
JSON.parse(_ldap_search_attribs)
catch e
console.error "could not parse SHARELATEX_LDAP_SEARCH_ATTRIBUTES"
else
undefined
)
groupDnProperty: process.env["SHARELATEX_LDAP_GROUP_DN_PROPERTY"]
groupSearchBase: process.env["SHARELATEX_LDAP_GROUP_SEARCH_BASE"]
groupSearchScope: process.env["SHARELATEX_LDAP_GROUP_SEARCH_SCOPE"]
groupSearchFilter: process.env["SHARELATEX_LDAP_GROUP_SEARCH_FILTER"]
groupSearchAttributes: (
if _ldap_group_search_attribs = process.env["SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"]
try
JSON.parse(_ldap_group_search_attribs)
catch e
console.error "could not parse SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"
else
undefined
)
cache: process.env["SHARELATEX_LDAP_CACHE"] == 'true'
timeout: (
if _ldap_timeout = process.env["SHARELATEX_LDAP_TIMEOUT"]
try
parseInt(_ldap_timeout)
catch e
console.error "Cannot parse SHARELATEX_LDAP_TIMEOUT"
else
undefined
)
connectTimeout: (
if _ldap_connect_timeout = process.env["SHARELATEX_LDAP_CONNECT_TIMEOUT"]
try
parseInt(_ldap_connect_timeout)
catch e
console.error "Cannot parse SHARELATEX_LDAP_CONNECT_TIMEOUT"
else
undefined
)
if process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"]
try
ca = JSON.parse(process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"])
catch e
console.error "could not parse SHARELATEX_LDAP_TLS_OPTS_CA_PATH, invalid JSON"
if typeof(ca) == 'string'
ca_paths = [ca]
else if typeof(ca) == 'object' && ca?.length?
ca_paths = ca
else
console.error "problem parsing SHARELATEX_LDAP_TLS_OPTS_CA_PATH"
settings.ldap.server.tlsOptions =
rejectUnauthorized: process.env["SHARELATEX_LDAP_TLS_OPTS_REJECT_UNAUTH"] == "true"
ca:ca_paths # e.g.'/etc/ldap/ca_certs.pem'
if process.env["SHARELATEX_SAML_ENTRYPOINT"]
# NOTE: see https://github.com/bergie/passport-saml/blob/master/README.md for docs of `server` options
settings.externalAuth = true
settings.saml =
updateUserDetailsOnLogin: process.env["SHARELATEX_SAML_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
identityServiceName: process.env["SHARELATEX_SAML_IDENTITY_SERVICE_NAME"]
emailField: process.env["SHARELATEX_SAML_EMAIL_FIELD"] || process.env["SHARELATEX_SAML_EMAIL_FIELD_NAME"]
firstNameField: process.env["SHARELATEX_SAML_FIRST_NAME_FIELD"]
lastNameField: process.env["SHARELATEX_SAML_LAST_NAME_FIELD"]
server:
# strings
entryPoint: process.env["SHARELATEX_SAML_ENTRYPOINT"]
callbackUrl: process.env["SHARELATEX_SAML_CALLBACK_URL"]
issuer: process.env["SHARELATEX_SAML_ISSUER"]
decryptionPvk: process.env["SHARELATEX_SAML_DECRYPTION_PVK"]
signatureAlgorithm: process.env["SHARELATEX_SAML_SIGNATURE_ALGORITHM"]
identifierFormat: process.env["SHARELATEX_SAML_IDENTIFIER_FORMAT"]
attributeConsumingServiceIndex: process.env["SHARELATEX_SAML_ATTRIBUTE_CONSUMING_SERVICE_INDEX"]
authnContext: process.env["SHARELATEX_SAML_AUTHN_CONTEXT"]
authnRequestBinding: process.env["SHARELATEX_SAML_AUTHN_REQUEST_BINDING"]
validateInResponseTo: process.env["SHARELATEX_SAML_VALIDATE_IN_RESPONSE_TO"]
cacheProvider: process.env["SHARELATEX_SAML_CACHE_PROVIDER"]
logoutUrl: process.env["SHARELATEX_SAML_LOGOUT_URL"]
logoutCallbackUrl: process.env["SHARELATEX_SAML_LOGOUT_CALLBACK_URL"]
disableRequestedAuthnContext: process.env["SHARELATEX_SAML_DISABLE_REQUESTED_AUTHN_CONTEXT"] == 'true'
forceAuthn: process.env["SHARELATEX_SAML_FORCE_AUTHN"] == 'true'
skipRequestCompression: process.env["SHARELATEX_SAML_SKIP_REQUEST_COMPRESSION"] == 'true'
acceptedClockSkewMs: (
if _saml_skew = process.env["SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"]
try
parseInt(_saml_skew)
catch e
console.error "Cannot parse SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"
else
undefined
)
requestIdExpirationPeriodMs: (
if _saml_exiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"]
try
parseInt(_saml_expiration)
catch e
console.error "Cannot parse SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"
else
undefined
)
additionalParams: (
if _saml_additionalParams = process.env["SHARELATEX_SAML_ADDITIONAL_PARAMS"]
try
JSON.parse(_saml_additionalParams)
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_PARAMS"
else
undefined
)
additionalAuthorizeParams: (
if _saml_additionalAuthorizeParams = process.env["SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"]
try
JSON.parse(_saml_additionalAuthorizeParams )
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"
else
undefined
)
additionalLogoutParams: (
if _saml_additionalLogoutParams = process.env["SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"]
try
JSON.parse(_saml_additionalLogoutParams )
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"
else
undefined
)
# SHARELATEX_SAML_CERT cannot be empty
# https://github.com/bergie/passport-saml/commit/f6b1c885c0717f1083c664345556b535f217c102
if process.env["SHARELATEX_SAML_CERT"]
settings.saml.server.cert = process.env["SHARELATEX_SAML_CERT"]
settings.saml.server.privateCert = process.env["SHARELATEX_SAML_PRIVATE_CERT"]
# Compiler
# --------
if process.env["SANDBOXED_COMPILES"] == "true"
settings.clsi =
dockerRunner: true
docker:
image: process.env["TEX_LIVE_DOCKER_IMAGE"]
env:
HOME: "/tmp"
PATH: process.env["COMPILER_PATH"] or "/usr/local/texlive/2015/bin/x86_64-linux:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
user: "www-data"
if !settings.path?
settings.path = {}
settings.path.synctexBaseDir = () -> "/compile"
if process.env['SANDBOXED_COMPILES_SIBLING_CONTAINERS'] == 'true'
console.log("Using sibling containers for sandboxed compiles")
if process.env['SANDBOXED_COMPILES_HOST_DIR']
settings.path.sandboxedCompilesHostDir = process.env['SANDBOXED_COMPILES_HOST_DIR']
else
console.error('Sibling containers, but SANDBOXED_COMPILES_HOST_DIR not set')
# Templates
# ---------
if process.env["SHARELATEX_TEMPLATES_USER_ID"]
settings.templates =
mountPointUrl: "/templates"
user_id: process.env["SHARELATEX_TEMPLATES_USER_ID"]
settings.templateLinks = parse(process.env["SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS"])
# /Learn
# -------
if process.env["SHARELATEX_PROXY_LEARN"]?
settings.proxyLearn = parse(process.env["SHARELATEX_PROXY_LEARN"])
# /References
# -----------
if process.env["SHARELATEX_ELASTICSEARCH_URL"]?
settings.references.elasticsearch =
host: process.env["SHARELATEX_ELASTICSEARCH_URL"]
# With lots of incoming and outgoing HTTP connections to different services,
# sometimes long running, it is a good idea to increase the default number
# of sockets that Node will hold open.
http = require('http')
http.globalAgent.maxSockets = 300
https = require('https')
https.globalAgent.maxSockets = 300
module.exports = settings | 27729 | Path = require('path')
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUser = "sharelatex"
httpAuthPass = "<PASSWORD>"
httpAuthUsers = {}
httpAuthUsers[httpAuthUser] = httpAuthPass
parse = (option)->
if option?
try
opt = JSON.parse(option)
return opt
catch err
console.error "problem parsing #{option}, invalid JSON"
return undefined
DATA_DIR = '/var/lib/sharelatex/data'
TMP_DIR = '/var/lib/sharelatex/tmp'
settings =
brandPrefix: ""
allowAnonymousReadAndWriteSharing:
process.env['SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING'] == 'true'
# Databases
# ---------
# ShareLaTeX's main persistant data store is MongoDB (http://www.mongodb.org/)
# Documentation about the URL connection string format can be found at:
#
# http://docs.mongodb.org/manual/reference/connection-string/
#
# The following works out of the box with Mongo's default settings:
mongo:
url : process.env["SHARELATEX_MONGO_URL"] or 'mongodb://dockerhost/sharelatex'
# Redis is used in ShareLaTeX for high volume queries, like real-time
# editing, and session management.
#
# The following config will work with Redis's default settings:
redis:
web: redisConfig =
host: process.env["SHARELATEX_REDIS_HOST"] or "dockerhost"
port: process.env["SHARELATEX_REDIS_PORT"] or "6379"
password: process.env["SHARELATEX_REDIS_PASS"] or ""
key_schema:
# document-updater
blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
docLines: ({doc_id}) -> "doclines:#{doc_id}"
docOps: ({doc_id}) -> "DocOps:#{doc_id}"
docVersion: ({doc_id}) -> "DocVersion:#{doc_id}"
docHash: ({doc_id}) -> "DocHash:#{doc_id}"
projectKey: ({doc_id}) -> "ProjectId:#{doc_id}"
docsInProject: ({project_id}) -> "DocsIn:#{project_id}"
ranges: ({doc_id}) -> "Ranges:#{doc_id}"
# document-updater:realtime
pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}"
# document-updater:history
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
# document-updater:lock
blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
# track-changes:lock
historyLock: ({doc_id}) -> "HistoryLock:#{doc_id}"
historyIndexLock: ({project_id}) -> "HistoryIndexLock:#{project_id}"
# track-chanegs:history
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
# realtime
clientsInProject: ({project_id}) -> "clients_in_project:#{project_id}"
connectedUser: ({project_id, client_id})-> "connected_user:#{project_id}:#{client_id}"
fairy: redisConfig
# track-changes and document-updater
realtime: redisConfig
documentupdater: redisConfig
lock: redisConfig
history: redisConfig
websessions: redisConfig
# The compile server (the clsi) uses a SQL database to cache files and
# meta-data. sqllite is the default, and the load is low enough that this will
# be fine in production (we use sqllite at sharelatex.com).
#
# If you want to configure a different database, see the Sequelize documentation
# for available options:
#
# https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage
#
mysql:
clsi:
database: "clsi"
username: "clsi"
password: ""
dialect: "sqlite"
storage: Path.join(DATA_DIR, "db.sqlite")
# File storage
# ------------
# ShareLaTeX can store binary files like images either locally or in Amazon
# S3. The default is locally:
filestore:
backend: "fs"
stores:
user_files: Path.join(DATA_DIR, "user_files")
template_files: Path.join(DATA_DIR, "template_files")
# To use Amazon S3 as a storage backend, comment out the above config, and
# uncomment the following, filling in your key, secret, and bucket name:
#
# filestore:
# backend: "s3"
# stores:
# user_files: "BUCKET_NAME"
# s3:
# key: "AWS_KEY"
# secret: "AWS_SECRET"
#
trackchanges:
continueOnError: true
# Local disk caching
# ------------------
path:
# If we ever need to write something to disk (e.g. incoming requests
# that need processing but may be too big for memory), then write
# them to disk here:
dumpFolder: Path.join(TMP_DIR, "dumpFolder")
# Where to write uploads before they are processed
uploadFolder: Path.join(TMP_DIR, "uploads")
# Where to write the project to disk before running LaTeX on it
compilesDir: Path.join(DATA_DIR, "compiles")
# Where to cache downloaded URLs for the CLSI
clsiCacheDir: Path.join(DATA_DIR, "cache")
# Server Config
# -------------
# Where your instance of ShareLaTeX can be found publicly. This is used
# when emails are sent out and in generated links:
siteUrl: siteUrl = process.env["SHARELATEX_SITE_URL"] or 'http://localhost'
# The name this is used to describe your ShareLaTeX Installation
appName: process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX (Community Edition)"
restrictInvitesToExistingAccounts: process.env["SHARELATEX_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS"] == 'true'
nav:
title: process.env["SHARELATEX_NAV_TITLE"] or process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX Community Edition"
# The email address which users will be directed to as the main point of
# contact for this installation of ShareLaTeX.
adminEmail: process.env["SHARELATEX_ADMIN_EMAIL"] or "<EMAIL>"
# If provided, a sessionSecret is used to sign cookies so that they cannot be
# spoofed. This is recommended.
security:
sessionSecret: process.env["SHARELATEX_SESSION_SECRET"] or process.env["CRYPTO_RANDOM"]
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUsers: httpAuthUsers
# Should javascript assets be served minified or not.
useMinifiedJs: true
# Should static assets be sent with a header to tell the browser to cache
# them. This should be false in development where changes are being made,
# but should be set to true in production.
cacheStaticAssets: true
# If you are running ShareLaTeX over https, set this to true to send the
# cookie with a secure flag (recommended).
secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
# If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
# then set this to true to allow it to correctly detect the forwarded IP
# address and http/https protocol information.
behindProxy: process.env["SHARELATEX_BEHIND_PROXY"] or false
i18n:
subdomainLang:
www: {lngCode:process.env["SHARELATEX_SITE_LANGUAGE"] or "en", url: siteUrl}
defaultLng: process.env["SHARELATEX_SITE_LANGUAGE"] or "en"
apis:
web:
# zevin
url: process.env['WEB_URL'] or "http://#{process.env['WEB_API_HOST'] or process.env['WEB_HOST'] or "localhost"}:#{process.env['WEB_API_PORT'] or process.env['WEB_PORT'] or 3000}"
user: httpAuthUser
pass: httpAuthPass
project_history:
enabled: false
references:{}
notifications:undefined
defaultFeatures:
collaborators: -1
dropbox: true
versioning: true
compileTimeout: 180
compileGroup: "standard"
trackChanges: true
templates: true
references: true
## OPTIONAL CONFIGERABLE SETTINGS
if process.env["SHARELATEX_LEFT_FOOTER"]?
try
settings.nav.left_footer = JSON.parse(process.env["SHARELATEX_LEFT_FOOTER"])
catch e
console.error("could not parse SHARELATEX_LEFT_FOOTER, not valid JSON")
if process.env["SHARELATEX_RIGHT_FOOTER"]?
settings.nav.right_footer = process.env["SHARELATEX_RIGHT_FOOTER"]
try
settings.nav.right_footer = JSON.parse(process.env["SHARELATEX_RIGHT_FOOTER"])
catch e
console.error("could not parse SHARELATEX_RIGHT_FOOTER, not valid JSON")
if process.env["SHARELATEX_HEADER_IMAGE_URL"]?
settings.nav.custom_logo = process.env["SHARELATEX_HEADER_IMAGE_URL"]
if process.env["SHARELATEX_HEADER_NAV_LINKS"]?
console.error """
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# WARNING: SHARELATEX_HEADER_NAV_LINKS is no longer supported
# See https://github.com/sharelatex/sharelatex/wiki/Configuring-Headers,-Footers-&-Logo
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
"""
if process.env["SHARELATEX_HEADER_EXTRAS"]?
try
settings.nav.header_extras = JSON.parse(process.env["SHARELATEX_HEADER_EXTRAS"])
catch e
console.error("could not parse SHARELATEX_HEADER_EXTRAS, not valid JSON")
# Sending Email
# -------------
#
# You must configure a mail server to be able to send invite emails from
# ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
# documentation for available options:
#
# http://www.nodemailer.com/docs/transports
if process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]?
settings.email =
fromAddress: process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]
replyTo: process.env["SHARELATEX_EMAIL_REPLY_TO"] or ""
driver: process.env["SHARELATEX_EMAIL_DRIVER"]
parameters:
#AWS Creds
AWSAccessKeyID: process.env["SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID"]
AWSSecretKey: process.env["SHARELATEX_EMAIL_AWS_SES_SECRET_KEY"]
#SMTP Creds
host: process.env["SHARELATEX_EMAIL_SMTP_HOST"]
port: process.env["SHARELATEX_EMAIL_SMTP_PORT"],
secure: parse(process.env["SHARELATEX_EMAIL_SMTP_SECURE"])
ignoreTLS: parse(process.env["SHARELATEX_EMAIL_SMTP_IGNORE_TLS"])
textEncoding: process.env["SHARELATEX_EMAIL_TEXT_ENCODING"]
templates:
customFooter: process.env["SHARELATEX_CUSTOM_EMAIL_FOOTER"]
if process.env["SHARELATEX_EMAIL_SMTP_USER"]? or process.env["SHARELATEX_EMAIL_SMTP_PASS"]?
settings.email.parameters.auth =
user: process.env["SHARELATEX_EMAIL_SMTP_USER"]
pass: process.env["SHARELATEX_EMAIL_SMTP_PASS"]
if process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"]?
settings.email.parameters.tls =
rejectUnauthorized: parse(process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"])
# i18n
if process.env["SHARELATEX_LANG_DOMAIN_MAPPING"]?
settings.i18n.subdomainLang = parse(process.env["SHARELATEX_LANG_DOMAIN_MAPPING"])
# Password Settings
# -----------
# These restrict the passwords users can use when registering
# opts are from http://antelle.github.io/passfield
if process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"]
settings.passwordStrengthOptions =
pattern: process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or "<PASSWORD>"
length: {min:process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or 8, max: process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"] or 150}
#######################
# ShareLaTeX Server Pro
#######################
if parse(process.env["SHARELATEX_IS_SERVER_PRO"]) == true
settings.bypassPercentageRollouts = true
settings.apis.references =
url: "http://localhost:3040"
# LDAP - SERVER PRO ONLY
# ----------
if process.env["SHARELATEX_LDAP_HOST"]
console.error """
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# WARNING: The LDAP configuration format has changed in version 0.5.1
# See https://github.com/sharelatex/sharelatex/wiki/Server-Pro:-LDAP-Config
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
"""
if process.env["SHARELATEX_LDAP_URL"]
settings.externalAuth = true
settings.ldap =
emailAtt: process.env["SHARELATEX_LDAP_EMAIL_ATT"]
nameAtt: process.env["SHARELATEX_LDAP_NAME_ATT"]
lastNameAtt: process.env["SHARELATEX_LDAP_LAST_NAME_ATT"]
updateUserDetailsOnLogin: process.env["SHARELATEX_LDAP_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
placeholder: process.env["SHARELATEX_LDAP_PLACEHOLDER"]
server:
url: process.env["SHARELATEX_LDAP_URL"]
bindDn: process.env["SHARELATEX_LDAP_BIND_DN"]
bindCredentials: process.env["SHARELATEX_LDAP_BIND_CREDENTIALS"]
bindProperty: process.env["SHARELATEX_LDAP_BIND_PROPERTY"]
searchBase: process.env["SHARELATEX_LDAP_SEARCH_BASE"]
searchScope: process.env["SHARELATEX_LDAP_SEARCH_SCOPE"]
searchFilter: process.env["SHARELATEX_LDAP_SEARCH_FILTER"]
searchAttributes: (
if _ldap_search_attribs = process.env["SHARELATEX_LDAP_SEARCH_ATTRIBUTES"]
try
JSON.parse(_ldap_search_attribs)
catch e
console.error "could not parse SHARELATEX_LDAP_SEARCH_ATTRIBUTES"
else
undefined
)
groupDnProperty: process.env["SHARELATEX_LDAP_GROUP_DN_PROPERTY"]
groupSearchBase: process.env["SHARELATEX_LDAP_GROUP_SEARCH_BASE"]
groupSearchScope: process.env["SHARELATEX_LDAP_GROUP_SEARCH_SCOPE"]
groupSearchFilter: process.env["SHARELATEX_LDAP_GROUP_SEARCH_FILTER"]
groupSearchAttributes: (
if _ldap_group_search_attribs = process.env["SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"]
try
JSON.parse(_ldap_group_search_attribs)
catch e
console.error "could not parse SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"
else
undefined
)
cache: process.env["SHARELATEX_LDAP_CACHE"] == 'true'
timeout: (
if _ldap_timeout = process.env["SHARELATEX_LDAP_TIMEOUT"]
try
parseInt(_ldap_timeout)
catch e
console.error "Cannot parse SHARELATEX_LDAP_TIMEOUT"
else
undefined
)
connectTimeout: (
if _ldap_connect_timeout = process.env["SHARELATEX_LDAP_CONNECT_TIMEOUT"]
try
parseInt(_ldap_connect_timeout)
catch e
console.error "Cannot parse SHARELATEX_LDAP_CONNECT_TIMEOUT"
else
undefined
)
if process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"]
try
ca = JSON.parse(process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"])
catch e
console.error "could not parse SHARELATEX_LDAP_TLS_OPTS_CA_PATH, invalid JSON"
if typeof(ca) == 'string'
ca_paths = [ca]
else if typeof(ca) == 'object' && ca?.length?
ca_paths = ca
else
console.error "problem parsing SHARELATEX_LDAP_TLS_OPTS_CA_PATH"
settings.ldap.server.tlsOptions =
rejectUnauthorized: process.env["SHARELATEX_LDAP_TLS_OPTS_REJECT_UNAUTH"] == "true"
ca:ca_paths # e.g.'/etc/ldap/ca_certs.pem'
if process.env["SHARELATEX_SAML_ENTRYPOINT"]
# NOTE: see https://github.com/bergie/passport-saml/blob/master/README.md for docs of `server` options
settings.externalAuth = true
settings.saml =
updateUserDetailsOnLogin: process.env["SHARELATEX_SAML_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
identityServiceName: process.env["SHARELATEX_SAML_IDENTITY_SERVICE_NAME"]
emailField: process.env["SHARELATEX_SAML_EMAIL_FIELD"] || process.env["SHARELATEX_SAML_EMAIL_FIELD_NAME"]
firstNameField: process.env["SHARELATEX_SAML_FIRST_NAME_FIELD"]
lastNameField: process.env["SHARELATEX_SAML_LAST_NAME_FIELD"]
server:
# strings
entryPoint: process.env["SHARELATEX_SAML_ENTRYPOINT"]
callbackUrl: process.env["SHARELATEX_SAML_CALLBACK_URL"]
issuer: process.env["SHARELATEX_SAML_ISSUER"]
decryptionPvk: process.env["SHARELATEX_SAML_DECRYPTION_PVK"]
signatureAlgorithm: process.env["SHARELATEX_SAML_SIGNATURE_ALGORITHM"]
identifierFormat: process.env["SHARELATEX_SAML_IDENTIFIER_FORMAT"]
attributeConsumingServiceIndex: process.env["SHARELATEX_SAML_ATTRIBUTE_CONSUMING_SERVICE_INDEX"]
authnContext: process.env["SHARELATEX_SAML_AUTHN_CONTEXT"]
authnRequestBinding: process.env["SHARELATEX_SAML_AUTHN_REQUEST_BINDING"]
validateInResponseTo: process.env["SHARELATEX_SAML_VALIDATE_IN_RESPONSE_TO"]
cacheProvider: process.env["SHARELATEX_SAML_CACHE_PROVIDER"]
logoutUrl: process.env["SHARELATEX_SAML_LOGOUT_URL"]
logoutCallbackUrl: process.env["SHARELATEX_SAML_LOGOUT_CALLBACK_URL"]
disableRequestedAuthnContext: process.env["SHARELATEX_SAML_DISABLE_REQUESTED_AUTHN_CONTEXT"] == 'true'
forceAuthn: process.env["SHARELATEX_SAML_FORCE_AUTHN"] == 'true'
skipRequestCompression: process.env["SHARELATEX_SAML_SKIP_REQUEST_COMPRESSION"] == 'true'
acceptedClockSkewMs: (
if _saml_skew = process.env["SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"]
try
parseInt(_saml_skew)
catch e
console.error "Cannot parse SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"
else
undefined
)
requestIdExpirationPeriodMs: (
if _saml_exiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"]
try
parseInt(_saml_expiration)
catch e
console.error "Cannot parse SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"
else
undefined
)
additionalParams: (
if _saml_additionalParams = process.env["SHARELATEX_SAML_ADDITIONAL_PARAMS"]
try
JSON.parse(_saml_additionalParams)
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_PARAMS"
else
undefined
)
additionalAuthorizeParams: (
if _saml_additionalAuthorizeParams = process.env["SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"]
try
JSON.parse(_saml_additionalAuthorizeParams )
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"
else
undefined
)
additionalLogoutParams: (
if _saml_additionalLogoutParams = process.env["SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"]
try
JSON.parse(_saml_additionalLogoutParams )
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"
else
undefined
)
# SHARELATEX_SAML_CERT cannot be empty
# https://github.com/bergie/passport-saml/commit/f6b1c885c0717f1083c664345556b535f217c102
if process.env["SHARELATEX_SAML_CERT"]
settings.saml.server.cert = process.env["SHARELATEX_SAML_CERT"]
settings.saml.server.privateCert = process.env["SHARELATEX_SAML_PRIVATE_CERT"]
# Compiler
# --------
if process.env["SANDBOXED_COMPILES"] == "true"
settings.clsi =
dockerRunner: true
docker:
image: process.env["TEX_LIVE_DOCKER_IMAGE"]
env:
HOME: "/tmp"
PATH: process.env["COMPILER_PATH"] or "/usr/local/texlive/2015/bin/x86_64-linux:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
user: "www-data"
if !settings.path?
settings.path = {}
settings.path.synctexBaseDir = () -> "/compile"
if process.env['SANDBOXED_COMPILES_SIBLING_CONTAINERS'] == 'true'
console.log("Using sibling containers for sandboxed compiles")
if process.env['SANDBOXED_COMPILES_HOST_DIR']
settings.path.sandboxedCompilesHostDir = process.env['SANDBOXED_COMPILES_HOST_DIR']
else
console.error('Sibling containers, but SANDBOXED_COMPILES_HOST_DIR not set')
# Templates
# ---------
if process.env["SHARELATEX_TEMPLATES_USER_ID"]
settings.templates =
mountPointUrl: "/templates"
user_id: process.env["SHARELATEX_TEMPLATES_USER_ID"]
settings.templateLinks = parse(process.env["SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS"])
# /Learn
# -------
if process.env["SHARELATEX_PROXY_LEARN"]?
settings.proxyLearn = parse(process.env["SHARELATEX_PROXY_LEARN"])
# /References
# -----------
if process.env["SHARELATEX_ELASTICSEARCH_URL"]?
settings.references.elasticsearch =
host: process.env["SHARELATEX_ELASTICSEARCH_URL"]
# With lots of incoming and outgoing HTTP connections to different services,
# sometimes long running, it is a good idea to increase the default number
# of sockets that Node will hold open.
http = require('http')
http.globalAgent.maxSockets = 300
https = require('https')
https.globalAgent.maxSockets = 300
module.exports = settings | true | Path = require('path')
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUser = "sharelatex"
httpAuthPass = "PI:PASSWORD:<PASSWORD>END_PI"
httpAuthUsers = {}
httpAuthUsers[httpAuthUser] = httpAuthPass
parse = (option)->
if option?
try
opt = JSON.parse(option)
return opt
catch err
console.error "problem parsing #{option}, invalid JSON"
return undefined
DATA_DIR = '/var/lib/sharelatex/data'
TMP_DIR = '/var/lib/sharelatex/tmp'
settings =
brandPrefix: ""
allowAnonymousReadAndWriteSharing:
process.env['SHARELATEX_ALLOW_ANONYMOUS_READ_AND_WRITE_SHARING'] == 'true'
# Databases
# ---------
# ShareLaTeX's main persistant data store is MongoDB (http://www.mongodb.org/)
# Documentation about the URL connection string format can be found at:
#
# http://docs.mongodb.org/manual/reference/connection-string/
#
# The following works out of the box with Mongo's default settings:
mongo:
url : process.env["SHARELATEX_MONGO_URL"] or 'mongodb://dockerhost/sharelatex'
# Redis is used in ShareLaTeX for high volume queries, like real-time
# editing, and session management.
#
# The following config will work with Redis's default settings:
redis:
web: redisConfig =
host: process.env["SHARELATEX_REDIS_HOST"] or "dockerhost"
port: process.env["SHARELATEX_REDIS_PORT"] or "6379"
password: process.env["SHARELATEX_REDIS_PASS"] or ""
key_schema:
# document-updater
blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
docLines: ({doc_id}) -> "doclines:#{doc_id}"
docOps: ({doc_id}) -> "DocOps:#{doc_id}"
docVersion: ({doc_id}) -> "DocVersion:#{doc_id}"
docHash: ({doc_id}) -> "DocHash:#{doc_id}"
projectKey: ({doc_id}) -> "ProjectId:#{doc_id}"
docsInProject: ({project_id}) -> "DocsIn:#{project_id}"
ranges: ({doc_id}) -> "Ranges:#{doc_id}"
# document-updater:realtime
pendingUpdates: ({doc_id}) -> "PendingUpdates:#{doc_id}"
# document-updater:history
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
# document-updater:lock
blockingKey: ({doc_id}) -> "Blocking:#{doc_id}"
# track-changes:lock
historyLock: ({doc_id}) -> "HistoryLock:#{doc_id}"
historyIndexLock: ({project_id}) -> "HistoryIndexLock:#{project_id}"
# track-chanegs:history
uncompressedHistoryOps: ({doc_id}) -> "UncompressedHistoryOps:#{doc_id}"
docsWithHistoryOps: ({project_id}) -> "DocsWithHistoryOps:#{project_id}"
# realtime
clientsInProject: ({project_id}) -> "clients_in_project:#{project_id}"
connectedUser: ({project_id, client_id})-> "connected_user:#{project_id}:#{client_id}"
fairy: redisConfig
# track-changes and document-updater
realtime: redisConfig
documentupdater: redisConfig
lock: redisConfig
history: redisConfig
websessions: redisConfig
# The compile server (the clsi) uses a SQL database to cache files and
# meta-data. sqllite is the default, and the load is low enough that this will
# be fine in production (we use sqllite at sharelatex.com).
#
# If you want to configure a different database, see the Sequelize documentation
# for available options:
#
# https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage
#
mysql:
clsi:
database: "clsi"
username: "clsi"
password: ""
dialect: "sqlite"
storage: Path.join(DATA_DIR, "db.sqlite")
# File storage
# ------------
# ShareLaTeX can store binary files like images either locally or in Amazon
# S3. The default is locally:
filestore:
backend: "fs"
stores:
user_files: Path.join(DATA_DIR, "user_files")
template_files: Path.join(DATA_DIR, "template_files")
# To use Amazon S3 as a storage backend, comment out the above config, and
# uncomment the following, filling in your key, secret, and bucket name:
#
# filestore:
# backend: "s3"
# stores:
# user_files: "BUCKET_NAME"
# s3:
# key: "AWS_KEY"
# secret: "AWS_SECRET"
#
trackchanges:
continueOnError: true
# Local disk caching
# ------------------
path:
# If we ever need to write something to disk (e.g. incoming requests
# that need processing but may be too big for memory), then write
# them to disk here:
dumpFolder: Path.join(TMP_DIR, "dumpFolder")
# Where to write uploads before they are processed
uploadFolder: Path.join(TMP_DIR, "uploads")
# Where to write the project to disk before running LaTeX on it
compilesDir: Path.join(DATA_DIR, "compiles")
# Where to cache downloaded URLs for the CLSI
clsiCacheDir: Path.join(DATA_DIR, "cache")
# Server Config
# -------------
# Where your instance of ShareLaTeX can be found publicly. This is used
# when emails are sent out and in generated links:
siteUrl: siteUrl = process.env["SHARELATEX_SITE_URL"] or 'http://localhost'
# The name this is used to describe your ShareLaTeX Installation
appName: process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX (Community Edition)"
restrictInvitesToExistingAccounts: process.env["SHARELATEX_RESTRICT_INVITES_TO_EXISTING_ACCOUNTS"] == 'true'
nav:
title: process.env["SHARELATEX_NAV_TITLE"] or process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX Community Edition"
# The email address which users will be directed to as the main point of
# contact for this installation of ShareLaTeX.
adminEmail: process.env["SHARELATEX_ADMIN_EMAIL"] or "PI:EMAIL:<EMAIL>END_PI"
# If provided, a sessionSecret is used to sign cookies so that they cannot be
# spoofed. This is recommended.
security:
sessionSecret: process.env["SHARELATEX_SESSION_SECRET"] or process.env["CRYPTO_RANDOM"]
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUsers: httpAuthUsers
# Should javascript assets be served minified or not.
useMinifiedJs: true
# Should static assets be sent with a header to tell the browser to cache
# them. This should be false in development where changes are being made,
# but should be set to true in production.
cacheStaticAssets: true
# If you are running ShareLaTeX over https, set this to true to send the
# cookie with a secure flag (recommended).
secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
# If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
# then set this to true to allow it to correctly detect the forwarded IP
# address and http/https protocol information.
behindProxy: process.env["SHARELATEX_BEHIND_PROXY"] or false
i18n:
subdomainLang:
www: {lngCode:process.env["SHARELATEX_SITE_LANGUAGE"] or "en", url: siteUrl}
defaultLng: process.env["SHARELATEX_SITE_LANGUAGE"] or "en"
apis:
web:
# zevin
url: process.env['WEB_URL'] or "http://#{process.env['WEB_API_HOST'] or process.env['WEB_HOST'] or "localhost"}:#{process.env['WEB_API_PORT'] or process.env['WEB_PORT'] or 3000}"
user: httpAuthUser
pass: httpAuthPass
project_history:
enabled: false
references:{}
notifications:undefined
defaultFeatures:
collaborators: -1
dropbox: true
versioning: true
compileTimeout: 180
compileGroup: "standard"
trackChanges: true
templates: true
references: true
## OPTIONAL CONFIGERABLE SETTINGS
if process.env["SHARELATEX_LEFT_FOOTER"]?
try
settings.nav.left_footer = JSON.parse(process.env["SHARELATEX_LEFT_FOOTER"])
catch e
console.error("could not parse SHARELATEX_LEFT_FOOTER, not valid JSON")
if process.env["SHARELATEX_RIGHT_FOOTER"]?
settings.nav.right_footer = process.env["SHARELATEX_RIGHT_FOOTER"]
try
settings.nav.right_footer = JSON.parse(process.env["SHARELATEX_RIGHT_FOOTER"])
catch e
console.error("could not parse SHARELATEX_RIGHT_FOOTER, not valid JSON")
if process.env["SHARELATEX_HEADER_IMAGE_URL"]?
settings.nav.custom_logo = process.env["SHARELATEX_HEADER_IMAGE_URL"]
if process.env["SHARELATEX_HEADER_NAV_LINKS"]?
console.error """
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# WARNING: SHARELATEX_HEADER_NAV_LINKS is no longer supported
# See https://github.com/sharelatex/sharelatex/wiki/Configuring-Headers,-Footers-&-Logo
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
"""
if process.env["SHARELATEX_HEADER_EXTRAS"]?
try
settings.nav.header_extras = JSON.parse(process.env["SHARELATEX_HEADER_EXTRAS"])
catch e
console.error("could not parse SHARELATEX_HEADER_EXTRAS, not valid JSON")
# Sending Email
# -------------
#
# You must configure a mail server to be able to send invite emails from
# ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
# documentation for available options:
#
# http://www.nodemailer.com/docs/transports
if process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]?
settings.email =
fromAddress: process.env["SHARELATEX_EMAIL_FROM_ADDRESS"]
replyTo: process.env["SHARELATEX_EMAIL_REPLY_TO"] or ""
driver: process.env["SHARELATEX_EMAIL_DRIVER"]
parameters:
#AWS Creds
AWSAccessKeyID: process.env["SHARELATEX_EMAIL_AWS_SES_ACCESS_KEY_ID"]
AWSSecretKey: process.env["SHARELATEX_EMAIL_AWS_SES_SECRET_KEY"]
#SMTP Creds
host: process.env["SHARELATEX_EMAIL_SMTP_HOST"]
port: process.env["SHARELATEX_EMAIL_SMTP_PORT"],
secure: parse(process.env["SHARELATEX_EMAIL_SMTP_SECURE"])
ignoreTLS: parse(process.env["SHARELATEX_EMAIL_SMTP_IGNORE_TLS"])
textEncoding: process.env["SHARELATEX_EMAIL_TEXT_ENCODING"]
templates:
customFooter: process.env["SHARELATEX_CUSTOM_EMAIL_FOOTER"]
if process.env["SHARELATEX_EMAIL_SMTP_USER"]? or process.env["SHARELATEX_EMAIL_SMTP_PASS"]?
settings.email.parameters.auth =
user: process.env["SHARELATEX_EMAIL_SMTP_USER"]
pass: process.env["SHARELATEX_EMAIL_SMTP_PASS"]
if process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"]?
settings.email.parameters.tls =
rejectUnauthorized: parse(process.env["SHARELATEX_EMAIL_SMTP_TLS_REJECT_UNAUTH"])
# i18n
if process.env["SHARELATEX_LANG_DOMAIN_MAPPING"]?
settings.i18n.subdomainLang = parse(process.env["SHARELATEX_LANG_DOMAIN_MAPPING"])
# Password Settings
# -----------
# These restrict the passwords users can use when registering
# opts are from http://antelle.github.io/passfield
if process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"]
settings.passwordStrengthOptions =
pattern: process.env["SHARELATEX_PASSWORD_VALIDATION_PATTERN"] or "PI:PASSWORD:<PASSWORD>END_PI"
length: {min:process.env["SHARELATEX_PASSWORD_VALIDATION_MIN_LENGTH"] or 8, max: process.env["SHARELATEX_PASSWORD_VALIDATION_MAX_LENGTH"] or 150}
#######################
# ShareLaTeX Server Pro
#######################
if parse(process.env["SHARELATEX_IS_SERVER_PRO"]) == true
settings.bypassPercentageRollouts = true
settings.apis.references =
url: "http://localhost:3040"
# LDAP - SERVER PRO ONLY
# ----------
if process.env["SHARELATEX_LDAP_HOST"]
console.error """
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# WARNING: The LDAP configuration format has changed in version 0.5.1
# See https://github.com/sharelatex/sharelatex/wiki/Server-Pro:-LDAP-Config
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
"""
if process.env["SHARELATEX_LDAP_URL"]
settings.externalAuth = true
settings.ldap =
emailAtt: process.env["SHARELATEX_LDAP_EMAIL_ATT"]
nameAtt: process.env["SHARELATEX_LDAP_NAME_ATT"]
lastNameAtt: process.env["SHARELATEX_LDAP_LAST_NAME_ATT"]
updateUserDetailsOnLogin: process.env["SHARELATEX_LDAP_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
placeholder: process.env["SHARELATEX_LDAP_PLACEHOLDER"]
server:
url: process.env["SHARELATEX_LDAP_URL"]
bindDn: process.env["SHARELATEX_LDAP_BIND_DN"]
bindCredentials: process.env["SHARELATEX_LDAP_BIND_CREDENTIALS"]
bindProperty: process.env["SHARELATEX_LDAP_BIND_PROPERTY"]
searchBase: process.env["SHARELATEX_LDAP_SEARCH_BASE"]
searchScope: process.env["SHARELATEX_LDAP_SEARCH_SCOPE"]
searchFilter: process.env["SHARELATEX_LDAP_SEARCH_FILTER"]
searchAttributes: (
if _ldap_search_attribs = process.env["SHARELATEX_LDAP_SEARCH_ATTRIBUTES"]
try
JSON.parse(_ldap_search_attribs)
catch e
console.error "could not parse SHARELATEX_LDAP_SEARCH_ATTRIBUTES"
else
undefined
)
groupDnProperty: process.env["SHARELATEX_LDAP_GROUP_DN_PROPERTY"]
groupSearchBase: process.env["SHARELATEX_LDAP_GROUP_SEARCH_BASE"]
groupSearchScope: process.env["SHARELATEX_LDAP_GROUP_SEARCH_SCOPE"]
groupSearchFilter: process.env["SHARELATEX_LDAP_GROUP_SEARCH_FILTER"]
groupSearchAttributes: (
if _ldap_group_search_attribs = process.env["SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"]
try
JSON.parse(_ldap_group_search_attribs)
catch e
console.error "could not parse SHARELATEX_LDAP_GROUP_SEARCH_ATTRIBUTES"
else
undefined
)
cache: process.env["SHARELATEX_LDAP_CACHE"] == 'true'
timeout: (
if _ldap_timeout = process.env["SHARELATEX_LDAP_TIMEOUT"]
try
parseInt(_ldap_timeout)
catch e
console.error "Cannot parse SHARELATEX_LDAP_TIMEOUT"
else
undefined
)
connectTimeout: (
if _ldap_connect_timeout = process.env["SHARELATEX_LDAP_CONNECT_TIMEOUT"]
try
parseInt(_ldap_connect_timeout)
catch e
console.error "Cannot parse SHARELATEX_LDAP_CONNECT_TIMEOUT"
else
undefined
)
if process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"]
try
ca = JSON.parse(process.env["SHARELATEX_LDAP_TLS_OPTS_CA_PATH"])
catch e
console.error "could not parse SHARELATEX_LDAP_TLS_OPTS_CA_PATH, invalid JSON"
if typeof(ca) == 'string'
ca_paths = [ca]
else if typeof(ca) == 'object' && ca?.length?
ca_paths = ca
else
console.error "problem parsing SHARELATEX_LDAP_TLS_OPTS_CA_PATH"
settings.ldap.server.tlsOptions =
rejectUnauthorized: process.env["SHARELATEX_LDAP_TLS_OPTS_REJECT_UNAUTH"] == "true"
ca:ca_paths # e.g.'/etc/ldap/ca_certs.pem'
if process.env["SHARELATEX_SAML_ENTRYPOINT"]
# NOTE: see https://github.com/bergie/passport-saml/blob/master/README.md for docs of `server` options
settings.externalAuth = true
settings.saml =
updateUserDetailsOnLogin: process.env["SHARELATEX_SAML_UPDATE_USER_DETAILS_ON_LOGIN"] == 'true'
identityServiceName: process.env["SHARELATEX_SAML_IDENTITY_SERVICE_NAME"]
emailField: process.env["SHARELATEX_SAML_EMAIL_FIELD"] || process.env["SHARELATEX_SAML_EMAIL_FIELD_NAME"]
firstNameField: process.env["SHARELATEX_SAML_FIRST_NAME_FIELD"]
lastNameField: process.env["SHARELATEX_SAML_LAST_NAME_FIELD"]
server:
# strings
entryPoint: process.env["SHARELATEX_SAML_ENTRYPOINT"]
callbackUrl: process.env["SHARELATEX_SAML_CALLBACK_URL"]
issuer: process.env["SHARELATEX_SAML_ISSUER"]
decryptionPvk: process.env["SHARELATEX_SAML_DECRYPTION_PVK"]
signatureAlgorithm: process.env["SHARELATEX_SAML_SIGNATURE_ALGORITHM"]
identifierFormat: process.env["SHARELATEX_SAML_IDENTIFIER_FORMAT"]
attributeConsumingServiceIndex: process.env["SHARELATEX_SAML_ATTRIBUTE_CONSUMING_SERVICE_INDEX"]
authnContext: process.env["SHARELATEX_SAML_AUTHN_CONTEXT"]
authnRequestBinding: process.env["SHARELATEX_SAML_AUTHN_REQUEST_BINDING"]
validateInResponseTo: process.env["SHARELATEX_SAML_VALIDATE_IN_RESPONSE_TO"]
cacheProvider: process.env["SHARELATEX_SAML_CACHE_PROVIDER"]
logoutUrl: process.env["SHARELATEX_SAML_LOGOUT_URL"]
logoutCallbackUrl: process.env["SHARELATEX_SAML_LOGOUT_CALLBACK_URL"]
disableRequestedAuthnContext: process.env["SHARELATEX_SAML_DISABLE_REQUESTED_AUTHN_CONTEXT"] == 'true'
forceAuthn: process.env["SHARELATEX_SAML_FORCE_AUTHN"] == 'true'
skipRequestCompression: process.env["SHARELATEX_SAML_SKIP_REQUEST_COMPRESSION"] == 'true'
acceptedClockSkewMs: (
if _saml_skew = process.env["SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"]
try
parseInt(_saml_skew)
catch e
console.error "Cannot parse SHARELATEX_SAML_ACCEPTED_CLOCK_SKEW_MS"
else
undefined
)
requestIdExpirationPeriodMs: (
if _saml_exiration = process.env["SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"]
try
parseInt(_saml_expiration)
catch e
console.error "Cannot parse SHARELATEX_SAML_REQUEST_ID_EXPIRATION_PERIOD_MS"
else
undefined
)
additionalParams: (
if _saml_additionalParams = process.env["SHARELATEX_SAML_ADDITIONAL_PARAMS"]
try
JSON.parse(_saml_additionalParams)
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_PARAMS"
else
undefined
)
additionalAuthorizeParams: (
if _saml_additionalAuthorizeParams = process.env["SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"]
try
JSON.parse(_saml_additionalAuthorizeParams )
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_AUTHORIZE_PARAMS"
else
undefined
)
additionalLogoutParams: (
if _saml_additionalLogoutParams = process.env["SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"]
try
JSON.parse(_saml_additionalLogoutParams )
catch e
console.error "Cannot parse SHARELATEX_SAML_ADDITIONAL_LOGOUT_PARAMS"
else
undefined
)
# SHARELATEX_SAML_CERT cannot be empty
# https://github.com/bergie/passport-saml/commit/f6b1c885c0717f1083c664345556b535f217c102
if process.env["SHARELATEX_SAML_CERT"]
settings.saml.server.cert = process.env["SHARELATEX_SAML_CERT"]
settings.saml.server.privateCert = process.env["SHARELATEX_SAML_PRIVATE_CERT"]
# Compiler
# --------
if process.env["SANDBOXED_COMPILES"] == "true"
settings.clsi =
dockerRunner: true
docker:
image: process.env["TEX_LIVE_DOCKER_IMAGE"]
env:
HOME: "/tmp"
PATH: process.env["COMPILER_PATH"] or "/usr/local/texlive/2015/bin/x86_64-linux:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
user: "www-data"
if !settings.path?
settings.path = {}
settings.path.synctexBaseDir = () -> "/compile"
if process.env['SANDBOXED_COMPILES_SIBLING_CONTAINERS'] == 'true'
console.log("Using sibling containers for sandboxed compiles")
if process.env['SANDBOXED_COMPILES_HOST_DIR']
settings.path.sandboxedCompilesHostDir = process.env['SANDBOXED_COMPILES_HOST_DIR']
else
console.error('Sibling containers, but SANDBOXED_COMPILES_HOST_DIR not set')
# Templates
# ---------
if process.env["SHARELATEX_TEMPLATES_USER_ID"]
settings.templates =
mountPointUrl: "/templates"
user_id: process.env["SHARELATEX_TEMPLATES_USER_ID"]
settings.templateLinks = parse(process.env["SHARELATEX_NEW_PROJECT_TEMPLATE_LINKS"])
# /Learn
# -------
if process.env["SHARELATEX_PROXY_LEARN"]?
settings.proxyLearn = parse(process.env["SHARELATEX_PROXY_LEARN"])
# /References
# -----------
if process.env["SHARELATEX_ELASTICSEARCH_URL"]?
settings.references.elasticsearch =
host: process.env["SHARELATEX_ELASTICSEARCH_URL"]
# With lots of incoming and outgoing HTTP connections to different services,
# sometimes long running, it is a good idea to increase the default number
# of sockets that Node will hold open.
http = require('http')
http.globalAgent.maxSockets = 300
https = require('https')
https.globalAgent.maxSockets = 300
module.exports = settings |
[
{
"context": "ckrapi.js'\n\nflickr = new Flickr({\n api_key: \"1234ABCD1234ABCD1234ABCD1234ABCD\",\n format:'json'\n })\nclass FlickerService",
"end": 111,
"score": 0.999692976474762,
"start": 79,
"tag": "KEY",
"value": "1234ABCD1234ABCD1234ABCD1234ABCD"
}
] | src/services/FlickerService.coffee | vishnun/uibootcamp | 0 | Flickr = require '../libs/flickrapi.js'
flickr = new Flickr({
api_key: "1234ABCD1234ABCD1234ABCD1234ABCD",
format:'json'
})
class FlickerService
getImages: (searchKey) ->
flickr.photos.search {text: searchKey}, (err, result) ->
if err
throw new Error(err)
else
result
module.exports = FlickerService
| 89125 | Flickr = require '../libs/flickrapi.js'
flickr = new Flickr({
api_key: "<KEY>",
format:'json'
})
class FlickerService
getImages: (searchKey) ->
flickr.photos.search {text: searchKey}, (err, result) ->
if err
throw new Error(err)
else
result
module.exports = FlickerService
| true | Flickr = require '../libs/flickrapi.js'
flickr = new Flickr({
api_key: "PI:KEY:<KEY>END_PI",
format:'json'
})
class FlickerService
getImages: (searchKey) ->
flickr.photos.search {text: searchKey}, (err, result) ->
if err
throw new Error(err)
else
result
module.exports = FlickerService
|
[
{
"context": "# @see inspirate by https://github.com/wallabag/wallabagger/blob/master/wallabagger/js/wallabag-a",
"end": 47,
"score": 0.9996712803840637,
"start": 39,
"tag": "USERNAME",
"value": "wallabag"
},
{
"context": "ientSecret: null\n\t\tUserLogin: null\n\t\tUserPassword: null... | src/lib/wallabag-api.coffee | Grummfy/hubot-wallabag | 2 | # @see inspirate by https://github.com/wallabag/wallabagger/blob/master/wallabagger/js/wallabag-api.js
request = require('request');
class WallabagApi
data:
Url: null
ClientId: null
ClientSecret: null
UserLogin: null
UserPassword: null
ApiToken: null
RefreshToken: null
ExpireDateMs: null
constructor: (url, clientId, clientSecret, userLogin, userPassword) ->
@data.Url = url
@data.ClientId = clientId
@data.ClientSecret = clientSecret
@data.UserLogin = userLogin
@data.UserPassword = userPassword
@GetAppToken()
SavePage: (pageUrl, tags) ->
content = JSON.stringify({
url: pageUrl,
tags: tags.join(',')
})
self = this
savePage = ->
rinit = self.RequestInit('POST', self.AuhorizedHeader(), content)
rinit.url = self.data.Url + '/api/entries.json'
request(rinit, (error, response, body) ->
info = JSON.parse(body)
if error or response.statusCode != 200 or info == '' or info == false
console.log 'Failed to save page ' + pageUrl
console.log error
console.log body
# console.log response
return
console.log info
)
# check init of tockens
if @needNewAppToken()
console.log @RefreshToken(savePage)
else
savePage()
RefreshToken: (callback = null) ->
content = JSON.stringify(
grant_type: 'refresh_token'
refresh_token: @data.RefreshToken
client_id: @data.ClientId
client_secret: @data.ClientSecret
)
rinit = @RequestInit('POST', @NotAuhorizedHeader(), content)
rinit.url = @data.Url + '/oauth/v2/token'
self = this
request(rinit, (error, response, body) ->
info = JSON.parse(body)
if error or response.statusCode != 200 or info == '' or info == false
console.log 'Failed to refresh token ' + rinit.url
console.log error
console.log body
# console.log response
return
nowDate = new Date(Date.now())
self.data.ApiToken = info.access_token
self.data.RefreshToken = info.refresh_token
self.data.ExpireDateMs = nowDate.setSeconds(nowDate.getSeconds() + info.expires_in)
if callback
callback()
info
)
GetAppToken: ->
content = JSON.stringify(
grant_type: 'password'
client_id: @data.ClientId
client_secret: @data.ClientSecret
username: @data.UserLogin
password: @data.UserPassword
)
rinit = @RequestInit('POST', @NotAuhorizedHeader(), content)
rinit.url = @data.Url + '/oauth/v2/token'
self = this
request(rinit, (error, response, body) ->
info = JSON.parse(body)
if error or response.statusCode != 200 or info == '' or info == false
console.log 'Failed to get app token from ' + rinit.url
console.log error
console.log body
# console.log response
return
nowDate = new Date
self.data.ApiToken = info.access_token
self.data.RefreshToken = info.refresh_token
self.data.ExpireDateMs = nowDate.setSeconds(nowDate.getSeconds() + info.expires_in)
info
)
RequestInit: (rmethod, rheaders, content) ->
options =
method: rmethod
headers: rheaders
mode: 'cors'
cache: 'default'
if content != '' && content != null
options.body = content
options
AuhorizedHeader: ->
{
'Authorization': 'Bearer ' + @data.ApiToken
'Accept': 'application/json'
'Content-Type': 'application/json'
'Accept-Encoding': 'gzip, deflate'
}
NotAuhorizedHeader: ->
{
'Accept': 'application/json'
'Content-Type': 'application/json'
'Accept-Encoding': 'gzip, deflate'
}
needNewAppToken: ->
return @data.ApiToken == '' or @data.ApiToken == null or @expired()
expired: ->
return @data.ExpireDateMs != null and Date.now() > @data.ExpireDateMs
module.exports = WallabagApi
| 173102 | # @see inspirate by https://github.com/wallabag/wallabagger/blob/master/wallabagger/js/wallabag-api.js
request = require('request');
class WallabagApi
data:
Url: null
ClientId: null
ClientSecret: null
UserLogin: null
UserPassword: <PASSWORD>
ApiToken: null
RefreshToken: null
ExpireDateMs: null
constructor: (url, clientId, clientSecret, userLogin, userPassword) ->
@data.Url = url
@data.ClientId = clientId
@data.ClientSecret = clientSecret
@data.UserLogin = userLogin
@data.UserPassword = <PASSWORD>
@GetAppToken()
SavePage: (pageUrl, tags) ->
content = JSON.stringify({
url: pageUrl,
tags: tags.join(',')
})
self = this
savePage = ->
rinit = self.RequestInit('POST', self.AuhorizedHeader(), content)
rinit.url = self.data.Url + '/api/entries.json'
request(rinit, (error, response, body) ->
info = JSON.parse(body)
if error or response.statusCode != 200 or info == '' or info == false
console.log 'Failed to save page ' + pageUrl
console.log error
console.log body
# console.log response
return
console.log info
)
# check init of tockens
if @needNewAppToken()
console.log @RefreshToken(savePage)
else
savePage()
RefreshToken: (callback = null) ->
content = JSON.stringify(
grant_type: 'refresh_token'
refresh_token: @data.RefreshToken
client_id: @data.ClientId
client_secret: @data.ClientSecret
)
rinit = @RequestInit('POST', @NotAuhorizedHeader(), content)
rinit.url = @data.Url + '/oauth/v2/token'
self = this
request(rinit, (error, response, body) ->
info = JSON.parse(body)
if error or response.statusCode != 200 or info == '' or info == false
console.log 'Failed to refresh token ' + rinit.url
console.log error
console.log body
# console.log response
return
nowDate = new Date(Date.now())
self.data.ApiToken = info.access_token
self.data.RefreshToken = info.refresh_token
self.data.ExpireDateMs = nowDate.setSeconds(nowDate.getSeconds() + info.expires_in)
if callback
callback()
info
)
GetAppToken: ->
content = JSON.stringify(
grant_type: 'password'
client_id: @data.ClientId
client_secret: @data.ClientSecret
username: @data.UserLogin
password: <PASSWORD>
)
rinit = @RequestInit('POST', @NotAuhorizedHeader(), content)
rinit.url = @data.Url + '/oauth/v2/token'
self = this
request(rinit, (error, response, body) ->
info = JSON.parse(body)
if error or response.statusCode != 200 or info == '' or info == false
console.log 'Failed to get app token from ' + rinit.url
console.log error
console.log body
# console.log response
return
nowDate = new Date
self.data.ApiToken = info.access_token
self.data.RefreshToken = info.refresh_token
self.data.ExpireDateMs = nowDate.setSeconds(nowDate.getSeconds() + info.expires_in)
info
)
RequestInit: (rmethod, rheaders, content) ->
options =
method: rmethod
headers: rheaders
mode: 'cors'
cache: 'default'
if content != '' && content != null
options.body = content
options
AuhorizedHeader: ->
{
'Authorization': 'Bearer ' + @data.ApiToken
'Accept': 'application/json'
'Content-Type': 'application/json'
'Accept-Encoding': 'gzip, deflate'
}
NotAuhorizedHeader: ->
{
'Accept': 'application/json'
'Content-Type': 'application/json'
'Accept-Encoding': 'gzip, deflate'
}
needNewAppToken: ->
return @data.ApiToken == '' or @data.ApiToken == null or @expired()
expired: ->
return @data.ExpireDateMs != null and Date.now() > @data.ExpireDateMs
module.exports = WallabagApi
| true | # @see inspirate by https://github.com/wallabag/wallabagger/blob/master/wallabagger/js/wallabag-api.js
request = require('request');
class WallabagApi
data:
Url: null
ClientId: null
ClientSecret: null
UserLogin: null
UserPassword: PI:PASSWORD:<PASSWORD>END_PI
ApiToken: null
RefreshToken: null
ExpireDateMs: null
constructor: (url, clientId, clientSecret, userLogin, userPassword) ->
@data.Url = url
@data.ClientId = clientId
@data.ClientSecret = clientSecret
@data.UserLogin = userLogin
@data.UserPassword = PI:PASSWORD:<PASSWORD>END_PI
@GetAppToken()
SavePage: (pageUrl, tags) ->
content = JSON.stringify({
url: pageUrl,
tags: tags.join(',')
})
self = this
savePage = ->
rinit = self.RequestInit('POST', self.AuhorizedHeader(), content)
rinit.url = self.data.Url + '/api/entries.json'
request(rinit, (error, response, body) ->
info = JSON.parse(body)
if error or response.statusCode != 200 or info == '' or info == false
console.log 'Failed to save page ' + pageUrl
console.log error
console.log body
# console.log response
return
console.log info
)
# check init of tockens
if @needNewAppToken()
console.log @RefreshToken(savePage)
else
savePage()
RefreshToken: (callback = null) ->
content = JSON.stringify(
grant_type: 'refresh_token'
refresh_token: @data.RefreshToken
client_id: @data.ClientId
client_secret: @data.ClientSecret
)
rinit = @RequestInit('POST', @NotAuhorizedHeader(), content)
rinit.url = @data.Url + '/oauth/v2/token'
self = this
request(rinit, (error, response, body) ->
info = JSON.parse(body)
if error or response.statusCode != 200 or info == '' or info == false
console.log 'Failed to refresh token ' + rinit.url
console.log error
console.log body
# console.log response
return
nowDate = new Date(Date.now())
self.data.ApiToken = info.access_token
self.data.RefreshToken = info.refresh_token
self.data.ExpireDateMs = nowDate.setSeconds(nowDate.getSeconds() + info.expires_in)
if callback
callback()
info
)
GetAppToken: ->
content = JSON.stringify(
grant_type: 'password'
client_id: @data.ClientId
client_secret: @data.ClientSecret
username: @data.UserLogin
password: PI:PASSWORD:<PASSWORD>END_PI
)
rinit = @RequestInit('POST', @NotAuhorizedHeader(), content)
rinit.url = @data.Url + '/oauth/v2/token'
self = this
request(rinit, (error, response, body) ->
info = JSON.parse(body)
if error or response.statusCode != 200 or info == '' or info == false
console.log 'Failed to get app token from ' + rinit.url
console.log error
console.log body
# console.log response
return
nowDate = new Date
self.data.ApiToken = info.access_token
self.data.RefreshToken = info.refresh_token
self.data.ExpireDateMs = nowDate.setSeconds(nowDate.getSeconds() + info.expires_in)
info
)
RequestInit: (rmethod, rheaders, content) ->
options =
method: rmethod
headers: rheaders
mode: 'cors'
cache: 'default'
if content != '' && content != null
options.body = content
options
AuhorizedHeader: ->
{
'Authorization': 'Bearer ' + @data.ApiToken
'Accept': 'application/json'
'Content-Type': 'application/json'
'Accept-Encoding': 'gzip, deflate'
}
NotAuhorizedHeader: ->
{
'Accept': 'application/json'
'Content-Type': 'application/json'
'Accept-Encoding': 'gzip, deflate'
}
needNewAppToken: ->
return @data.ApiToken == '' or @data.ApiToken == null or @expired()
expired: ->
return @data.ExpireDateMs != null and Date.now() > @data.ExpireDateMs
module.exports = WallabagApi
|
[
{
"context": "######################\n# Copyright (C) 2014 by Vaughn Iverson\n# ddp-login is free software released under t",
"end": 119,
"score": 0.9998388290405273,
"start": 105,
"tag": "NAME",
"value": "Vaughn Iverson"
},
{
"context": ", [{user: user, srp: srpDigest, passw... | src/index.coffee | vsivsi/ddp-login | 51 | ############################################################################
# Copyright (C) 2014 by Vaughn Iverson
# ddp-login is free software released under the MIT/X11 license.
# See included LICENSE file for details.
############################################################################
read = require 'read'
DDP = require 'ddp'
async = require 'async'
crypto = require 'crypto'
login = (ddp, options..., cb) ->
unless typeof cb is 'function'
throw new Error 'Valid callback must be provided to ddp-login'
unless ddp?.call? and ddp?.connect? and ddp?.close?
return cb(new Error 'Invalid DDP parameter')
options = options[0] ? {}
options.env ?= 'METEOR_TOKEN'
options.method ?= 'account'
options.retry ?= 5
options.plaintext ?= false
options.account ?= null
options.pass ?= null
switch options.method
when 'username'
method = tryOneUser
when 'email'
method = tryOneEmail
when 'account'
method = tryOneAccount
when 'token'
method = tryOneToken
else
return cb(new Error "Unsupported DDP login method '#{options.method}'")
if process.env[options.env]?
# We're already logged-in, maybe...
tryOneToken ddp, options, (err, res) ->
unless err or not res
return cb null, res
else
return async.retry options.retry, async.apply(method, ddp, options), cb
else
return async.retry options.retry, async.apply(method, ddp, options), cb
plaintextToDigest = (pass) ->
hash = crypto.createHash 'sha256'
hash.update pass, 'utf8'
return hash.digest('hex')
isEmail = (addr) ->
unless typeof addr is 'string'
return false
matchEmail = ///
^
[^@]+
@
[^@]+\.[^@]+
$
///i
m = addr.match matchEmail
m isnt null
attemptLogin = (ddp, user, pass, options, cb) ->
digest = plaintextToDigest pass
ddp.call 'login', [{user: user, password: {digest: digest, algorithm: 'sha-256' }}], (err, res) ->
unless err and err.error is 400
if err
console.error 'Login failed:', err.message
return cb err, res
if err.reason is 'old password format'
# Attempt to migrate from pre v0.8.2 SRP account to bcrypt account
console.error 'Old Meteor SRP (pre-v0.8.2) account detected. Attempting to migrate...'
try
details = JSON.parse err.details
catch e
return cb err
srpDigest = plaintextToDigest "#{details.identity}:#{pass}"
ddp.call 'login', [{user: user, srp: srpDigest, password: {digest: digest, algorithm: 'sha-256'}}], cb
else if options.plaintext
# Fallback to plaintext login
ddp.call 'login', [{user: user, password: pass}], (err, res) ->
console.error 'Login failed: ', err.message if err
return cb err, res
else
return cb err, res
loginWithUsername = (ddp, username, password, options..., cb) ->
attemptLogin ddp, {username: username}, password, options[0], cb
loginWithEmail = (ddp, email, password, options..., cb) ->
attemptLogin ddp, {email: email}, password, options[0], cb
loginWithAccount = (ddp, account, password, options..., cb) ->
if isEmail account
loginWithEmail ddp, account, password, options[0], (err, tok) ->
return cb err, tok unless err and err.error is 400
loginWithUsername ddp, account, password, options[0], cb
else
loginWithUsername ddp, account, password, options[0], cb
loginWithToken = (ddp, token, cb) ->
ddp.call 'login', [{ resume: token }], cb
tryOneToken = (ddp, options, cb) ->
loginWithToken ddp, process.env[options.env], (err, res) ->
return cb err, res
userPrompt = (prompt, options, cb) ->
readPrompts = {}
unless options.account?
readPrompts.account = async.apply read, {prompt: prompt, output: process.stderr}
unless options.pass?
readPrompts.pass = async.apply read, {prompt: 'Password: ', silent: true, output: process.stderr}
async.series readPrompts, (err, res) ->
cb err if err
result = {}
result.account = res.account?[0] or options.account
result.pass = res.pass?[0] or options.pass
cb null, result
tryOneEmail = (ddp, options, cb) ->
userPrompt "Email: ", options, (err, res) ->
return cb err if err
loginWithEmail ddp, res.account, res.pass, options, cb
tryOneUser = (ddp, options, cb) ->
userPrompt "Username: ", options, (err, res) ->
return cb err if err
loginWithUsername ddp, res.account, res.pass, options, cb
tryOneAccount = (ddp, options, cb) ->
userPrompt "Account: ", options, (err, res) ->
return cb err if err
loginWithAccount ddp, res.account, res.pass, options, cb
#
# When run standalone, the code below will execute
#
login._command_line = () ->
yargs = require('yargs')
.usage('''
Usage: $0 [--host <hostname>] [--port <portnum>] [--env <envvar>] [--method <logintype>] [--retry <count>] [--ssl] [--plaintext]
Output: a valid authToken, if successful
Note: If your Meteor server is older than v0.8.2, you will need to use the --plaintext option to authenticate.
''')
.example('', '''
export METEOR_TOKEN=$($0 --host 127.0.0.1 --port 3000 --env METEOR_TOKEN --method email --retry 5)
''')
.default('host', '127.0.0.1')
.describe('host', 'The domain name or IP address of the host to connect with')
.default('port', 3000)
.describe('port', 'The server port number to connect with')
.default('env', 'METEOR_TOKEN')
.describe('env', 'The environment variable to check for a valid token')
.default('method', 'account')
.describe('method', 'The login method: currently "email", "username", "account" or "token"')
.default('retry', 5)
.describe('retry', 'Number of times to retry login before giving up')
.describe('ssl', 'Use an SSL encrypted connection to connect with the host')
.boolean('ssl')
.default('ssl', false)
.describe('plaintext', 'For Meteor servers older than v0.8.2, fallback to sending the password as plaintext')
.default('plaintext', false)
.boolean('plaintext')
.boolean('h')
.alias('h','help')
.wrap(null)
.version((() -> require('../package').version))
argv = yargs.parse(process.argv)
if argv.h
yargs.showHelp()
process.exit 1
ddp = new DDP
host: argv.host
port: argv.port
use_ssl: argv.ssl
use_ejson: true
ddp.connect (err) ->
throw err if err
login ddp, { env: argv.env, method: argv.method, retry: argv.retry, plaintext: argv.plaintext }, (err, res) ->
ddp.close()
if err
console.error "Login attempt failed with error:"
console.dir err
process.exit 1
return
console.log res.token
process.exit 0
return
# ddp.on 'message', (msg) ->
# console.error("ddp message: " + msg)
login.loginWithToken = loginWithToken
login.loginWithUsername = loginWithUsername
login.loginWithEmail = loginWithEmail
login.loginWithAccount = loginWithAccount
module?.exports = login
| 157382 | ############################################################################
# Copyright (C) 2014 by <NAME>
# ddp-login is free software released under the MIT/X11 license.
# See included LICENSE file for details.
############################################################################
read = require 'read'
DDP = require 'ddp'
async = require 'async'
crypto = require 'crypto'
login = (ddp, options..., cb) ->
unless typeof cb is 'function'
throw new Error 'Valid callback must be provided to ddp-login'
unless ddp?.call? and ddp?.connect? and ddp?.close?
return cb(new Error 'Invalid DDP parameter')
options = options[0] ? {}
options.env ?= 'METEOR_TOKEN'
options.method ?= 'account'
options.retry ?= 5
options.plaintext ?= false
options.account ?= null
options.pass ?= null
switch options.method
when 'username'
method = tryOneUser
when 'email'
method = tryOneEmail
when 'account'
method = tryOneAccount
when 'token'
method = tryOneToken
else
return cb(new Error "Unsupported DDP login method '#{options.method}'")
if process.env[options.env]?
# We're already logged-in, maybe...
tryOneToken ddp, options, (err, res) ->
unless err or not res
return cb null, res
else
return async.retry options.retry, async.apply(method, ddp, options), cb
else
return async.retry options.retry, async.apply(method, ddp, options), cb
plaintextToDigest = (pass) ->
hash = crypto.createHash 'sha256'
hash.update pass, 'utf8'
return hash.digest('hex')
isEmail = (addr) ->
unless typeof addr is 'string'
return false
matchEmail = ///
^
[^@]+
@
[^@]+\.[^@]+
$
///i
m = addr.match matchEmail
m isnt null
attemptLogin = (ddp, user, pass, options, cb) ->
digest = plaintextToDigest pass
ddp.call 'login', [{user: user, password: {digest: digest, algorithm: 'sha-256' }}], (err, res) ->
unless err and err.error is 400
if err
console.error 'Login failed:', err.message
return cb err, res
if err.reason is 'old password format'
# Attempt to migrate from pre v0.8.2 SRP account to bcrypt account
console.error 'Old Meteor SRP (pre-v0.8.2) account detected. Attempting to migrate...'
try
details = JSON.parse err.details
catch e
return cb err
srpDigest = plaintextToDigest "#{details.identity}:#{pass}"
ddp.call 'login', [{user: user, srp: srpDigest, password: {digest: <PASSWORD>, algorithm: 'sha-256'}}], cb
else if options.plaintext
# Fallback to plaintext login
ddp.call 'login', [{user: user, password: <PASSWORD>}], (err, res) ->
console.error 'Login failed: ', err.message if err
return cb err, res
else
return cb err, res
loginWithUsername = (ddp, username, password, options..., cb) ->
attemptLogin ddp, {username: username}, password, options[0], cb
loginWithEmail = (ddp, email, password, options..., cb) ->
attemptLogin ddp, {email: email}, password, options[0], cb
loginWithAccount = (ddp, account, password, options..., cb) ->
if isEmail account
loginWithEmail ddp, account, password, options[0], (err, tok) ->
return cb err, tok unless err and err.error is 400
loginWithUsername ddp, account, password, options[0], cb
else
loginWithUsername ddp, account, password, options[0], cb
loginWithToken = (ddp, token, cb) ->
ddp.call 'login', [{ resume: token }], cb
tryOneToken = (ddp, options, cb) ->
loginWithToken ddp, process.env[options.env], (err, res) ->
return cb err, res
userPrompt = (prompt, options, cb) ->
readPrompts = {}
unless options.account?
readPrompts.account = async.apply read, {prompt: prompt, output: process.stderr}
unless options.pass?
readPrompts.pass = async.apply read, {prompt: 'Password: ', silent: true, output: process.stderr}
async.series readPrompts, (err, res) ->
cb err if err
result = {}
result.account = res.account?[0] or options.account
result.pass = res.pass?[0] or options.pass
cb null, result
tryOneEmail = (ddp, options, cb) ->
userPrompt "Email: ", options, (err, res) ->
return cb err if err
loginWithEmail ddp, res.account, res.pass, options, cb
tryOneUser = (ddp, options, cb) ->
userPrompt "Username: ", options, (err, res) ->
return cb err if err
loginWithUsername ddp, res.account, res.pass, options, cb
tryOneAccount = (ddp, options, cb) ->
userPrompt "Account: ", options, (err, res) ->
return cb err if err
loginWithAccount ddp, res.account, res.pass, options, cb
#
# When run standalone, the code below will execute
#
login._command_line = () ->
yargs = require('yargs')
.usage('''
Usage: $0 [--host <hostname>] [--port <portnum>] [--env <envvar>] [--method <logintype>] [--retry <count>] [--ssl] [--plaintext]
Output: a valid authToken, if successful
Note: If your Meteor server is older than v0.8.2, you will need to use the --plaintext option to authenticate.
''')
.example('', '''
export METEOR_TOKEN=$($0 --host 127.0.0.1 --port 3000 --env METEOR_TOKEN --method email --retry 5)
''')
.default('host', '127.0.0.1')
.describe('host', 'The domain name or IP address of the host to connect with')
.default('port', 3000)
.describe('port', 'The server port number to connect with')
.default('env', 'METEOR_TOKEN')
.describe('env', 'The environment variable to check for a valid token')
.default('method', 'account')
.describe('method', 'The login method: currently "email", "username", "account" or "token"')
.default('retry', 5)
.describe('retry', 'Number of times to retry login before giving up')
.describe('ssl', 'Use an SSL encrypted connection to connect with the host')
.boolean('ssl')
.default('ssl', false)
.describe('plaintext', 'For Meteor servers older than v0.8.2, fallback to sending the password as plaintext')
.default('plaintext', false)
.boolean('plaintext')
.boolean('h')
.alias('h','help')
.wrap(null)
.version((() -> require('../package').version))
argv = yargs.parse(process.argv)
if argv.h
yargs.showHelp()
process.exit 1
ddp = new DDP
host: argv.host
port: argv.port
use_ssl: argv.ssl
use_ejson: true
ddp.connect (err) ->
throw err if err
login ddp, { env: argv.env, method: argv.method, retry: argv.retry, plaintext: argv.plaintext }, (err, res) ->
ddp.close()
if err
console.error "Login attempt failed with error:"
console.dir err
process.exit 1
return
console.log res.token
process.exit 0
return
# ddp.on 'message', (msg) ->
# console.error("ddp message: " + msg)
login.loginWithToken = loginWithToken
login.loginWithUsername = loginWithUsername
login.loginWithEmail = loginWithEmail
login.loginWithAccount = loginWithAccount
module?.exports = login
| true | ############################################################################
# Copyright (C) 2014 by PI:NAME:<NAME>END_PI
# ddp-login is free software released under the MIT/X11 license.
# See included LICENSE file for details.
############################################################################
read = require 'read'
DDP = require 'ddp'
async = require 'async'
crypto = require 'crypto'
login = (ddp, options..., cb) ->
unless typeof cb is 'function'
throw new Error 'Valid callback must be provided to ddp-login'
unless ddp?.call? and ddp?.connect? and ddp?.close?
return cb(new Error 'Invalid DDP parameter')
options = options[0] ? {}
options.env ?= 'METEOR_TOKEN'
options.method ?= 'account'
options.retry ?= 5
options.plaintext ?= false
options.account ?= null
options.pass ?= null
switch options.method
when 'username'
method = tryOneUser
when 'email'
method = tryOneEmail
when 'account'
method = tryOneAccount
when 'token'
method = tryOneToken
else
return cb(new Error "Unsupported DDP login method '#{options.method}'")
if process.env[options.env]?
# We're already logged-in, maybe...
tryOneToken ddp, options, (err, res) ->
unless err or not res
return cb null, res
else
return async.retry options.retry, async.apply(method, ddp, options), cb
else
return async.retry options.retry, async.apply(method, ddp, options), cb
plaintextToDigest = (pass) ->
hash = crypto.createHash 'sha256'
hash.update pass, 'utf8'
return hash.digest('hex')
isEmail = (addr) ->
unless typeof addr is 'string'
return false
matchEmail = ///
^
[^@]+
@
[^@]+\.[^@]+
$
///i
m = addr.match matchEmail
m isnt null
attemptLogin = (ddp, user, pass, options, cb) ->
digest = plaintextToDigest pass
ddp.call 'login', [{user: user, password: {digest: digest, algorithm: 'sha-256' }}], (err, res) ->
unless err and err.error is 400
if err
console.error 'Login failed:', err.message
return cb err, res
if err.reason is 'old password format'
# Attempt to migrate from pre v0.8.2 SRP account to bcrypt account
console.error 'Old Meteor SRP (pre-v0.8.2) account detected. Attempting to migrate...'
try
details = JSON.parse err.details
catch e
return cb err
srpDigest = plaintextToDigest "#{details.identity}:#{pass}"
ddp.call 'login', [{user: user, srp: srpDigest, password: {digest: PI:PASSWORD:<PASSWORD>END_PI, algorithm: 'sha-256'}}], cb
else if options.plaintext
# Fallback to plaintext login
ddp.call 'login', [{user: user, password: PI:PASSWORD:<PASSWORD>END_PI}], (err, res) ->
console.error 'Login failed: ', err.message if err
return cb err, res
else
return cb err, res
loginWithUsername = (ddp, username, password, options..., cb) ->
attemptLogin ddp, {username: username}, password, options[0], cb
loginWithEmail = (ddp, email, password, options..., cb) ->
attemptLogin ddp, {email: email}, password, options[0], cb
loginWithAccount = (ddp, account, password, options..., cb) ->
if isEmail account
loginWithEmail ddp, account, password, options[0], (err, tok) ->
return cb err, tok unless err and err.error is 400
loginWithUsername ddp, account, password, options[0], cb
else
loginWithUsername ddp, account, password, options[0], cb
loginWithToken = (ddp, token, cb) ->
ddp.call 'login', [{ resume: token }], cb
tryOneToken = (ddp, options, cb) ->
loginWithToken ddp, process.env[options.env], (err, res) ->
return cb err, res
userPrompt = (prompt, options, cb) ->
readPrompts = {}
unless options.account?
readPrompts.account = async.apply read, {prompt: prompt, output: process.stderr}
unless options.pass?
readPrompts.pass = async.apply read, {prompt: 'Password: ', silent: true, output: process.stderr}
async.series readPrompts, (err, res) ->
cb err if err
result = {}
result.account = res.account?[0] or options.account
result.pass = res.pass?[0] or options.pass
cb null, result
tryOneEmail = (ddp, options, cb) ->
userPrompt "Email: ", options, (err, res) ->
return cb err if err
loginWithEmail ddp, res.account, res.pass, options, cb
tryOneUser = (ddp, options, cb) ->
userPrompt "Username: ", options, (err, res) ->
return cb err if err
loginWithUsername ddp, res.account, res.pass, options, cb
tryOneAccount = (ddp, options, cb) ->
userPrompt "Account: ", options, (err, res) ->
return cb err if err
loginWithAccount ddp, res.account, res.pass, options, cb
#
# When run standalone, the code below will execute
#
login._command_line = () ->
yargs = require('yargs')
.usage('''
Usage: $0 [--host <hostname>] [--port <portnum>] [--env <envvar>] [--method <logintype>] [--retry <count>] [--ssl] [--plaintext]
Output: a valid authToken, if successful
Note: If your Meteor server is older than v0.8.2, you will need to use the --plaintext option to authenticate.
''')
.example('', '''
export METEOR_TOKEN=$($0 --host 127.0.0.1 --port 3000 --env METEOR_TOKEN --method email --retry 5)
''')
.default('host', '127.0.0.1')
.describe('host', 'The domain name or IP address of the host to connect with')
.default('port', 3000)
.describe('port', 'The server port number to connect with')
.default('env', 'METEOR_TOKEN')
.describe('env', 'The environment variable to check for a valid token')
.default('method', 'account')
.describe('method', 'The login method: currently "email", "username", "account" or "token"')
.default('retry', 5)
.describe('retry', 'Number of times to retry login before giving up')
.describe('ssl', 'Use an SSL encrypted connection to connect with the host')
.boolean('ssl')
.default('ssl', false)
.describe('plaintext', 'For Meteor servers older than v0.8.2, fallback to sending the password as plaintext')
.default('plaintext', false)
.boolean('plaintext')
.boolean('h')
.alias('h','help')
.wrap(null)
.version((() -> require('../package').version))
argv = yargs.parse(process.argv)
if argv.h
yargs.showHelp()
process.exit 1
ddp = new DDP
host: argv.host
port: argv.port
use_ssl: argv.ssl
use_ejson: true
ddp.connect (err) ->
throw err if err
login ddp, { env: argv.env, method: argv.method, retry: argv.retry, plaintext: argv.plaintext }, (err, res) ->
ddp.close()
if err
console.error "Login attempt failed with error:"
console.dir err
process.exit 1
return
console.log res.token
process.exit 0
return
# ddp.on 'message', (msg) ->
# console.error("ddp message: " + msg)
login.loginWithToken = loginWithToken
login.loginWithUsername = loginWithUsername
login.loginWithEmail = loginWithEmail
login.loginWithAccount = loginWithAccount
module?.exports = login
|
[
{
"context": "lementation of passy algorithm in CoffeeScript, by Stephen Waits.\n\nThe MIT License\n\nCopyright (c) 2011 Stephen Wai",
"end": 86,
"score": 0.9998791813850403,
"start": 73,
"tag": "NAME",
"value": "Stephen Waits"
},
{
"context": "tephen Waits.\n\nThe MIT License\n\nCop... | coffee/passy.coffee | MCSDWVL/passy | 2 | ###
passy.coffee
Implementation of passy algorithm in CoffeeScript, by Stephen Waits.
The MIT License
Copyright (c) 2011 Stephen Waits <steve@waits.net>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
###
passy = (text, secret, allowSymbols) ->
#
# SHA1 code
#
# convert a single character code to its byte representation
char_code_to_bytes = (char_code) ->
result = []
while char_code
result.unshift(char_code & 0xff)
char_code >>= 8
result
# convert a string into a byte array, including multibytes chars
str_to_bytes = (str) ->
result = []
for char_code in str
result = result.concat(char_code_to_bytes(char_code.charCodeAt(0)))
result
# convert a number to an array of 8 bytes representing a 64 bit big-endian integer
num_to_big_endian_64 = (num) ->
[
(num & 0xff00000000000000) >>> 56,
(num & 0x00ff000000000000) >>> 48,
(num & 0x0000ff0000000000) >>> 40,
(num & 0x000000ff00000000) >>> 32,
(num & 0x00000000ff000000) >>> 24,
(num & 0x0000000000ff0000) >>> 16,
(num & 0x000000000000ff00) >>> 8,
(num & 0x00000000000000ff)
]
# convert an array of bytes to an array of int32 (big-endian)
bytes_to_big_endian_32 = (array) ->
((array[i] << 24) | (array[i+1] << 16) | (array[i+2] << 8) | array[i+3]) for i in [0...array.length] by 4
# take an array of bytes and return the hex string
bytes_to_hex = (bytes) ->
hextab = "0123456789abcdef"
(hextab[(x >>> 4) & 0x0f] + hextab[x & 0x0f] for x in bytes).join("")
# compute SHA1 hash
#
# input is an array of bytes (big-endian order)
# returns an array of 20 bytes
sha1 = (byte_array) ->
# helper function (rotate left)
rol = (x,i) -> (x << i) | (x >>> (32-i))
# initialize variables
message = byte_array.slice(0) # copy array, since we will modify it
message_size_in_bits = message.length * 8 # store message size for later use
# initialize hash state variables
[h0, h1, h2, h3, h4] = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]
# Preprocess message in preparation for hasing:
# append the bit '1' to the message
message.push(0x80)
# append (0 <= k < 512) bits '0', so that the resulting message length (in bits) is congruent to 448 = -64 (mod 512)
message.push(0) while ((message.length + 8) % 64)
# append length of message (before pre-processing), in bits, as 64-bit big-endian integer
message = message.concat(num_to_big_endian_64(message_size_in_bits))
# Process the message in successive 512-bit chunks:
# break message into 512-bit chunks
for i_chunk in [0...message.length] by 64
# convert bytes into int32
w = bytes_to_big_endian_32(message.slice(i_chunk, i_chunk+64))
# init state with current hash
[a, b, c, d, e] = [h0, h1, h2, h3, h4]
# hash rounds
for i in [0...80]
s = i & 0xf
if (i >= 16)
temp = w[(s + 13) & 0xf] ^ w[(s + 8) & 0xf] ^ w[(s + 2) & 0xf] ^ w[s]
w[s] = (temp << 1) | (temp >>> 31)
if (i < 20)
# rol(a,5) + _________f[i]________ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( d ^ (b & (c ^ d)) ) + e + 0x5a827999 + w[s]) & 0xffffffff
else if (i < 40)
# rol(a,5) + _____f[i]____ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( b ^ c ^ d ) + e + 0x6ed9eba1 + w[s]) & 0xffffffff
else if (i < 60)
# rol(a,5) + ___________f[i]________________ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( (b & c) | (b & d) | (c & d) ) + e + 0x8f1bbcdc + w[s]) & 0xffffffff
else
# rol(a,5) + _____f[i]____ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( b ^ c ^ d ) + e + 0xca62c1d6 + w[s]) & 0xffffffff
[a, b, c, d, e] = [temp, a, rol(b,30), c, d]
# Add this chunk's hash to result so far:
h0 = (h0 + a) & 0xffffffff
h1 = (h1 + b) & 0xffffffff
h2 = (h2 + c) & 0xffffffff
h3 = (h3 + d) & 0xffffffff
h4 = (h4 + e) & 0xffffffff
# Return the final hash value (big-endian):
[
(h0 >>> 24) & 0xff, (h0 >>> 16) & 0xff, (h0 >>> 8) & 0xff, h0 & 0xff,
(h1 >>> 24) & 0xff, (h1 >>> 16) & 0xff, (h1 >>> 8) & 0xff, h1 & 0xff,
(h2 >>> 24) & 0xff, (h2 >>> 16) & 0xff, (h2 >>> 8) & 0xff, h2 & 0xff,
(h3 >>> 24) & 0xff, (h3 >>> 16) & 0xff, (h3 >>> 8) & 0xff, h3 & 0xff,
(h4 >>> 24) & 0xff, (h4 >>> 16) & 0xff, (h4 >>> 8) & 0xff, h4 & 0xff
] # note: returning this as a byte array is quite expensive!
# compute SHA1
#
# input is a string
# returns a hex string
sha1_string = (str) ->
# convert hex string to a byte array, hash, and convert back to a hex string
bytes_to_hex(sha1(str_to_bytes(str)))
#
# HMAC-SHA1 code
#
# compute HMAC-SHA1
#
# key_str and message_str are both strings
# returns a hex string
hmac_sha1 = (key_str, message_str) ->
# convert key & message to byte arrays
key = str_to_bytes(key_str)
message = str_to_bytes(message_str)
# initialize key
key = sha1(key) if key.length > 64 # keys longer than 64 are truncated to sha1 result
key.push(0) while key.length < 64 # keys shorter than 64 are padded with zeroes
# setup pads
opad = (0x5c ^ key[i] for i in [0...64])
ipad = (0x36 ^ key[i] for i in [0...64])
# calculate HMAC
bytes_to_hex(sha1(opad.concat(sha1(ipad.concat(message)))))
# create a byte string of length "size" of "b"s
byte_string = (b, size) ->
(String.fromCharCode(b) for i in [0...size]).join("")
# test HMAC-SHA1 with known test vectors, which naturally tests SHA1
verify_hmac_sha1 = ->
# SHA1 test data from RFC
test_vectors = [
[ "Jefe", "what do ya want for nothing?", "effcdf6ae5eb2fa2d27416d5f184df9c259a7c79" ],
[ byte_string(0x0b, 20), "Hi There", "b617318655057264e28bc0b6fb378c8ef146be00" ],
[ byte_string(0xaa, 20), byte_string(0xdd, 50), "125d7342b9ac11cd91a39af48aa17b4f63f175d3" ],
[ byte_string(0x0c, 20), "Test With Truncation", "4c1a03424b55e07fe7f27be1d58bb9324a9a5a04" ],
[ byte_string(0xaa, 80), "Test Using Larger Than Block-Size Key - Hash Key First", "aa4ae5e15272d00e95705637ce8a3b55ed402112" ],
[ byte_string(0xaa, 80), "Test Using Larger Than Block-Size Key and Larger Than One Block-Size Data", "e8e99d0f45237d786d6bbaa7965c7808bbff1a91" ]
]
# run tests
for t in test_vectors
# return false if a test fails
return false if hmac_sha1(t[0], t[1]) != t[2]
# all tests passed
true
#
# passy code
#
# returns true if string contains one of each [A-H], [a-h], [2-9], and [#$%*+=@?]
good_passy = (str, allowSymbols) ->
str.match(/[A-H]/) &&
str.match(/[a-h]/) &&
str.match(/[2-9]/) &&
(!allowSymbols || str.match(/[#$%*+=@?]/))
# given a long string, find the minimum length "good" passy (i.e. has one of each
# character type, and meets minimum length of 16 characters)
good_passy_length = (str, allowSymbols) ->
for i in [16..str.length]
return i if good_passy(str.substr(0,i), allowSymbols)
return str.length # uh-oh, that's a long passy!
# encode a hex string (typically a single octet) to a passy string
encode_passy = (secret, text, allowSymbols) ->
# our symbol table for passy
symtab = if allowSymbols then "ABCDEFGHabcdefgh23456789#$%*+=@?" else "ABCDEFGHabcdefgh23456789"
# convert a hex string to a single passy character
# * modulo and lookup in symtab string
hex2passy = (x) -> symtab[parseInt(x,16) % symtab.length]
# encode a hex string into a passy string
# 1. split a string into two character strings (octets)
# 2. encode each two char string (octet) into a single passy char
# 3. join resulting array of passy chars into a single passy string
encode = (str) ->
(hex2passy(str.substr(i,2)) for i in [0...str.length] by 2).join("")
# this is the hmac_sha1 concatenated with the sha1(hmac_sha1)
double_hmac = hmac_sha1(secret,text)
double_hmac = double_hmac + sha1_string(double_hmac)
# convert the hex hmac-sha1 string to a passy string
encoded = encode(double_hmac)
# determine the length of the passy
len = good_passy_length(encoded, allowSymbols)
# finally, return the passy string
encoded.substr(0,len)
# encode a hex string into the old version of a passy string
encode_passy_legacy = (secret, text) ->
hmac_sha1(secret,text).substr(0,10)
# begin main passy function
# return an error if passy fails on this javascript
return ["Error!","Error!"] unless (
encode_passy( "0123", "a", true) == "Gad6DdC2e3cD6dF937c82h5%" &&
encode_passy( "ABab12!@CDcd34#$", "aB234SLKDJF(*#@jfsdk", true) == "d+B8#@hh5CB%=Fef" &&
encode_passy("11111111111111111111", "00000000000000000000", true) == "Fgh5bE?94A2chdhF"
)
# return both new and old passy results (for now)
[encode_passy(secret,text, allowSymbols),encode_passy_legacy(secret,text)]
# add to the DOM (for Google closure compiler)
window['passy'] = passy
| 170078 | ###
passy.coffee
Implementation of passy algorithm in CoffeeScript, by <NAME>.
The MIT License
Copyright (c) 2011 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
###
passy = (text, secret, allowSymbols) ->
#
# SHA1 code
#
# convert a single character code to its byte representation
char_code_to_bytes = (char_code) ->
result = []
while char_code
result.unshift(char_code & 0xff)
char_code >>= 8
result
# convert a string into a byte array, including multibytes chars
str_to_bytes = (str) ->
result = []
for char_code in str
result = result.concat(char_code_to_bytes(char_code.charCodeAt(0)))
result
# convert a number to an array of 8 bytes representing a 64 bit big-endian integer
num_to_big_endian_64 = (num) ->
[
(num & 0xff00000000000000) >>> 56,
(num & 0x00ff000000000000) >>> 48,
(num & 0x0000ff0000000000) >>> 40,
(num & 0x000000ff00000000) >>> 32,
(num & 0x00000000ff000000) >>> 24,
(num & 0x0000000000ff0000) >>> 16,
(num & 0x000000000000ff00) >>> 8,
(num & 0x00000000000000ff)
]
# convert an array of bytes to an array of int32 (big-endian)
bytes_to_big_endian_32 = (array) ->
((array[i] << 24) | (array[i+1] << 16) | (array[i+2] << 8) | array[i+3]) for i in [0...array.length] by 4
# take an array of bytes and return the hex string
bytes_to_hex = (bytes) ->
hextab = "0123456789abcdef"
(hextab[(x >>> 4) & 0x0f] + hextab[x & 0x0f] for x in bytes).join("")
# compute SHA1 hash
#
# input is an array of bytes (big-endian order)
# returns an array of 20 bytes
sha1 = (byte_array) ->
# helper function (rotate left)
rol = (x,i) -> (x << i) | (x >>> (32-i))
# initialize variables
message = byte_array.slice(0) # copy array, since we will modify it
message_size_in_bits = message.length * 8 # store message size for later use
# initialize hash state variables
[h0, h1, h2, h3, h4] = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]
# Preprocess message in preparation for hasing:
# append the bit '1' to the message
message.push(0x80)
# append (0 <= k < 512) bits '0', so that the resulting message length (in bits) is congruent to 448 = -64 (mod 512)
message.push(0) while ((message.length + 8) % 64)
# append length of message (before pre-processing), in bits, as 64-bit big-endian integer
message = message.concat(num_to_big_endian_64(message_size_in_bits))
# Process the message in successive 512-bit chunks:
# break message into 512-bit chunks
for i_chunk in [0...message.length] by 64
# convert bytes into int32
w = bytes_to_big_endian_32(message.slice(i_chunk, i_chunk+64))
# init state with current hash
[a, b, c, d, e] = [h0, h1, h2, h3, h4]
# hash rounds
for i in [0...80]
s = i & 0xf
if (i >= 16)
temp = w[(s + 13) & 0xf] ^ w[(s + 8) & 0xf] ^ w[(s + 2) & 0xf] ^ w[s]
w[s] = (temp << 1) | (temp >>> 31)
if (i < 20)
# rol(a,5) + _________f[i]________ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( d ^ (b & (c ^ d)) ) + e + 0x5a827999 + w[s]) & 0xffffffff
else if (i < 40)
# rol(a,5) + _____f[i]____ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( b ^ c ^ d ) + e + 0x6ed9eba1 + w[s]) & 0xffffffff
else if (i < 60)
# rol(a,5) + ___________f[i]________________ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( (b & c) | (b & d) | (c & d) ) + e + 0x8f1bbcdc + w[s]) & 0xffffffff
else
# rol(a,5) + _____f[i]____ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( b ^ c ^ d ) + e + 0xca62c1d6 + w[s]) & 0xffffffff
[a, b, c, d, e] = [temp, a, rol(b,30), c, d]
# Add this chunk's hash to result so far:
h0 = (h0 + a) & 0xffffffff
h1 = (h1 + b) & 0xffffffff
h2 = (h2 + c) & 0xffffffff
h3 = (h3 + d) & 0xffffffff
h4 = (h4 + e) & 0xffffffff
# Return the final hash value (big-endian):
[
(h0 >>> 24) & 0xff, (h0 >>> 16) & 0xff, (h0 >>> 8) & 0xff, h0 & 0xff,
(h1 >>> 24) & 0xff, (h1 >>> 16) & 0xff, (h1 >>> 8) & 0xff, h1 & 0xff,
(h2 >>> 24) & 0xff, (h2 >>> 16) & 0xff, (h2 >>> 8) & 0xff, h2 & 0xff,
(h3 >>> 24) & 0xff, (h3 >>> 16) & 0xff, (h3 >>> 8) & 0xff, h3 & 0xff,
(h4 >>> 24) & 0xff, (h4 >>> 16) & 0xff, (h4 >>> 8) & 0xff, h4 & 0xff
] # note: returning this as a byte array is quite expensive!
# compute SHA1
#
# input is a string
# returns a hex string
sha1_string = (str) ->
# convert hex string to a byte array, hash, and convert back to a hex string
bytes_to_hex(sha1(str_to_bytes(str)))
#
# HMAC-SHA1 code
#
# compute HMAC-SHA1
#
# key_str and message_str are both strings
# returns a hex string
hmac_sha1 = (key_str, message_str) ->
# convert key & message to byte arrays
key = str_to_bytes(key_str)
message = str_to_bytes(message_str)
# initialize key
key = sha1(key) if key.length > 64 # keys longer than 64 are truncated to sha1 result
key.push(0) while key.length < 64 # keys shorter than 64 are padded with zeroes
# setup pads
opad = (0x5c ^ key[i] for i in [0...64])
ipad = (0x36 ^ key[i] for i in [0...64])
# calculate HMAC
bytes_to_hex(sha1(opad.concat(sha1(ipad.concat(message)))))
# create a byte string of length "size" of "b"s
byte_string = (b, size) ->
(String.fromCharCode(b) for i in [0...size]).join("")
# test HMAC-SHA1 with known test vectors, which naturally tests SHA1
verify_hmac_sha1 = ->
# SHA1 test data from RFC
test_vectors = [
[ "Jefe", "what do ya want for nothing?", "effcdf6ae5eb2fa2d27416d5f184df9c259a7c79" ],
[ byte_string(0x0b, 20), "Hi There", "b617318655057264e28bc0b6fb378c8ef146be00" ],
[ byte_string(0xaa, 20), byte_string(0xdd, 50), "125d7342b9ac11cd91a39af48aa17b4f63f175d3" ],
[ byte_string(0x0c, 20), "Test With Truncation", "4c1a03424b55e07fe7f27be1d58bb9324a9a5a04" ],
[ byte_string(0xaa, 80), "Test Using Larger Than Block-Size Key - Hash Key First", "aa4ae5e15272d00e95705637ce8a3b55ed402112" ],
[ byte_string(0xaa, 80), "Test Using Larger Than Block-Size Key and Larger Than One Block-Size Data", "e8e99d0f45237d786d6bbaa7965c7808bbff1a91" ]
]
# run tests
for t in test_vectors
# return false if a test fails
return false if hmac_sha1(t[0], t[1]) != t[2]
# all tests passed
true
#
# passy code
#
# returns true if string contains one of each [A-H], [a-h], [2-9], and [#$%*+=@?]
good_passy = (str, allowSymbols) ->
str.match(/[A-H]/) &&
str.match(/[a-h]/) &&
str.match(/[2-9]/) &&
(!allowSymbols || str.match(/[#$%*+=@?]/))
# given a long string, find the minimum length "good" passy (i.e. has one of each
# character type, and meets minimum length of 16 characters)
good_passy_length = (str, allowSymbols) ->
for i in [16..str.length]
return i if good_passy(str.substr(0,i), allowSymbols)
return str.length # uh-oh, that's a long passy!
# encode a hex string (typically a single octet) to a passy string
encode_passy = (secret, text, allowSymbols) ->
# our symbol table for passy
symtab = if allowSymbols then "ABCDEFGHabcdefgh23456789#$%*+=@?" else "ABCDEFGHabcdefgh23456789"
# convert a hex string to a single passy character
# * modulo and lookup in symtab string
hex2passy = (x) -> symtab[parseInt(x,16) % symtab.length]
# encode a hex string into a passy string
# 1. split a string into two character strings (octets)
# 2. encode each two char string (octet) into a single passy char
# 3. join resulting array of passy chars into a single passy string
encode = (str) ->
(hex2passy(str.substr(i,2)) for i in [0...str.length] by 2).join("")
# this is the hmac_sha1 concatenated with the sha1(hmac_sha1)
double_hmac = hmac_sha1(secret,text)
double_hmac = double_hmac + sha1_string(double_hmac)
# convert the hex hmac-sha1 string to a passy string
encoded = encode(double_hmac)
# determine the length of the passy
len = good_passy_length(encoded, allowSymbols)
# finally, return the passy string
encoded.substr(0,len)
# encode a hex string into the old version of a passy string
encode_passy_legacy = (secret, text) ->
hmac_sha1(secret,text).substr(0,10)
# begin main passy function
# return an error if passy fails on this javascript
return ["Error!","Error!"] unless (
encode_passy( "0123", "a", true) == "Gad6DdC2e3cD6dF937c82h5%" &&
encode_passy( "ABab12!@CDcd34#$", "aB234SLKDJF(*#@jfsdk", true) == "d+B8#@hh5CB%=Fef" &&
encode_passy("11111111111111111111", "00000000000000000000", true) == "Fgh5bE?94A2chdhF"
)
# return both new and old passy results (for now)
[encode_passy(secret,text, allowSymbols),encode_passy_legacy(secret,text)]
# add to the DOM (for Google closure compiler)
window['passy'] = passy
| true | ###
passy.coffee
Implementation of passy algorithm in CoffeeScript, by PI:NAME:<NAME>END_PI.
The MIT License
Copyright (c) 2011 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
###
passy = (text, secret, allowSymbols) ->
#
# SHA1 code
#
# convert a single character code to its byte representation
char_code_to_bytes = (char_code) ->
result = []
while char_code
result.unshift(char_code & 0xff)
char_code >>= 8
result
# convert a string into a byte array, including multibytes chars
str_to_bytes = (str) ->
result = []
for char_code in str
result = result.concat(char_code_to_bytes(char_code.charCodeAt(0)))
result
# convert a number to an array of 8 bytes representing a 64 bit big-endian integer
num_to_big_endian_64 = (num) ->
[
(num & 0xff00000000000000) >>> 56,
(num & 0x00ff000000000000) >>> 48,
(num & 0x0000ff0000000000) >>> 40,
(num & 0x000000ff00000000) >>> 32,
(num & 0x00000000ff000000) >>> 24,
(num & 0x0000000000ff0000) >>> 16,
(num & 0x000000000000ff00) >>> 8,
(num & 0x00000000000000ff)
]
# convert an array of bytes to an array of int32 (big-endian)
bytes_to_big_endian_32 = (array) ->
((array[i] << 24) | (array[i+1] << 16) | (array[i+2] << 8) | array[i+3]) for i in [0...array.length] by 4
# take an array of bytes and return the hex string
bytes_to_hex = (bytes) ->
hextab = "0123456789abcdef"
(hextab[(x >>> 4) & 0x0f] + hextab[x & 0x0f] for x in bytes).join("")
# compute SHA1 hash
#
# input is an array of bytes (big-endian order)
# returns an array of 20 bytes
sha1 = (byte_array) ->
# helper function (rotate left)
rol = (x,i) -> (x << i) | (x >>> (32-i))
# initialize variables
message = byte_array.slice(0) # copy array, since we will modify it
message_size_in_bits = message.length * 8 # store message size for later use
# initialize hash state variables
[h0, h1, h2, h3, h4] = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]
# Preprocess message in preparation for hasing:
# append the bit '1' to the message
message.push(0x80)
# append (0 <= k < 512) bits '0', so that the resulting message length (in bits) is congruent to 448 = -64 (mod 512)
message.push(0) while ((message.length + 8) % 64)
# append length of message (before pre-processing), in bits, as 64-bit big-endian integer
message = message.concat(num_to_big_endian_64(message_size_in_bits))
# Process the message in successive 512-bit chunks:
# break message into 512-bit chunks
for i_chunk in [0...message.length] by 64
# convert bytes into int32
w = bytes_to_big_endian_32(message.slice(i_chunk, i_chunk+64))
# init state with current hash
[a, b, c, d, e] = [h0, h1, h2, h3, h4]
# hash rounds
for i in [0...80]
s = i & 0xf
if (i >= 16)
temp = w[(s + 13) & 0xf] ^ w[(s + 8) & 0xf] ^ w[(s + 2) & 0xf] ^ w[s]
w[s] = (temp << 1) | (temp >>> 31)
if (i < 20)
# rol(a,5) + _________f[i]________ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( d ^ (b & (c ^ d)) ) + e + 0x5a827999 + w[s]) & 0xffffffff
else if (i < 40)
# rol(a,5) + _____f[i]____ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( b ^ c ^ d ) + e + 0x6ed9eba1 + w[s]) & 0xffffffff
else if (i < 60)
# rol(a,5) + ___________f[i]________________ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( (b & c) | (b & d) | (c & d) ) + e + 0x8f1bbcdc + w[s]) & 0xffffffff
else
# rol(a,5) + _____f[i]____ + e + ___k[i]___ + w[s]
temp = (rol(a,5) + ( b ^ c ^ d ) + e + 0xca62c1d6 + w[s]) & 0xffffffff
[a, b, c, d, e] = [temp, a, rol(b,30), c, d]
# Add this chunk's hash to result so far:
h0 = (h0 + a) & 0xffffffff
h1 = (h1 + b) & 0xffffffff
h2 = (h2 + c) & 0xffffffff
h3 = (h3 + d) & 0xffffffff
h4 = (h4 + e) & 0xffffffff
# Return the final hash value (big-endian):
[
(h0 >>> 24) & 0xff, (h0 >>> 16) & 0xff, (h0 >>> 8) & 0xff, h0 & 0xff,
(h1 >>> 24) & 0xff, (h1 >>> 16) & 0xff, (h1 >>> 8) & 0xff, h1 & 0xff,
(h2 >>> 24) & 0xff, (h2 >>> 16) & 0xff, (h2 >>> 8) & 0xff, h2 & 0xff,
(h3 >>> 24) & 0xff, (h3 >>> 16) & 0xff, (h3 >>> 8) & 0xff, h3 & 0xff,
(h4 >>> 24) & 0xff, (h4 >>> 16) & 0xff, (h4 >>> 8) & 0xff, h4 & 0xff
] # note: returning this as a byte array is quite expensive!
# compute SHA1
#
# input is a string
# returns a hex string
sha1_string = (str) ->
# convert hex string to a byte array, hash, and convert back to a hex string
bytes_to_hex(sha1(str_to_bytes(str)))
#
# HMAC-SHA1 code
#
# compute HMAC-SHA1
#
# key_str and message_str are both strings
# returns a hex string
hmac_sha1 = (key_str, message_str) ->
# convert key & message to byte arrays
key = str_to_bytes(key_str)
message = str_to_bytes(message_str)
# initialize key
key = sha1(key) if key.length > 64 # keys longer than 64 are truncated to sha1 result
key.push(0) while key.length < 64 # keys shorter than 64 are padded with zeroes
# setup pads
opad = (0x5c ^ key[i] for i in [0...64])
ipad = (0x36 ^ key[i] for i in [0...64])
# calculate HMAC
bytes_to_hex(sha1(opad.concat(sha1(ipad.concat(message)))))
# create a byte string of length "size" of "b"s
byte_string = (b, size) ->
(String.fromCharCode(b) for i in [0...size]).join("")
# test HMAC-SHA1 with known test vectors, which naturally tests SHA1
verify_hmac_sha1 = ->
# SHA1 test data from RFC
test_vectors = [
[ "Jefe", "what do ya want for nothing?", "effcdf6ae5eb2fa2d27416d5f184df9c259a7c79" ],
[ byte_string(0x0b, 20), "Hi There", "b617318655057264e28bc0b6fb378c8ef146be00" ],
[ byte_string(0xaa, 20), byte_string(0xdd, 50), "125d7342b9ac11cd91a39af48aa17b4f63f175d3" ],
[ byte_string(0x0c, 20), "Test With Truncation", "4c1a03424b55e07fe7f27be1d58bb9324a9a5a04" ],
[ byte_string(0xaa, 80), "Test Using Larger Than Block-Size Key - Hash Key First", "aa4ae5e15272d00e95705637ce8a3b55ed402112" ],
[ byte_string(0xaa, 80), "Test Using Larger Than Block-Size Key and Larger Than One Block-Size Data", "e8e99d0f45237d786d6bbaa7965c7808bbff1a91" ]
]
# run tests
for t in test_vectors
# return false if a test fails
return false if hmac_sha1(t[0], t[1]) != t[2]
# all tests passed
true
#
# passy code
#
# returns true if string contains one of each [A-H], [a-h], [2-9], and [#$%*+=@?]
good_passy = (str, allowSymbols) ->
str.match(/[A-H]/) &&
str.match(/[a-h]/) &&
str.match(/[2-9]/) &&
(!allowSymbols || str.match(/[#$%*+=@?]/))
# given a long string, find the minimum length "good" passy (i.e. has one of each
# character type, and meets minimum length of 16 characters)
good_passy_length = (str, allowSymbols) ->
for i in [16..str.length]
return i if good_passy(str.substr(0,i), allowSymbols)
return str.length # uh-oh, that's a long passy!
# encode a hex string (typically a single octet) to a passy string
encode_passy = (secret, text, allowSymbols) ->
# our symbol table for passy
symtab = if allowSymbols then "ABCDEFGHabcdefgh23456789#$%*+=@?" else "ABCDEFGHabcdefgh23456789"
# convert a hex string to a single passy character
# * modulo and lookup in symtab string
hex2passy = (x) -> symtab[parseInt(x,16) % symtab.length]
# encode a hex string into a passy string
# 1. split a string into two character strings (octets)
# 2. encode each two char string (octet) into a single passy char
# 3. join resulting array of passy chars into a single passy string
encode = (str) ->
(hex2passy(str.substr(i,2)) for i in [0...str.length] by 2).join("")
# this is the hmac_sha1 concatenated with the sha1(hmac_sha1)
double_hmac = hmac_sha1(secret,text)
double_hmac = double_hmac + sha1_string(double_hmac)
# convert the hex hmac-sha1 string to a passy string
encoded = encode(double_hmac)
# determine the length of the passy
len = good_passy_length(encoded, allowSymbols)
# finally, return the passy string
encoded.substr(0,len)
# encode a hex string into the old version of a passy string
encode_passy_legacy = (secret, text) ->
hmac_sha1(secret,text).substr(0,10)
# begin main passy function
# return an error if passy fails on this javascript
return ["Error!","Error!"] unless (
encode_passy( "0123", "a", true) == "Gad6DdC2e3cD6dF937c82h5%" &&
encode_passy( "ABab12!@CDcd34#$", "aB234SLKDJF(*#@jfsdk", true) == "d+B8#@hh5CB%=Fef" &&
encode_passy("11111111111111111111", "00000000000000000000", true) == "Fgh5bE?94A2chdhF"
)
# return both new and old passy results (for now)
[encode_passy(secret,text, allowSymbols),encode_passy_legacy(secret,text)]
# add to the DOM (for Google closure compiler)
window['passy'] = passy
|
[
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright (C) 2014 Jesús Espino ",
"end": 38,
"score": 0.9998903274536133,
"start": 25,
"tag": "NAME",
"value": "Andrey Antukh"
},
{
"context": "###\n# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>\n# Copyright... | public/taiga-front/app/coffee/modules/controllerMixins.coffee | mabotech/maboss | 0 | ###
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino Garcia <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán Merino <bameda@dbarragan.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/controllerMixins.coffee
###
taiga = @.taiga
groupBy = @.taiga.groupBy
joinStr = @.taiga.joinStr
trim = @.taiga.trim
toString = @.taiga.toString
#############################################################################
## Page Mixin
#############################################################################
class PageMixin
loadUsersAndRoles: ->
promise = @q.all([
@rs.projects.usersList(@scope.projectId),
@rs.projects.rolesList(@scope.projectId)
])
return promise.then (results) =>
[users, roles] = results
@scope.users = _.sortBy(users, "full_name_display")
@scope.usersById = groupBy(@scope.users, (e) -> e.id)
@scope.roles = _.sortBy(roles, "order")
availableRoles = _(@scope.project.memberships).map("role").uniq().value()
@scope.computableRoles = _(roles).filter("computable")
.filter((x) -> _.contains(availableRoles, x.id))
.value()
return results
taiga.PageMixin = PageMixin
#############################################################################
## Filters Mixin
#############################################################################
# This mixin requires @location ($tgLocation) and @scope
class FiltersMixin
selectFilter: (name, value, load=false) ->
params = @location.search()
if params[name] != undefined and name != "page"
existing = _.map(taiga.toString(params[name]).split(","), (x) -> trim(x))
existing.push(taiga.toString(value))
existing = _.compact(existing)
value = joinStr(",", _.uniq(existing))
location = if load then @location else @location.noreload(@scope)
location.search(name, value)
replaceFilter: (name, value, load=false) ->
location = if load then @location else @location.noreload(@scope)
location.search(name, value)
replaceAllFilters: (filters, load=false) ->
location = if load then @location else @location.noreload(@scope)
location.search(filters)
unselectFilter: (name, value, load=false) ->
params = @location.search()
if params[name] is undefined
return
if value is undefined or value is null
delete params[name]
parsedValues = _.map(taiga.toString(params[name]).split(","), (x) -> trim(x))
newValues = _.reject(parsedValues, (x) -> x == taiga.toString(value))
newValues = _.compact(newValues)
if _.isEmpty(newValues)
value = null
else
value = joinStr(",", _.uniq(newValues))
location = if load then @location else @location.noreload(@scope)
location.search(name, value)
taiga.FiltersMixin = FiltersMixin
| 214324 | ###
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
# Copyright (C) 2014 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/controllerMixins.coffee
###
taiga = @.taiga
groupBy = @.taiga.groupBy
joinStr = @.taiga.joinStr
trim = @.taiga.trim
toString = @.taiga.toString
#############################################################################
## Page Mixin
#############################################################################
class PageMixin
loadUsersAndRoles: ->
promise = @q.all([
@rs.projects.usersList(@scope.projectId),
@rs.projects.rolesList(@scope.projectId)
])
return promise.then (results) =>
[users, roles] = results
@scope.users = _.sortBy(users, "full_name_display")
@scope.usersById = groupBy(@scope.users, (e) -> e.id)
@scope.roles = _.sortBy(roles, "order")
availableRoles = _(@scope.project.memberships).map("role").uniq().value()
@scope.computableRoles = _(roles).filter("computable")
.filter((x) -> _.contains(availableRoles, x.id))
.value()
return results
taiga.PageMixin = PageMixin
#############################################################################
## Filters Mixin
#############################################################################
# This mixin requires @location ($tgLocation) and @scope
class FiltersMixin
selectFilter: (name, value, load=false) ->
params = @location.search()
if params[name] != undefined and name != "page"
existing = _.map(taiga.toString(params[name]).split(","), (x) -> trim(x))
existing.push(taiga.toString(value))
existing = _.compact(existing)
value = joinStr(",", _.uniq(existing))
location = if load then @location else @location.noreload(@scope)
location.search(name, value)
replaceFilter: (name, value, load=false) ->
location = if load then @location else @location.noreload(@scope)
location.search(name, value)
replaceAllFilters: (filters, load=false) ->
location = if load then @location else @location.noreload(@scope)
location.search(filters)
unselectFilter: (name, value, load=false) ->
params = @location.search()
if params[name] is undefined
return
if value is undefined or value is null
delete params[name]
parsedValues = _.map(taiga.toString(params[name]).split(","), (x) -> trim(x))
newValues = _.reject(parsedValues, (x) -> x == taiga.toString(value))
newValues = _.compact(newValues)
if _.isEmpty(newValues)
value = null
else
value = joinStr(",", _.uniq(newValues))
location = if load then @location else @location.noreload(@scope)
location.search(name, value)
taiga.FiltersMixin = FiltersMixin
| true | ###
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# Copyright (C) 2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/controllerMixins.coffee
###
taiga = @.taiga
groupBy = @.taiga.groupBy
joinStr = @.taiga.joinStr
trim = @.taiga.trim
toString = @.taiga.toString
#############################################################################
## Page Mixin
#############################################################################
class PageMixin
loadUsersAndRoles: ->
promise = @q.all([
@rs.projects.usersList(@scope.projectId),
@rs.projects.rolesList(@scope.projectId)
])
return promise.then (results) =>
[users, roles] = results
@scope.users = _.sortBy(users, "full_name_display")
@scope.usersById = groupBy(@scope.users, (e) -> e.id)
@scope.roles = _.sortBy(roles, "order")
availableRoles = _(@scope.project.memberships).map("role").uniq().value()
@scope.computableRoles = _(roles).filter("computable")
.filter((x) -> _.contains(availableRoles, x.id))
.value()
return results
taiga.PageMixin = PageMixin
#############################################################################
## Filters Mixin
#############################################################################
# This mixin requires @location ($tgLocation) and @scope
class FiltersMixin
selectFilter: (name, value, load=false) ->
params = @location.search()
if params[name] != undefined and name != "page"
existing = _.map(taiga.toString(params[name]).split(","), (x) -> trim(x))
existing.push(taiga.toString(value))
existing = _.compact(existing)
value = joinStr(",", _.uniq(existing))
location = if load then @location else @location.noreload(@scope)
location.search(name, value)
replaceFilter: (name, value, load=false) ->
location = if load then @location else @location.noreload(@scope)
location.search(name, value)
replaceAllFilters: (filters, load=false) ->
location = if load then @location else @location.noreload(@scope)
location.search(filters)
unselectFilter: (name, value, load=false) ->
params = @location.search()
if params[name] is undefined
return
if value is undefined or value is null
delete params[name]
parsedValues = _.map(taiga.toString(params[name]).split(","), (x) -> trim(x))
newValues = _.reject(parsedValues, (x) -> x == taiga.toString(value))
newValues = _.compact(newValues)
if _.isEmpty(newValues)
value = null
else
value = joinStr(",", _.uniq(newValues))
location = if load then @location else @location.noreload(@scope)
location.search(name, value)
taiga.FiltersMixin = FiltersMixin
|
[
{
"context": "###\n# controllers/archive.coffee\n#\n# © 2014 Dan Nichols\n# See LICENSE for more details\n#\n# This module de",
"end": 55,
"score": 0.9995628595352173,
"start": 44,
"tag": "NAME",
"value": "Dan Nichols"
}
] | lib/controllers/archives.coffee | dlnichols/h_media | 0 | ###
# controllers/archive.coffee
#
# © 2014 Dan Nichols
# See LICENSE for more details
#
# This module defines the CRUD actions on the archive resource, for use in our
# express router.
###
'use strict'
# External libs
_ = require 'lodash'
mongoose = require 'mongoose'
debug = require('debug') 'hMedia:controllers:archive'
# Retrieve our model from mongoose
Archive = mongoose.model 'Archive'
###
# Archive controller
#
# Define the basic CRUD actions for the archive resource
###
debug 'Configuring archives controller...'
module.exports = exports =
###
# index
###
index: (req, res, next) ->
Archive
.find {}
.sort {glacierId: 1}
.limit 20
.exec (err, archives) ->
return next(err) if err
res.send archives || []
###
# create
###
create: (req, res, next) ->
new Archive()
.safeAssign req.body
.save (err, archive) ->
return next(err) if err
res.send archive
###
# show
###
show: (req, res, next) ->
Archive.findById req.params.id, (err, archive) ->
return next(err) if err
res.send archive
###
# update
###
update: (req, res, next) ->
Archive.findById req.params.id, (err, archive) ->
return next(err) if err
archive
.safeAssign req.body
.save (err, archive) ->
return next(err) if err
res.send archive
###
# delete
###
delete: (req, res, next) ->
Archive.findByIdAndRemove req.params.id, (err, archive) ->
return next(err) if err
res.send archive
| 116726 | ###
# controllers/archive.coffee
#
# © 2014 <NAME>
# See LICENSE for more details
#
# This module defines the CRUD actions on the archive resource, for use in our
# express router.
###
'use strict'
# External libs
_ = require 'lodash'
mongoose = require 'mongoose'
debug = require('debug') 'hMedia:controllers:archive'
# Retrieve our model from mongoose
Archive = mongoose.model 'Archive'
###
# Archive controller
#
# Define the basic CRUD actions for the archive resource
###
debug 'Configuring archives controller...'
module.exports = exports =
###
# index
###
index: (req, res, next) ->
Archive
.find {}
.sort {glacierId: 1}
.limit 20
.exec (err, archives) ->
return next(err) if err
res.send archives || []
###
# create
###
create: (req, res, next) ->
new Archive()
.safeAssign req.body
.save (err, archive) ->
return next(err) if err
res.send archive
###
# show
###
show: (req, res, next) ->
Archive.findById req.params.id, (err, archive) ->
return next(err) if err
res.send archive
###
# update
###
update: (req, res, next) ->
Archive.findById req.params.id, (err, archive) ->
return next(err) if err
archive
.safeAssign req.body
.save (err, archive) ->
return next(err) if err
res.send archive
###
# delete
###
delete: (req, res, next) ->
Archive.findByIdAndRemove req.params.id, (err, archive) ->
return next(err) if err
res.send archive
| true | ###
# controllers/archive.coffee
#
# © 2014 PI:NAME:<NAME>END_PI
# See LICENSE for more details
#
# This module defines the CRUD actions on the archive resource, for use in our
# express router.
###
'use strict'
# External libs
_ = require 'lodash'
mongoose = require 'mongoose'
debug = require('debug') 'hMedia:controllers:archive'
# Retrieve our model from mongoose
Archive = mongoose.model 'Archive'
###
# Archive controller
#
# Define the basic CRUD actions for the archive resource
###
debug 'Configuring archives controller...'
module.exports = exports =
###
# index
###
index: (req, res, next) ->
Archive
.find {}
.sort {glacierId: 1}
.limit 20
.exec (err, archives) ->
return next(err) if err
res.send archives || []
###
# create
###
create: (req, res, next) ->
new Archive()
.safeAssign req.body
.save (err, archive) ->
return next(err) if err
res.send archive
###
# show
###
show: (req, res, next) ->
Archive.findById req.params.id, (err, archive) ->
return next(err) if err
res.send archive
###
# update
###
update: (req, res, next) ->
Archive.findById req.params.id, (err, archive) ->
return next(err) if err
archive
.safeAssign req.body
.save (err, archive) ->
return next(err) if err
res.send archive
###
# delete
###
delete: (req, res, next) ->
Archive.findByIdAndRemove req.params.id, (err, archive) ->
return next(err) if err
res.send archive
|
[
{
"context": "send\n creator:\n _id: 1\n name: 'xxx'\n body: '开始投票 1,2'\n _roomId: 1\n ev",
"end": 286,
"score": 0.49182581901550293,
"start": 283,
"tag": "USERNAME",
"value": "xxx"
},
{
"context": "send\n creator:\n _id: 1\n name:... | test/main.coffee | jianliaoim/talk-vote-bot | 5 | should = require 'should'
app = require '../app'
request = require 'supertest'
describe 'Bot', ->
it 'should start vote in a channel', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 1
name: 'xxx'
body: '开始投票 1,2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
res.body.body.should.eql '''
投票开始,操作选项:
1. 计票:"@我 选项1,选项2"(如需多选,可通过','分隔后发送给我)
2. 查看结果:"@我 查看结果"
投票将在 1 小时后结束,请在结束时间前查看投票结果
'''
done err
it 'should not start vote again in room 1', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 1
name: 'xxx'
body: '开始投票,1,2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
res.body.body.should.eql '上次投票尚未结束,请查看统计结果后再发起投票'
done err
it 'should record vote 1 of user xxx', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 1
name: 'xxx'
body: '2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
console.log "Vote result", res.body
done err
it 'should record vote 1, 2 of user yyy', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 2
name: 'yyy'
body: '<$at|1|robot$> 1,2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
done err
it 'should get the final record of votes', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 2
name: 'yyy'
body: '查看结果'
_roomId: 1
event: 'message.create'
.end (err, res) ->
res.body.body.should.eql '''
投票选项:1,2
总票数:3
选项 2,票数 2 66.67% (xxx,yyy)
选项 1,票数 1 33.33% (yyy)
'''
done err
| 164202 | should = require 'should'
app = require '../app'
request = require 'supertest'
describe 'Bot', ->
it 'should start vote in a channel', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 1
name: 'xxx'
body: '开始投票 1,2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
res.body.body.should.eql '''
投票开始,操作选项:
1. 计票:"@我 选项1,选项2"(如需多选,可通过','分隔后发送给我)
2. 查看结果:"@我 查看结果"
投票将在 1 小时后结束,请在结束时间前查看投票结果
'''
done err
it 'should not start vote again in room 1', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 1
name: '<NAME>'
body: '开始投票,1,2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
res.body.body.should.eql '上次投票尚未结束,请查看统计结果后再发起投票'
done err
it 'should record vote 1 of user xxx', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 1
name: '<NAME>'
body: '2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
console.log "Vote result", res.body
done err
it 'should record vote 1, 2 of user yyy', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 2
name: '<NAME>'
body: '<$at|1|robot$> 1,2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
done err
it 'should get the final record of votes', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 2
name: '<NAME>'
body: '查看结果'
_roomId: 1
event: 'message.create'
.end (err, res) ->
res.body.body.should.eql '''
投票选项:1,2
总票数:3
选项 2,票数 2 66.67% (xxx,yyy)
选项 1,票数 1 33.33% (yyy)
'''
done err
| true | should = require 'should'
app = require '../app'
request = require 'supertest'
describe 'Bot', ->
it 'should start vote in a channel', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 1
name: 'xxx'
body: '开始投票 1,2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
res.body.body.should.eql '''
投票开始,操作选项:
1. 计票:"@我 选项1,选项2"(如需多选,可通过','分隔后发送给我)
2. 查看结果:"@我 查看结果"
投票将在 1 小时后结束,请在结束时间前查看投票结果
'''
done err
it 'should not start vote again in room 1', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 1
name: 'PI:NAME:<NAME>END_PI'
body: '开始投票,1,2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
res.body.body.should.eql '上次投票尚未结束,请查看统计结果后再发起投票'
done err
it 'should record vote 1 of user xxx', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 1
name: 'PI:NAME:<NAME>END_PI'
body: '2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
console.log "Vote result", res.body
done err
it 'should record vote 1, 2 of user yyy', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 2
name: 'PI:NAME:<NAME>END_PI'
body: '<$at|1|robot$> 1,2'
_roomId: 1
event: 'message.create'
.end (err, res) ->
done err
it 'should get the final record of votes', (done) ->
request(app).post '/incoming'
.set 'Content-Type': 'application/json'
.send
creator:
_id: 2
name: 'PI:NAME:<NAME>END_PI'
body: '查看结果'
_roomId: 1
event: 'message.create'
.end (err, res) ->
res.body.body.should.eql '''
投票选项:1,2
总票数:3
选项 2,票数 2 66.67% (xxx,yyy)
选项 1,票数 1 33.33% (yyy)
'''
done err
|
[
{
"context": "opyright 2013 Canopy Canopy Canopy, Inc.\n# Authors Adam Florin & Anthony Tran\n#\n",
"end": 473,
"score": 0.9997961521148682,
"start": 462,
"tag": "NAME",
"value": "Adam Florin"
},
{
"context": "Canopy Canopy Canopy, Inc.\n# Authors Adam Florin & Anthony Tran\n#\n",
... | app/assets/javascripts/alongslide.coffee | triplecanopy/alongslide | 33 | #
# alongslide.js: Require Alongslide libraries.
#
# Vendor
#= require regionFlow
#= require skrollr
#= require tether
#= require prefix
#= require jquery.fitvids
#= require jquery.history
#
# Utility
#= require ./styles
#= require ./fixedAspect
#
# Core
#= require alongslide/alongslide
#= require alongslide/parser
#= require alongslide/layout
#= require alongslide/scrolling
#= require alongslide/state
#
# Copyright 2013 Canopy Canopy Canopy, Inc.
# Authors Adam Florin & Anthony Tran
#
| 143144 | #
# alongslide.js: Require Alongslide libraries.
#
# Vendor
#= require regionFlow
#= require skrollr
#= require tether
#= require prefix
#= require jquery.fitvids
#= require jquery.history
#
# Utility
#= require ./styles
#= require ./fixedAspect
#
# Core
#= require alongslide/alongslide
#= require alongslide/parser
#= require alongslide/layout
#= require alongslide/scrolling
#= require alongslide/state
#
# Copyright 2013 Canopy Canopy Canopy, Inc.
# Authors <NAME> & <NAME>
#
| true | #
# alongslide.js: Require Alongslide libraries.
#
# Vendor
#= require regionFlow
#= require skrollr
#= require tether
#= require prefix
#= require jquery.fitvids
#= require jquery.history
#
# Utility
#= require ./styles
#= require ./fixedAspect
#
# Core
#= require alongslide/alongslide
#= require alongslide/parser
#= require alongslide/layout
#= require alongslide/scrolling
#= require alongslide/state
#
# Copyright 2013 Canopy Canopy Canopy, Inc.
# Authors PI:NAME:<NAME>END_PI & PI:NAME:<NAME>END_PI
#
|
[
{
"context": ".conversation [\n { expect : \"What is your name, Droote?\" },\n { sendline : \"abc\" },\n { expect : /Ja",
"end": 939,
"score": 0.9981682896614075,
"start": 933,
"tag": "NAME",
"value": "Droote"
},
{
"context": " )(defer err)\n T.no_error err\n await eng.se... | node_modules/iced-expect/test/files/t1.iced | AngelKey/Angelkey.nodeclient | 151 |
read = require 'read'
{Engine} = require '../../lib/main'
path = require 'path'
exports.t1 = (T,cb) ->
eng = new Engine {
name : path.join(__dirname, "..", "bin", "p1.iced")
}
eng.run()
await eng.expect { pattern : /Droote\?/ }, defer err
T.no_error err
await eng.send "Joe\n", defer()
await eng.expect { pattern : /Jabbers\?/ }, defer err
T.no_error err
await eng.send "Bill\n", defer()
await eng.expect { pattern : /those dogs/ }, defer err
T.no_error err
await eng.send "yes\n", defer()
eng.expect { pattern : /You good\?/, repeat : true }, () ->
await eng.sendline "y", defer()
await eng.wait defer rc
T.equal rc, 0, "error was 0"
T.equal eng.stdout().toString('utf8'), "Joe:Bill:yes\n", "right stdout"
cb()
exports.t2 = (T,cb) ->
eng = new Engine {
name : path.join(__dirname, "..", "bin", "p1.iced")
}
await eng.run().conversation [
{ expect : "What is your name, Droote?" },
{ sendline : "abc" },
{ expect : /Jabbers\?/ },
{ sendline : "1234" },
{ expect : /those dogs/ },
{ sendline : "woof" }
], defer err
await eng.wait defer rc
T.equal rc, 0, "error was 0"
T.equal eng.stdout().toString('utf8'), "abc:1234:woof\n", "right stdout"
cb()
exports.t3 = (T,cb) ->
eng = new Engine {
name : path.join(__dirname, "..", "bin", "p1.iced")
}
eng.run()
await
((cb) ->
eng.expect { pattern : /No way in hell/ }, (err) ->
if (tcb = cb)
cb = null
tcb err
eng.expect { pattern : /Droote\?/ }, (err) ->
if (tcb = cb)
cb = null
tcb err
)(defer err)
T.no_error err
await eng.send "Joe\n", defer()
await eng.expect { pattern : /Jabbers\?/ }, defer err
T.no_error err
await eng.send "Bill\n", defer()
await eng.expect { pattern : /those dogs/ }, defer err
T.no_error err
await eng.send "no\n", defer()
await eng.wait defer rc
T.equal rc, 0, "error was 0"
T.equal eng.stdout().toString('utf8'), "Joe:Bill:no\n", "right stdout"
cb()
| 96633 |
read = require 'read'
{Engine} = require '../../lib/main'
path = require 'path'
exports.t1 = (T,cb) ->
eng = new Engine {
name : path.join(__dirname, "..", "bin", "p1.iced")
}
eng.run()
await eng.expect { pattern : /Droote\?/ }, defer err
T.no_error err
await eng.send "Joe\n", defer()
await eng.expect { pattern : /Jabbers\?/ }, defer err
T.no_error err
await eng.send "Bill\n", defer()
await eng.expect { pattern : /those dogs/ }, defer err
T.no_error err
await eng.send "yes\n", defer()
eng.expect { pattern : /You good\?/, repeat : true }, () ->
await eng.sendline "y", defer()
await eng.wait defer rc
T.equal rc, 0, "error was 0"
T.equal eng.stdout().toString('utf8'), "Joe:Bill:yes\n", "right stdout"
cb()
exports.t2 = (T,cb) ->
eng = new Engine {
name : path.join(__dirname, "..", "bin", "p1.iced")
}
await eng.run().conversation [
{ expect : "What is your name, <NAME>?" },
{ sendline : "abc" },
{ expect : /Jabbers\?/ },
{ sendline : "1234" },
{ expect : /those dogs/ },
{ sendline : "woof" }
], defer err
await eng.wait defer rc
T.equal rc, 0, "error was 0"
T.equal eng.stdout().toString('utf8'), "abc:1234:woof\n", "right stdout"
cb()
exports.t3 = (T,cb) ->
eng = new Engine {
name : path.join(__dirname, "..", "bin", "p1.iced")
}
eng.run()
await
((cb) ->
eng.expect { pattern : /No way in hell/ }, (err) ->
if (tcb = cb)
cb = null
tcb err
eng.expect { pattern : /Droote\?/ }, (err) ->
if (tcb = cb)
cb = null
tcb err
)(defer err)
T.no_error err
await eng.send "<NAME>\n", defer()
await eng.expect { pattern : /Jabbers\?/ }, defer err
T.no_error err
await eng.send "<NAME>\n", defer()
await eng.expect { pattern : /those dogs/ }, defer err
T.no_error err
await eng.send "no\n", defer()
await eng.wait defer rc
T.equal rc, 0, "error was 0"
T.equal eng.stdout().toString('utf8'), "Joe:Bill:no\n", "right stdout"
cb()
| true |
read = require 'read'
{Engine} = require '../../lib/main'
path = require 'path'
exports.t1 = (T,cb) ->
eng = new Engine {
name : path.join(__dirname, "..", "bin", "p1.iced")
}
eng.run()
await eng.expect { pattern : /Droote\?/ }, defer err
T.no_error err
await eng.send "Joe\n", defer()
await eng.expect { pattern : /Jabbers\?/ }, defer err
T.no_error err
await eng.send "Bill\n", defer()
await eng.expect { pattern : /those dogs/ }, defer err
T.no_error err
await eng.send "yes\n", defer()
eng.expect { pattern : /You good\?/, repeat : true }, () ->
await eng.sendline "y", defer()
await eng.wait defer rc
T.equal rc, 0, "error was 0"
T.equal eng.stdout().toString('utf8'), "Joe:Bill:yes\n", "right stdout"
cb()
exports.t2 = (T,cb) ->
eng = new Engine {
name : path.join(__dirname, "..", "bin", "p1.iced")
}
await eng.run().conversation [
{ expect : "What is your name, PI:NAME:<NAME>END_PI?" },
{ sendline : "abc" },
{ expect : /Jabbers\?/ },
{ sendline : "1234" },
{ expect : /those dogs/ },
{ sendline : "woof" }
], defer err
await eng.wait defer rc
T.equal rc, 0, "error was 0"
T.equal eng.stdout().toString('utf8'), "abc:1234:woof\n", "right stdout"
cb()
exports.t3 = (T,cb) ->
eng = new Engine {
name : path.join(__dirname, "..", "bin", "p1.iced")
}
eng.run()
await
((cb) ->
eng.expect { pattern : /No way in hell/ }, (err) ->
if (tcb = cb)
cb = null
tcb err
eng.expect { pattern : /Droote\?/ }, (err) ->
if (tcb = cb)
cb = null
tcb err
)(defer err)
T.no_error err
await eng.send "PI:NAME:<NAME>END_PI\n", defer()
await eng.expect { pattern : /Jabbers\?/ }, defer err
T.no_error err
await eng.send "PI:NAME:<NAME>END_PI\n", defer()
await eng.expect { pattern : /those dogs/ }, defer err
T.no_error err
await eng.send "no\n", defer()
await eng.wait defer rc
T.equal rc, 0, "error was 0"
T.equal eng.stdout().toString('utf8'), "Joe:Bill:no\n", "right stdout"
cb()
|
[
{
"context": "lunk-graphdb: dbUrl', dbUrl\n\n defaultUsername = 'neo4j'\n defaultPassword = 'neo4j'\n\n getDB = (settings",
"end": 355,
"score": 0.9992361068725586,
"start": 350,
"tag": "USERNAME",
"value": "neo4j"
},
{
"context": "\n defaultUsername = 'neo4j'\n defaultPassword ... | src/db.coffee | brianshaler/kerplunk-graphdb | 0 | crypto = require 'crypto'
_ = require 'lodash'
request = require 'request'
Promise = require 'when'
Promisify = require('when/node').lift
neo4j = require 'neo4j'
module.exports = (System) ->
{ip, ports} = System.getService 'neo4j'
dbUrl = "http://#{ip}:#{ports['7474/tcp']}"
console.log 'kerplunk-graphdb: dbUrl', dbUrl
defaultUsername = 'neo4j'
defaultPassword = 'neo4j'
getDB = (settings) ->
# _dbUrl = "http://#{settings.username}:#{settings.password}@#{ip}:#{ports['7474/tcp']}"
# db = new neo4j.GraphDatabase _dbUrl
settings.username = settings.username ? defaultUsername
settings.password = settings.password ? defaultPassword
dbOptions =
url: dbUrl
auth:
username: settings.username
password: settings.password
dbRef = new neo4j.GraphDatabase dbOptions
dbRef: dbRef
models: {}
cypher: (opt) ->
Promise.promise (resolve, reject) ->
dbRef.cypher opt, (err, data) ->
return reject err if err
resolve data
createConstraint: (opt) ->
Promise.promise (resolve, reject) ->
dbRef.createConstraint opt, (err, data) ->
return reject err if err
resolve data
checkPasswordChangeNeeded: ->
Promise.promise (resolve, reject) ->
dbRef.checkPasswordChangeNeeded (err, data) ->
return reject err if err
resolve data
changePassword: (opt) ->
Promise.promise (resolve, reject) ->
dbRef.changePassword opt, (err) ->
return reject err if err
resolve()
checkPassword = (db) ->
db.checkPasswordChangeNeeded()
.then (needed) ->
return unless needed
password = crypto.createHash 'sha1'
.update "#{Date.now()}#{Math.round Math.random() * 100000000}"
.digest 'base64'
db.changePassword password
.then System.getSettings
.then (settings) ->
settings.username = settings.username ? defaultUsername
settings.password = password
System.updateSettings settings
.then -> db
registerModel = (db, Model) ->
model = Model db
db.models[model.name] = model
Promise.all _.map (model.schema), (properties, key) ->
return unless properties.unique
db.createConstraint
label: model.name
property: key
.catch (err) ->
console.log 'failed to register constraints', err?.stack ? err
model
dbCache = null
methods:
getDB: ->
return dbCache if dbCache
dbCache = System.getSettings()
.then (settings) ->
# looks like first run, so delay to give service time to start up
if !settings.password
# console.log 'wait for it....'
Promise.promise (resolve, reject) ->
setTimeout ->
# console.log 'okay, now try to connect to DB'
resolve settings
, 4000
else
# console.log 'try right away'
settings
.then getDB
.then checkPassword
.then (db) ->
db.model = (Model) ->
if typeof Model is 'string'
db.models[Model]
else if typeof Model is 'function'
registerModel db, Model
db
| 182635 | crypto = require 'crypto'
_ = require 'lodash'
request = require 'request'
Promise = require 'when'
Promisify = require('when/node').lift
neo4j = require 'neo4j'
module.exports = (System) ->
{ip, ports} = System.getService 'neo4j'
dbUrl = "http://#{ip}:#{ports['7474/tcp']}"
console.log 'kerplunk-graphdb: dbUrl', dbUrl
defaultUsername = 'neo4j'
defaultPassword = '<PASSWORD>'
getDB = (settings) ->
# _dbUrl = "http://#{settings.username}:#{settings.password}@#{ip}:#{ports['7474/tcp']}"
# db = new neo4j.GraphDatabase _dbUrl
settings.username = settings.username ? defaultUsername
settings.password = settings.password ? defaultPassword
dbOptions =
url: dbUrl
auth:
username: settings.username
password: <PASSWORD>
dbRef = new neo4j.GraphDatabase dbOptions
dbRef: dbRef
models: {}
cypher: (opt) ->
Promise.promise (resolve, reject) ->
dbRef.cypher opt, (err, data) ->
return reject err if err
resolve data
createConstraint: (opt) ->
Promise.promise (resolve, reject) ->
dbRef.createConstraint opt, (err, data) ->
return reject err if err
resolve data
checkPasswordChangeNeeded: ->
Promise.promise (resolve, reject) ->
dbRef.checkPasswordChangeNeeded (err, data) ->
return reject err if err
resolve data
changePassword: (opt) ->
Promise.promise (resolve, reject) ->
dbRef.changePassword opt, (err) ->
return reject err if err
resolve()
checkPassword = (db) ->
db.checkPasswordChangeNeeded()
.then (needed) ->
return unless needed
password = crypto.createHash 'sha1'
.update "#{Date.now()}#{Math.round Math.random() * 100000000}"
.digest 'base64'
db.changePassword password
.then System.getSettings
.then (settings) ->
settings.username = settings.username ? defaultUsername
settings.password = <PASSWORD>
System.updateSettings settings
.then -> db
registerModel = (db, Model) ->
model = Model db
db.models[model.name] = model
Promise.all _.map (model.schema), (properties, key) ->
return unless properties.unique
db.createConstraint
label: model.name
property: key
.catch (err) ->
console.log 'failed to register constraints', err?.stack ? err
model
dbCache = null
methods:
getDB: ->
return dbCache if dbCache
dbCache = System.getSettings()
.then (settings) ->
# looks like first run, so delay to give service time to start up
if !settings.password
# console.log 'wait for it....'
Promise.promise (resolve, reject) ->
setTimeout ->
# console.log 'okay, now try to connect to DB'
resolve settings
, 4000
else
# console.log 'try right away'
settings
.then getDB
.then checkPassword
.then (db) ->
db.model = (Model) ->
if typeof Model is 'string'
db.models[Model]
else if typeof Model is 'function'
registerModel db, Model
db
| true | crypto = require 'crypto'
_ = require 'lodash'
request = require 'request'
Promise = require 'when'
Promisify = require('when/node').lift
neo4j = require 'neo4j'
module.exports = (System) ->
{ip, ports} = System.getService 'neo4j'
dbUrl = "http://#{ip}:#{ports['7474/tcp']}"
console.log 'kerplunk-graphdb: dbUrl', dbUrl
defaultUsername = 'neo4j'
defaultPassword = 'PI:PASSWORD:<PASSWORD>END_PI'
getDB = (settings) ->
# _dbUrl = "http://#{settings.username}:#{settings.password}@#{ip}:#{ports['7474/tcp']}"
# db = new neo4j.GraphDatabase _dbUrl
settings.username = settings.username ? defaultUsername
settings.password = settings.password ? defaultPassword
dbOptions =
url: dbUrl
auth:
username: settings.username
password: PI:PASSWORD:<PASSWORD>END_PI
dbRef = new neo4j.GraphDatabase dbOptions
dbRef: dbRef
models: {}
cypher: (opt) ->
Promise.promise (resolve, reject) ->
dbRef.cypher opt, (err, data) ->
return reject err if err
resolve data
createConstraint: (opt) ->
Promise.promise (resolve, reject) ->
dbRef.createConstraint opt, (err, data) ->
return reject err if err
resolve data
checkPasswordChangeNeeded: ->
Promise.promise (resolve, reject) ->
dbRef.checkPasswordChangeNeeded (err, data) ->
return reject err if err
resolve data
changePassword: (opt) ->
Promise.promise (resolve, reject) ->
dbRef.changePassword opt, (err) ->
return reject err if err
resolve()
checkPassword = (db) ->
db.checkPasswordChangeNeeded()
.then (needed) ->
return unless needed
password = crypto.createHash 'sha1'
.update "#{Date.now()}#{Math.round Math.random() * 100000000}"
.digest 'base64'
db.changePassword password
.then System.getSettings
.then (settings) ->
settings.username = settings.username ? defaultUsername
settings.password = PI:PASSWORD:<PASSWORD>END_PI
System.updateSettings settings
.then -> db
registerModel = (db, Model) ->
model = Model db
db.models[model.name] = model
Promise.all _.map (model.schema), (properties, key) ->
return unless properties.unique
db.createConstraint
label: model.name
property: key
.catch (err) ->
console.log 'failed to register constraints', err?.stack ? err
model
dbCache = null
methods:
getDB: ->
return dbCache if dbCache
dbCache = System.getSettings()
.then (settings) ->
# looks like first run, so delay to give service time to start up
if !settings.password
# console.log 'wait for it....'
Promise.promise (resolve, reject) ->
setTimeout ->
# console.log 'okay, now try to connect to DB'
resolve settings
, 4000
else
# console.log 'try right away'
settings
.then getDB
.then checkPassword
.then (db) ->
db.model = (Model) ->
if typeof Model is 'string'
db.models[Model]
else if typeof Model is 'function'
registerModel db, Model
db
|
[
{
"context": "nit_pwmgr : () ->\n pwopts =\n password : @password()\n salt : @salt_or_email()\n inte",
"end": 3237,
"score": 0.9808381795883179,
"start": 3228,
"tag": "PASSWORD",
"value": "@password"
}
] | src/command/base.iced | substack/keybase-client | 1 | log = require '../log'
{PasswordManager} = require '../pw'
{base58} = require '../basex'
crypto = require 'crypto'
myfs = require '../fs'
fs = require 'fs'
{rmkey} = require '../util'
{add_option_dict} = require './argparse'
{Infile, Outfile} = require '../file'
{EscOk} = require 'iced-error'
{E} = require '../err'
{constants} = require '../constants'
{join} = require 'path'
FN = constants.filenames
SRV = constants.server
SC = constants.security
triplesec = require 'triplesec'
req = require '../req'
{env} = require '../env'
{make_esc} = require 'iced-error'
#=========================================================================
pick = (args...) ->
for a in args
return a if a?
return null
#=========================================================================
exports.Base = class Base
#-------------------
constructor : (@parent) ->
@batch = false # We sometimes turn this on if we're reading from stdin
#-------------------
set_argv : (a) ->
@argv = a
return null
#-------------------
@OPTS :
p :
alias : 'passhrase'
help : 'passphrase used to log into keybase'
c :
alias : 'config'
help : "a configuration file (#{join '~', FN.config_dir, FN.config_file})"
i :
alias : "interactive"
action : "storeTrue"
help : "interactive mode"
d:
alias: "debug"
action : "storeTrue"
help : "debug mode"
q:
alias : "quiet"
action : "storeTrue"
help : "quiet mode; only show errors, suppress info and warnings"
C:
alias : "no-color"
action : "storeTrue"
help : "disable logging colors"
port :
help : 'which port to connect to'
"no-tls" :
action : "storeTrue"
help : "turn off HTTPS/TLS (on by default)"
"host" :
help : 'which host to connect to'
"api-uri-prefix" :
help : "the API prefix to use (#{SRV.api_uri_prefix})"
B :
alias : "batch"
action : "storeTrue"
help : "batch mode; disable all prompts"
"preserve-tmp-keyring" :
action : "storeTrue"
help : "preserve the temporary keyring; don't clean it up"
"homedir" :
help : "specify a non-standard home directory; look for GPG keychain there"
g :
alias : "gpg"
help : "specify an alternate gpg command"
x :
alias : 'proxy'
help : 'specify a proxy server to all HTTPS requests'
"proxy-ca-certs" :
action : "append"
help : "specify 1 or more CA certs (in a file)"
O :
alias : "no-gpg-options"
action : "storeTrue"
help : "disable the GPG options file for temporary keyring operations"
M :
alias : "merkle-checks"
help : "check that users' chains are reflected in sitewide state, one of {none,soft,strict}; soft by default"
#-------------------
use_config : () -> true
use_session : () -> false
use_db : () -> true
use_gpg : () -> true
config_opts : () -> {}
needs_configuration : () -> false
#-------------------
make_outfile : (cb) ->
await Outfile.open { target : @output_filename() }, defer err, file
cb err, file
#-------------------
_init_pwmgr : () ->
pwopts =
password : @password()
salt : @salt_or_email()
interactive : @argv.interactive
@pwmgr.init pwopts
#-------------------
password : () -> pick @argv.password, @config.password()
#----------
assertions : (cb) ->
esc = make_esc cb, "Base::assertions"
await @assert_configured esc defer()
cb null
#----------
assert_configured : (cb) ->
err = null
if @needs_configuration() and not(env().is_configured())
err = new E.NotConfiguredError "you're not logged in. Please run `keybase login` or `keybase join`"
cb err
#=========================================================================
| 87235 | log = require '../log'
{PasswordManager} = require '../pw'
{base58} = require '../basex'
crypto = require 'crypto'
myfs = require '../fs'
fs = require 'fs'
{rmkey} = require '../util'
{add_option_dict} = require './argparse'
{Infile, Outfile} = require '../file'
{EscOk} = require 'iced-error'
{E} = require '../err'
{constants} = require '../constants'
{join} = require 'path'
FN = constants.filenames
SRV = constants.server
SC = constants.security
triplesec = require 'triplesec'
req = require '../req'
{env} = require '../env'
{make_esc} = require 'iced-error'
#=========================================================================
pick = (args...) ->
for a in args
return a if a?
return null
#=========================================================================
exports.Base = class Base
#-------------------
constructor : (@parent) ->
@batch = false # We sometimes turn this on if we're reading from stdin
#-------------------
set_argv : (a) ->
@argv = a
return null
#-------------------
@OPTS :
p :
alias : 'passhrase'
help : 'passphrase used to log into keybase'
c :
alias : 'config'
help : "a configuration file (#{join '~', FN.config_dir, FN.config_file})"
i :
alias : "interactive"
action : "storeTrue"
help : "interactive mode"
d:
alias: "debug"
action : "storeTrue"
help : "debug mode"
q:
alias : "quiet"
action : "storeTrue"
help : "quiet mode; only show errors, suppress info and warnings"
C:
alias : "no-color"
action : "storeTrue"
help : "disable logging colors"
port :
help : 'which port to connect to'
"no-tls" :
action : "storeTrue"
help : "turn off HTTPS/TLS (on by default)"
"host" :
help : 'which host to connect to'
"api-uri-prefix" :
help : "the API prefix to use (#{SRV.api_uri_prefix})"
B :
alias : "batch"
action : "storeTrue"
help : "batch mode; disable all prompts"
"preserve-tmp-keyring" :
action : "storeTrue"
help : "preserve the temporary keyring; don't clean it up"
"homedir" :
help : "specify a non-standard home directory; look for GPG keychain there"
g :
alias : "gpg"
help : "specify an alternate gpg command"
x :
alias : 'proxy'
help : 'specify a proxy server to all HTTPS requests'
"proxy-ca-certs" :
action : "append"
help : "specify 1 or more CA certs (in a file)"
O :
alias : "no-gpg-options"
action : "storeTrue"
help : "disable the GPG options file for temporary keyring operations"
M :
alias : "merkle-checks"
help : "check that users' chains are reflected in sitewide state, one of {none,soft,strict}; soft by default"
#-------------------
use_config : () -> true
use_session : () -> false
use_db : () -> true
use_gpg : () -> true
config_opts : () -> {}
needs_configuration : () -> false
#-------------------
make_outfile : (cb) ->
await Outfile.open { target : @output_filename() }, defer err, file
cb err, file
#-------------------
_init_pwmgr : () ->
pwopts =
password : <PASSWORD>()
salt : @salt_or_email()
interactive : @argv.interactive
@pwmgr.init pwopts
#-------------------
password : () -> pick @argv.password, @config.password()
#----------
assertions : (cb) ->
esc = make_esc cb, "Base::assertions"
await @assert_configured esc defer()
cb null
#----------
assert_configured : (cb) ->
err = null
if @needs_configuration() and not(env().is_configured())
err = new E.NotConfiguredError "you're not logged in. Please run `keybase login` or `keybase join`"
cb err
#=========================================================================
| true | log = require '../log'
{PasswordManager} = require '../pw'
{base58} = require '../basex'
crypto = require 'crypto'
myfs = require '../fs'
fs = require 'fs'
{rmkey} = require '../util'
{add_option_dict} = require './argparse'
{Infile, Outfile} = require '../file'
{EscOk} = require 'iced-error'
{E} = require '../err'
{constants} = require '../constants'
{join} = require 'path'
FN = constants.filenames
SRV = constants.server
SC = constants.security
triplesec = require 'triplesec'
req = require '../req'
{env} = require '../env'
{make_esc} = require 'iced-error'
#=========================================================================
pick = (args...) ->
for a in args
return a if a?
return null
#=========================================================================
exports.Base = class Base
#-------------------
constructor : (@parent) ->
@batch = false # We sometimes turn this on if we're reading from stdin
#-------------------
set_argv : (a) ->
@argv = a
return null
#-------------------
@OPTS :
p :
alias : 'passhrase'
help : 'passphrase used to log into keybase'
c :
alias : 'config'
help : "a configuration file (#{join '~', FN.config_dir, FN.config_file})"
i :
alias : "interactive"
action : "storeTrue"
help : "interactive mode"
d:
alias: "debug"
action : "storeTrue"
help : "debug mode"
q:
alias : "quiet"
action : "storeTrue"
help : "quiet mode; only show errors, suppress info and warnings"
C:
alias : "no-color"
action : "storeTrue"
help : "disable logging colors"
port :
help : 'which port to connect to'
"no-tls" :
action : "storeTrue"
help : "turn off HTTPS/TLS (on by default)"
"host" :
help : 'which host to connect to'
"api-uri-prefix" :
help : "the API prefix to use (#{SRV.api_uri_prefix})"
B :
alias : "batch"
action : "storeTrue"
help : "batch mode; disable all prompts"
"preserve-tmp-keyring" :
action : "storeTrue"
help : "preserve the temporary keyring; don't clean it up"
"homedir" :
help : "specify a non-standard home directory; look for GPG keychain there"
g :
alias : "gpg"
help : "specify an alternate gpg command"
x :
alias : 'proxy'
help : 'specify a proxy server to all HTTPS requests'
"proxy-ca-certs" :
action : "append"
help : "specify 1 or more CA certs (in a file)"
O :
alias : "no-gpg-options"
action : "storeTrue"
help : "disable the GPG options file for temporary keyring operations"
M :
alias : "merkle-checks"
help : "check that users' chains are reflected in sitewide state, one of {none,soft,strict}; soft by default"
#-------------------
use_config : () -> true
use_session : () -> false
use_db : () -> true
use_gpg : () -> true
config_opts : () -> {}
needs_configuration : () -> false
#-------------------
make_outfile : (cb) ->
await Outfile.open { target : @output_filename() }, defer err, file
cb err, file
#-------------------
_init_pwmgr : () ->
pwopts =
password : PI:PASSWORD:<PASSWORD>END_PI()
salt : @salt_or_email()
interactive : @argv.interactive
@pwmgr.init pwopts
#-------------------
password : () -> pick @argv.password, @config.password()
#----------
assertions : (cb) ->
esc = make_esc cb, "Base::assertions"
await @assert_configured esc defer()
cb null
#----------
assert_configured : (cb) ->
err = null
if @needs_configuration() and not(env().is_configured())
err = new E.NotConfiguredError "you're not logged in. Please run `keybase login` or `keybase join`"
cb err
#=========================================================================
|
[
{
"context": " exist', ->\n records = [\n { id: 1, name: 'Nikki' }\n { id: 2, name: 'John' }\n ]\n\n stubR",
"end": 281,
"score": 0.9997453093528748,
"start": 276,
"tag": "NAME",
"value": "Nikki"
},
{
"context": " { id: 1, name: 'Nikki' }\n { id: 2, name: 'J... | spec/crud/where.coffee | EduardoluizSanBlasDeveloper0/ROR-crud-example | 506 | describe '#where', ->
it 'should return zero initial records', ->
stubResponse success: true, records: [], ->
User.where().then (users) ->
expect(users).to.eql([])
it 'should return all records when they exist', ->
records = [
{ id: 1, name: 'Nikki' }
{ id: 2, name: 'John' }
]
stubResponse success: true, records: records, ->
User.where().then (users) ->
expect(users).to.eql(records)
| 184872 | describe '#where', ->
it 'should return zero initial records', ->
stubResponse success: true, records: [], ->
User.where().then (users) ->
expect(users).to.eql([])
it 'should return all records when they exist', ->
records = [
{ id: 1, name: '<NAME>' }
{ id: 2, name: '<NAME>' }
]
stubResponse success: true, records: records, ->
User.where().then (users) ->
expect(users).to.eql(records)
| true | describe '#where', ->
it 'should return zero initial records', ->
stubResponse success: true, records: [], ->
User.where().then (users) ->
expect(users).to.eql([])
it 'should return all records when they exist', ->
records = [
{ id: 1, name: 'PI:NAME:<NAME>END_PI' }
{ id: 2, name: 'PI:NAME:<NAME>END_PI' }
]
stubResponse success: true, records: records, ->
User.where().then (users) ->
expect(users).to.eql(records)
|
[
{
"context": "lled_trades\n position: pos \n dealer: name\n open_positions: open_positions[name]\n ",
"end": 11494,
"score": 0.9924793243408203,
"start": 11490,
"tag": "NAME",
"value": "name"
},
{
"context": "ce idx, 1 \n\n if !config.simulation\n pos.key... | pusher.coffee | invisible-college/meth | 7 | require './shared'
global.feature_engine = require './feature_engine'
exchange = require './exchange'
global.dealers = {}
global.open_positions = {}
global.log_error = (halt, data) ->
if !config.simulation
errors = bus.fetch 'errors'
errors.logs ||= []
error = [tick.time, JSON.stringify(data), new Error().stack]
errors.logs.push error
bus.save errors
if config.mailgun
try
mailgun = require('mailgun-js')
apiKey: config.mailgun.apiKey
domain: config.mailgun.domain
send_email = ({subject, message, recipient}) ->
mailgun.messages().send
from: config.mailgun.from
to: recipient or config.mailgun.recipient
subject: subject
text: message
catch e
send_email = ->
console.error 'could not send message beecause mailgun failed to load'
send_email
subject: "Error for #{config.exchange} #{config.c1}-#{config.c2} at #{tick.time}"
message: """
Data:
#{error[1]}
Trace:
#{error[2]}
"""
if halt
bus.save {key: 'time_to_die', now: true}
console.error "Halting because of:", data
else
console.error data
learn_strategy = (name, teacher, strat_dealers) ->
console.assert uniq(strat_dealers), {message: 'dealers aren\'t unique!', dealers: strat_dealers}
operation = from_cache 'operation'
operation[name] =
key: name
dealers: []
save operation
strategy = operation[name]
needs_save = false
# name dealer names more space efficiently :)
names = {}
params = {}
for dealer_conf,idx in strat_dealers
for k,v of dealer_conf
params[k] ||= {}
params[k][v] = 1
differentiating_params = {}
for k,v of params
if Object.keys(v).length > 1
differentiating_params[k] = 1
######
for dealer_conf,idx in strat_dealers
dealer_conf = defaults dealer_conf,
max_open_positions: 9999999
if dealer_conf.max_open_positions > 1 && !dealer_conf.cooloff_period?
dealer_conf.cooloff_period = 4 * 60
dealer = teacher dealer_conf
name = get_name strategy.key, dealer_conf, differentiating_params
if name == strategy.key
name = "#{strategy.key}-dealer"
key = name
dealer_data = fetch key
if !dealer_data.positions # initialize
dealer_data = extend dealer_data,
parent: '/' + strategy.key
positions: []
dealer_data.settings = dealer_conf
console.assert dealer_data.settings.series || dealer.eval_unfilled_trades?,
message: 'Dealer has no eval_unfilled_trades method defined'
dealer: name
dealers[key] = dealer
if ('/' + key) not in strategy.dealers
strategy.dealers.push '/' + key
needs_save = true
save dealer_data
if needs_save
save strategy
init = ({history, clear_all_positions, take_position, cancel_unfilled, update_trade}) ->
global.history = history
for name in get_dealers()
dealer_data = from_cache name
save dealer_data
reset_open()
clear_positions() if clear_all_positions
pusher.last_checked_new_position = {}
pusher.last_checked_exit = {}
pusher.last_checked_unfilled = {}
tick_interval = null
tick_interval_no_unfilled = null
for name in get_all_actors()
dealer_data = from_cache name
dealer = dealers[name]
has_open_positions = open_positions[name]?.length > 0
# create a feature engine that will generate features with trades quantized
# by resolution seconds
for d in dealer.dependencies
resolution = d[0]
engine = feature_engine.create resolution
engine.subscribe_dealer name, dealer, dealer_data
dealer.features ?= {}
dealer.features[resolution] = engine
pusher.last_checked_new_position[name] = 0
pusher.last_checked_exit[name] = 0
pusher.last_checked_unfilled[name] = 0
intervals = [
dealer_data.settings.eval_entry_every_n_seconds or config.eval_entry_every_n_seconds
]
if !dealer_data.settings.never_exits
intervals.push dealer_data.settings.eval_exit_every_n_seconds or config.eval_exit_every_n_seconds
if !dealer_data.settings.series
intervals.push dealer_data.settings.eval_unfilled_every_n_seconds or config.eval_unfilled_every_n_seconds
if !tick_interval
tick_interval = intervals[0]
intervals.push tick_interval
tick_interval = Math.greatest_common_divisor intervals
if !dealer_data.settings.series
intervals = [
dealer_data.settings.eval_entry_every_n_seconds or config.eval_entry_every_n_seconds
]
if !dealer_data.settings.never_exits
intervals.push dealer_data.settings.eval_exit_every_n_seconds or config.eval_exit_every_n_seconds
if !tick_interval_no_unfilled
tick_interval_no_unfilled = intervals[0]
intervals.push tick_interval_no_unfilled
tick_interval_no_unfilled = Math.greatest_common_divisor intervals
history_buffer = (e.num_frames * res + 1 for res, e of feature_engine.resolutions)
history_buffer = Math.max.apply null, history_buffer
history.set_longest_requested_history history_buffer
pusher.take_position = take_position if take_position
pusher.cancel_unfilled = cancel_unfilled if cancel_unfilled
pusher.update_trade = update_trade if update_trade
pusher.tick_interval = tick_interval
pusher.tick_interval_no_unfilled = tick_interval_no_unfilled
clear_positions = ->
for name in get_all_actors()
dealer = fetch(name)
dealer.positions = []
save dealer
reset_open = ->
global.open_positions = {}
for name in get_dealers()
dealer = fetch(name)
open_positions[name] = (p for p in (dealer.positions or []) when !p.closed)
find_opportunities = (balance) ->
console.assert tick?.time?,
message: "tick.time is not defined!"
tick: tick
# identify new positions and changes to old positions (opportunities)
opportunities = []
all_actors = get_all_actors()
################################
## 1. Identify new opportunities
for name in all_actors
dealer_data = from_cache name
settings = dealer_data.settings
dealer = dealers[name]
eval_entry_every_n_seconds = settings.eval_entry_every_n_seconds or config.eval_entry_every_n_seconds
if tick.time - pusher.last_checked_new_position[name] < eval_entry_every_n_seconds
continue
if dealer_data.locked
locked_for = tick.time - dealer_data.locked
console.log "Skipping #{name} because it is locked (locked for #{locked_for}s)"
continue
pusher.last_checked_new_position[name] = tick.time
zzz = Date.now()
# A strategy can't have too many positions on the books at once...
if settings.series || settings.never_exits || open_positions[name]?.length < settings.max_open_positions
yyy = Date.now()
spec = dealer.eval_whether_to_enter_new_position
dealer: name
open_positions: open_positions[name]
balance: balance
t_.eval_pos += Date.now() - yyy if t_?
#yyy = Date.now()
if spec
position = create_position spec, name
#t_.create_pos += Date.now() - yyy if t_?
yyy = Date.now()
valid = position && is_valid_position(position)
found_match = false
if valid
sell = if position.entry.type == 'sell' then position.entry else position.exit
buy = if position.entry.type == 'buy' then position.entry else position.exit
opportunities.push
pos: position
action: 'create'
required_c2: if sell then sell.amount
required_c1: if buy then buy.amount * buy.rate
t_.check_new += Date.now() - zzz if t_?
continue if dealer_data.settings.series
##########################################
## 2. Handle positions that haven't exited
for name in all_actors when open_positions[name]?.length > 0
dealer_data = from_cache name
settings = dealer_data.settings
dealer = dealers[name]
continue if settings.series || settings.never_exits
eval_exit_every_n_seconds = settings.eval_exit_every_n_seconds or config.eval_exit_every_n_seconds
if tick.time - pusher.last_checked_exit[name] < eval_exit_every_n_seconds
continue
if dealer_data.locked
locked_for = tick.time - dealer_data.locked
if locked_for > 60 * 60 && locked_for < 68 * 60
log_error false, {message: "#{name} locked an excessive period of time.", dealer_data, locked_for}
continue
pusher.last_checked_exit[name] = tick.time
# see if any open positions want to exit
yyy = Date.now()
for pos in open_positions[name] when !pos.series_data && !pos.exit
opportunity = dealer.eval_whether_to_exit_position
position: pos
dealer: name
open_positions: open_positions[name]
balance: balance
if opportunity
if opportunity.action == 'exit'
type = if pos.entry.type == 'buy' then 'sell' else 'buy'
if type == 'sell'
amt = opportunity.amount or pos.entry.amount
if amt < exchange.minimum_order_size()
opportunity.amount = 1.02 * exchange.minimum_order_size()
#log_error false, {message: 'resizing order so it is above minimum', opportunity, amt}
opportunity.required_c2 = opportunity.amount or pos.entry.amount
else
total = (opportunity.amount or pos.entry.amount) * opportunity.rate
if total < exchange.minimum_order_size(config.c1)
opportunity.amount = 1.02 * exchange.minimum_order_size(config.c1) / opportunity.rate
#log_error false, {message: 'resizing order so it is above minimum', opportunity, total}
opportunity.required_c1 = (opportunity.amount or pos.entry.amount) * opportunity.rate
opportunity.pos = pos
opportunities.push opportunity
t_.check_exit += Date.now() - yyy if t_?
##########################################
## 3. Handle unfilled orders
for name in all_actors when open_positions[name]?.length > 0
dealer_data = from_cache name
settings = dealer_data.settings
dealer = dealers[name]
continue if settings.series
eval_unfilled_every_n_seconds = settings.eval_unfilled_every_n_seconds or config.eval_unfilled_every_n_seconds
if tick.time - pusher.last_checked_unfilled[name] < eval_unfilled_every_n_seconds
continue
pusher.last_checked_unfilled[name] = tick.time
yyy = Date.now()
for pos in open_positions[name] when (pos.entry && !pos.entry.closed) || \
(pos.exit && !pos.exit.closed)
unfilled = if pos.entry.closed then pos.exit else pos.entry
if !config.simulation
continue if dealer_data.locked
continue if config.exchange == 'gdax' && \
unfilled.flags?.order_method > 1 && \
unfilled.latest_order > tick.started && \ # make sure trades don't get stuck if there's a restart
unfilled.current_order
# because we're in the midst of a dynamically-updating order
opportunity = dealer.eval_unfilled_trades
position: pos
dealer: name
open_positions: open_positions[name]
balance: balance
if opportunity
opportunity.pos = pos
opportunities.push opportunity
t_.check_unfilled += Date.now() - yyy if t_?
if !uniq(opportunities)
msg =
message: 'Duplicate opportunities'
opportunities: opportunities
for opp,idx in opportunities
msg["pos-#{idx}"] = opp.pos
log_error true, msg
return []
opportunities
execute_opportunities = (opportunities) ->
return if !opportunities || opportunities.length == 0
for opportunity in opportunities
pos = opportunity.pos
switch opportunity.action
when 'create'
take_position pos
when 'cancel_unfilled'
cancel_unfilled pos
when 'exit'
exit_position {pos, opportunity}
take_position pos
when 'update_exit'
update_trade {pos, trade: pos.exit, opportunity}
when 'update_entry'
update_trade {pos, trade: pos.entry, opportunity}
check_wallet = (opportunities, balance) ->
doable = []
required_c1 = required_c2 = 0
by_dealer = {}
ops_cnt = 0
for opportunity in opportunities
if (!opportunity.required_c2 && !opportunity.required_c1) || opportunity.pos.series_data
doable.push opportunity
continue
by_dealer[opportunity.pos.dealer] ||= []
by_dealer[opportunity.pos.dealer].push opportunity
ops_cnt += 1
return doable if ops_cnt == 0
all_avail_BTC = balance.balances.c1
all_avail_ETH = balance.balances.c2
for name, ops of by_dealer
dbalance = balance[name].balances
avail_BTC = dbalance.c1
avail_ETH = dbalance.c2
# console.log {message: 'yo', dealer: name, avail_ETH, avail_BTC, dealer_opportunities: by_dealer[name], balance: dbalance}
if avail_BTC < 0 || avail_ETH < 0
out = if config.simulation then console.log else console.log
out false, {message: 'negative balance', dealer: name, avail_ETH, avail_BTC, dealer_opportunities: by_dealer[name], balance: dbalance}
continue
for op in ops
r_ETH = op.required_c2 or 0
r_BTC = op.required_c1 or 0
if avail_ETH >= r_ETH && avail_BTC >= r_BTC && all_avail_ETH >= r_ETH && all_avail_BTC >= r_BTC
doable.push op
avail_ETH -= r_ETH
avail_BTC -= r_BTC
all_avail_ETH -= r_ETH
all_avail_BTC -= r_BTC
doable
create_position = (spec, dealer) ->
return null if !spec
buy = spec.buy
sell = spec.sell
spec.buy = undefined; spec.sell = undefined
simultaneous = buy && sell
if !buy?.entry && !sell?.entry
if simultaneous || buy
buy.entry = true
else
sell.entry = true
if config.exchange == 'gdax'
for trade in [buy, sell] when trade
trade.rate = parseFloat trade.rate.toFixed(exchange.minimum_rate_precision())
trade.amount = parseFloat((Math.floor(trade.amount * 1000000) / 1000000).toFixed(6))
if buy
buy.type = 'buy'
buy.to_fill ||= if buy.flags?.market then buy.amount * buy.rate else buy.amount
if sell
sell.type = 'sell'
sell.to_fill ||= sell.amount
position = extend {}, spec,
key: if !config.simulation then "position/#{dealer}-#{tick.time}"
dealer: dealer
created: tick.time
entry: if buy?.entry then buy else sell
exit: if simultaneous then (if sell.entry then buy else sell)
for trade in [position.entry, position.exit] when trade
defaults trade,
created: tick.time
fills: []
original_rate: trade.rate
original_amt: trade.amount
position
exit_position = ({pos, opportunity}) ->
if !pos.entry
log_error false,
message: 'position can\'t be exited because entry is undefined'
pos: pos
opportunity: opportunity
return
rate = opportunity.rate
market_trade = opportunity.flags?.market
amount = opportunity.amount or pos.entry.amount
type = if pos.entry.type == 'buy' then 'sell' else 'buy'
trade = pos.exit =
amount: amount
type: type
rate: rate
to_fill: if market_trade && type == 'buy' then amount * rate else amount
flags: if opportunity.flags? then opportunity.flags
entry: false
created: tick.time
fills: []
original_rate: rate
original_amt: amount
if config.exchange == 'gdax'
trade.rate = parseFloat trade.rate.toFixed(exchange.minimum_rate_precision())
trade.amount = parseFloat((Math.floor(trade.amount * 1000000) / 1000000).toFixed(6))
pos
# Executes the position entry and exit, if they exist and it hasn't already been done.
take_position = (pos) ->
if pusher.take_position && !pos.series_data
pusher.take_position pos, (error, trades_left) ->
took_position(pos, error, trades_left == 0)
else
took_position pos
took_position = (pos, error, destroy_if_all_errored) ->
if !pos.series_data && config.log_level > 1
console.log "TOOK POSITION", pos, {error, destroy_if_all_errored}
if error && !config.simulation
for trade in ['entry', 'exit'] when pos[trade]
if !pos[trade].current_order
if !(pos[trade].orders?.length > 0)
pos[trade] = undefined
else
console.error
message: "We've partially filled a trade, but an update order occurred. I think Pusher will handle this properly though :p"
pos: pos
if !pos.exit && !pos.entry && destroy_if_all_errored
return destroy_position pos
bus.save pos if pos.key
if !from_cache(pos.dealer).positions
throw "#{pos.dealer} not properly initialized with positions"
if from_cache(pos.dealer).positions.indexOf(pos) == -1
from_cache(pos.dealer).positions.push pos
if !pos.series_data
open_positions[pos.dealer].push pos
update_trade = ({pos, trade, opportunity}) ->
rate = opportunity.rate
if config.exchange == 'gdax'
rate = parseFloat rate.toFixed(exchange.minimum_rate_precision())
if isNaN(rate) || !rate || rate == 0
return log_error false,
message: 'Bad rate for updating trade!',
rate: rate
pos: pos
if trade.to_fill == 0
return log_error false,
message: 'trying to move a trade that should already be closed'
pos: pos
trade: trade
fills: trade.fills
if trade.flags?.market
return log_error true,
message: 'trying to move a market exit'
trade: trade
if trade.type == 'buy'
# we need to adjust the *amount* we're buying because our buying power has changed
amt_purchased = 0
for f in (trade.fills or [])
amt_purchased += f.amount
amt_remaining = (trade.original_amt or trade.amount) - amt_purchased
total_remaining = amt_remaining * (trade.original_rate or trade.rate)
dbalance = from_cache('balances')[pos.dealer]
total_available = dbalance.balances.c1 + dbalance.on_order.c1
dealer = from_cache(pos.dealer)
total_amt_purchased = 0
total_amt_sold = 0
for pos in (dealer.positions or [])
for t in [pos.entry, pos.exit] when t
for fill in (t.fills or [])
if t.type == 'buy'
total_amt_purchased += fill.amount
else
total_amt_sold += fill.amount
total_diff = total_amt_purchased - total_amt_sold
if total_remaining > .99 * total_available
# console.error 'total remaining was too much!', {dealer: pos.dealer, total_diff, amt_remaining, amt_purchased, total_available, total_remaining, orig_amt: trade.original_amt, orig_rate: trade.original_rate, rate, balance: from_cache('balances')[pos.dealer]}
total_remaining = .99 * total_available
new_amount = total_remaining / rate
else
new_amount = trade.to_fill
if config.exchange == 'gdax'
new_amount = parseFloat((Math.floor(new_amount * 1000000) / 1000000).toFixed(6))
trade.rate = rate
if trade.type == 'buy'
trade.amount = new_amount + amt_purchased
trade.to_fill = new_amount
if pusher.update_trade
pusher.update_trade
pos: pos
trade: trade # orphanable
rate: rate
amount: trade.to_fill
cancel_unfilled = (pos) ->
if pusher.cancel_unfilled
dealer = from_cache(pos.dealer)
dealer.locked = tick.time
bus.save dealer
pusher.cancel_unfilled pos, ->
dealer.locked = false
bus.save dealer
canceled_unfilled pos
else
canceled_unfilled pos
# make all meta data updates after trades have potentially been canceled.
canceled_unfilled = (pos) ->
for trade in ['exit', 'entry'] when !pos[trade]?.closed
if pos[trade] && !pos[trade].current_order
if pos[trade].fills.length == 0
pos[trade] = undefined
if pos.exit && !pos.entry
pos.entry = pos.exit
pos.exit = undefined
else if !pos.entry && !pos.exit
destroy_position pos
destroy_position = (pos) ->
positions = from_cache(pos.dealer).positions
open = open_positions[pos.dealer]
if !config.simulation && !pos.key
return log_error true, {message: 'trying to destroy a position without a key', pos: pos}
idx = positions.indexOf(pos)
if idx == -1
console.log "COULD NOT DESTROY position #{pos.key}...not found in positions", pos
else
positions.splice idx, 1
idx = open.indexOf(pos)
if idx == -1
console.log 'CANT DESTROY POSITION THAT ISNT IN OPEN POSITIONS'
else
open.splice idx, 1
if !config.simulation
pos.key = undefined
################
# Conditions for whether to open a position:
#
# automatically applied:
# - cool off period
# - trade closeness
# - max open positions
# automatically applied if entry & exit specified immediately:
# - profit threshold
LOG_REASONS = false
is_valid_position = (pos) ->
return false if !pos
return true if pos.series_data
settings = get_settings(pos.dealer)
if LOG_REASONS
failure_reasons = []
entry = pos.entry
exit = pos.exit
# Non-zero rates
if (entry && (entry.rate == 0 or isNaN(entry.rate))) || (exit && (exit.rate == 0 or isNaN(exit.rate)))
# console.log "Can't have a zero rate"
return false if !LOG_REASONS
failure_reasons.push "Can't have a zero rate"
if entry.amount <= exchange.minimum_order_size() || exit?.amount <= exchange.minimum_order_size()
return false if !LOG_REASONS
failure_reasons.push "Can't have a negative amount"
# A strategy can't have too many positions on the books at once...
if !settings.never_exits && open_positions[pos.dealer].length > settings.max_open_positions - 1
# console.log "#{settings.max_open_positions} POSITIONS ALREADY ON BOOKS"
return false if !LOG_REASONS
failure_reasons.push "#{settings.max_open_positions} POSITIONS ALREADY ON BOOKS"
# Space positions from the same strategy out
position_set = if settings.never_exits then from_cache(pos.dealer).positions else open_positions[pos.dealer]
for other_pos in position_set
if tick.time - other_pos.created < settings.cooloff_period
# console.log "TOO MANY POSITIONS IN LAST #{settings.cooloff_period} SECONDS"
return false if !LOG_REASONS
failure_reasons.push "TOO MANY POSITIONS IN LAST #{settings.cooloff_period} SECONDS"
break
if settings.alternating_types
buys = sells = 0
for other_pos in open_positions[pos.dealer]
if other_entry.type == 'buy'
buys++
else
sells++
if (entry.type == 'buy' && buys > sells) || (entry.type == 'sell' && sells > buys)
return false if !LOG_REASONS
failure_reasons.push "Positions need to alternate"
return true #failure_reasons.length == 0
global.close_trade = (pos, trade) ->
amount = total = 0
c1_fees = c2_fees = 0
last = null
for fill in (trade.fills or [])
total += fill.total
amount += fill.amount
if trade.type == 'buy' && config.exchange == 'poloniex'
c2_fees += fill.fee
else
c1_fees += fill.fee
if fill.date? && fill.date > last
last = fill.date
extend trade, {amount, total, c1_fees, c2_fees}
trade.to_fill = 0
trade.closed = last or tick.time
trade.rate = total / amount
if trade.original_rate?
original_rate = trade.original_rate
original_amt = trade.original_amt or trade.amount
rate_before_fee = total / amount
rate_after_fee = (total - c1_fees) / (amount - c2_fees)
slipped = Math.abs( original_rate - rate_before_fee ) / original_rate
slipped_amt = slipped * original_amt
overhead = Math.abs(original_rate - rate_after_fee) / original_rate
overhead_amt = original_amt * overhead
extend trade, {slipped, slipped_amt, overhead, overhead_amt}
# Execution priority weights for opportunity actions; hustle() sorts
# descending by these, so cancels/exits run before new position creation.
action_priorities =
  create: 0
  exit: 1
  update_exit: 2
  cancel_unfilled: 3
# Per-tick driver: collect opportunities, order them (exits/cancels before new
# entries), optionally filter by wallet funds, then execute.
# `t_` is an optional profiling accumulator; timing lines are no-ops without it.
hustle = (balance) ->
  yyy = Date.now()
  opportunities = find_opportunities balance
  t_.hustle += Date.now() - yyy if t_?
  if opportunities.length > 0
    yyy = Date.now()
    # prioritize exits & cancelations over new positions
    opportunities.sort (a,b) -> action_priorities[b.action] - action_priorities[a.action]
    if config.enforce_balance
      fillable_opportunities = check_wallet opportunities, balance
    else
      fillable_opportunities = opportunities
    # if fillable_opportunities.length != opportunities.length
    #   console.log "Slimmed opportunities from #{opportunities.length} to #{fillable_opportunities.length}"
    # Global kill-switches: config.disabled and global.no_new_orders suppress execution.
    if fillable_opportunities.length > 0 && !config.disabled && !global.no_new_orders
      execute_opportunities fillable_opportunities
    t_.exec += Date.now() - yyy if t_?
pusher = module.exports = {init, hustle, learn_strategy, destroy_position, reset_open}
require './shared'
global.feature_engine = require './feature_engine'
exchange = require './exchange'
global.dealers = {}
global.open_positions = {}
# Record an error: persist it to the bus error log, optionally email it via
# mailgun, and either halt the operation (halt=true) or just log to console.
# Relies on file-level globals: config, bus, tick.
global.log_error = (halt, data) ->
  if !config.simulation
    errors = bus.fetch 'errors'
    errors.logs ||= []
    error = [tick.time, JSON.stringify(data), new Error().stack]
    errors.logs.push error
    bus.save errors
    if config.mailgun
      try
        # BUGFIX: mailgun-js exports a factory that must be CALLED with the
        # credentials; the old code passed them as an ignored second argument
        # to require(), leaving `mailgun` as the bare factory function.
        mailgun = require('mailgun-js')(
          apiKey: config.mailgun.apiKey
          domain: config.mailgun.domain
        )
        send_email = ({subject, message, recipient}) ->
          mailgun.messages().send
            from: config.mailgun.from
            to: recipient or config.mailgun.recipient
            subject: subject
            text: message
      catch e
        # mailgun couldn't be loaded/configured; degrade to a console warning
        send_email = ->
          console.error 'could not send message because mailgun failed to load'
      send_email
        subject: "Error for #{config.exchange} #{config.c1}-#{config.c2} at #{tick.time}"
        message: """
          Data:
          #{error[1]}
          Trace:
          #{error[2]}
        """
  if halt
    # Signal the main loop to shut down.
    bus.save {key: 'time_to_die', now: true}
    console.error "Halting because of:", data
  else
    console.error data
# Register a strategy: instantiate one dealer per config in `strat_dealers`
# via the `teacher` factory, derive compact per-dealer names from the params
# that actually differ between configs, and persist dealer records under the
# strategy.  Globals: uniq, from_cache, save, fetch, defaults, extend,
# get_name, dealers.
learn_strategy = (name, teacher, strat_dealers) ->
  console.assert uniq(strat_dealers), {message: 'dealers aren\'t unique!', dealers: strat_dealers}
  operation = from_cache 'operation'
  # (Re)initialize this strategy's entry in the operation record.
  operation[name] =
    key: name
    dealers: []
  save operation
  strategy = operation[name]
  needs_save = false
  # name dealer names more space efficiently :)
  names = {}
  params = {}
  # Collect every param value seen across the dealer configs...
  for dealer_conf,idx in strat_dealers
    for k,v of dealer_conf
      params[k] ||= {}
      params[k][v] = 1
  # ...and keep only params that take more than one value — those are the
  # ones worth encoding into the dealer's name.
  differentiating_params = {}
  for k,v of params
    if Object.keys(v).length > 1
      differentiating_params[k] = 1
  ######
  for dealer_conf,idx in strat_dealers
    dealer_conf = defaults dealer_conf,
      max_open_positions: 9999999
    # Dealers allowed multiple concurrent positions get a default 4-minute cooloff.
    if dealer_conf.max_open_positions > 1 && !dealer_conf.cooloff_period?
      dealer_conf.cooloff_period = 4 * 60
    dealer = teacher dealer_conf
    name = get_name strategy.key, dealer_conf, differentiating_params
    if name == strategy.key
      name = "#{strategy.key}-dealer"
    key = name
    dealer_data = fetch key
    if !dealer_data.positions # initialize
      dealer_data = extend dealer_data,
        parent: '/' + strategy.key
        positions: []
    dealer_data.settings = dealer_conf
    # Non-series dealers must know how to re-evaluate unfilled trades.
    console.assert dealer_data.settings.series || dealer.eval_unfilled_trades?,
      message: 'Dealer has no eval_unfilled_trades method defined'
      dealer: name
    dealers[key] = dealer
    if ('/' + key) not in strategy.dealers
      strategy.dealers.push '/' + key
      needs_save = true
    save dealer_data
  if needs_save
    save strategy
# Wire the module up: store the history provider and order callbacks, reset
# evaluation clocks, attach feature engines per dealer dependency resolution,
# and compute the global tick intervals (GCD of every dealer's evaluation
# cadence, with and without the unfilled-order cadence).
init = ({history, clear_all_positions, take_position, cancel_unfilled, update_trade}) ->
  global.history = history
  for name in get_dealers()
    dealer_data = from_cache name
    save dealer_data
  reset_open()
  clear_positions() if clear_all_positions
  # Per-dealer timestamps of the last entry/exit/unfilled evaluation.
  pusher.last_checked_new_position = {}
  pusher.last_checked_exit = {}
  pusher.last_checked_unfilled = {}
  tick_interval = null
  tick_interval_no_unfilled = null
  for name in get_all_actors()
    dealer_data = from_cache name
    dealer = dealers[name]
    has_open_positions = open_positions[name]?.length > 0
    # create a feature engine that will generate features with trades quantized
    # by resolution seconds
    for d in dealer.dependencies
      resolution = d[0]
      engine = feature_engine.create resolution
      engine.subscribe_dealer name, dealer, dealer_data
      dealer.features ?= {}
      dealer.features[resolution] = engine
    pusher.last_checked_new_position[name] = 0
    pusher.last_checked_exit[name] = 0
    pusher.last_checked_unfilled[name] = 0
    # Gather this dealer's evaluation cadences, then fold them into the
    # running GCD so one global tick can service every dealer on time.
    intervals = [
      dealer_data.settings.eval_entry_every_n_seconds or config.eval_entry_every_n_seconds
    ]
    if !dealer_data.settings.never_exits
      intervals.push dealer_data.settings.eval_exit_every_n_seconds or config.eval_exit_every_n_seconds
    if !dealer_data.settings.series
      intervals.push dealer_data.settings.eval_unfilled_every_n_seconds or config.eval_unfilled_every_n_seconds
    if !tick_interval
      tick_interval = intervals[0]
    intervals.push tick_interval
    tick_interval = Math.greatest_common_divisor intervals
    # Same computation, but excluding the unfilled-order cadence.
    if !dealer_data.settings.series
      intervals = [
        dealer_data.settings.eval_entry_every_n_seconds or config.eval_entry_every_n_seconds
      ]
      if !dealer_data.settings.never_exits
        intervals.push dealer_data.settings.eval_exit_every_n_seconds or config.eval_exit_every_n_seconds
      if !tick_interval_no_unfilled
        tick_interval_no_unfilled = intervals[0]
      intervals.push tick_interval_no_unfilled
      tick_interval_no_unfilled = Math.greatest_common_divisor intervals
  # How far back history must reach to satisfy the largest feature window.
  history_buffer = (e.num_frames * res + 1 for res, e of feature_engine.resolutions)
  history_buffer = Math.max.apply null, history_buffer
  history.set_longest_requested_history history_buffer
  pusher.take_position = take_position if take_position
  pusher.cancel_unfilled = cancel_unfilled if cancel_unfilled
  pusher.update_trade = update_trade if update_trade
  pusher.tick_interval = tick_interval
  pusher.tick_interval_no_unfilled = tick_interval_no_unfilled
# Wipe the stored position history of every actor and persist each record.
clear_positions = ->
  for actor_name in get_all_actors()
    record = fetch actor_name
    record.positions = []
    save record
# Rebuild the in-memory open-position index from each dealer's stored
# positions, keeping only positions that have not been closed.
reset_open = ->
  global.open_positions = {}
  for dealer_name in get_dealers()
    stored = fetch(dealer_name).positions or []
    open_positions[dealer_name] = stored.filter (p) -> !p.closed
# Scan every actor and produce the list of actionable opportunities:
#   1. new positions to open,
#   2. exits for open positions,
#   3. adjustments to unfilled orders.
# Each check is throttled per dealer by its eval_*_every_n_seconds setting and
# skipped while the dealer is locked.  Returns [] (and halts via log_error)
# if the resulting set contains duplicates.
find_opportunities = (balance) ->
  console.assert tick?.time?,
    message: "tick.time is not defined!"
    tick: tick
  # identify new positions and changes to old positions (opportunities)
  opportunities = []
  all_actors = get_all_actors()
  ################################
  ## 1. Identify new opportunities
  for name in all_actors
    dealer_data = from_cache name
    settings = dealer_data.settings
    dealer = dealers[name]
    eval_entry_every_n_seconds = settings.eval_entry_every_n_seconds or config.eval_entry_every_n_seconds
    if tick.time - pusher.last_checked_new_position[name] < eval_entry_every_n_seconds
      continue
    if dealer_data.locked
      locked_for = tick.time - dealer_data.locked
      console.log "Skipping #{name} because it is locked (locked for #{locked_for}s)"
      continue
    pusher.last_checked_new_position[name] = tick.time
    zzz = Date.now()
    # A strategy can't have too many positions on the books at once...
    if settings.series || settings.never_exits || open_positions[name]?.length < settings.max_open_positions
      yyy = Date.now()
      spec = dealer.eval_whether_to_enter_new_position
        dealer: name
        open_positions: open_positions[name]
        balance: balance
      t_.eval_pos += Date.now() - yyy if t_?
      if spec
        position = create_position spec, name
        yyy = Date.now()
        valid = position && is_valid_position(position)
        if valid
          # Funding requirements: sells need c2 on hand, buys need c1.
          sell = if position.entry.type == 'sell' then position.entry else position.exit
          buy = if position.entry.type == 'buy' then position.entry else position.exit
          opportunities.push
            pos: position
            action: 'create'
            required_c2: if sell then sell.amount
            required_c1: if buy then buy.amount * buy.rate
    t_.check_new += Date.now() - zzz if t_?
    continue if dealer_data.settings.series
  ##########################################
  ## 2. Handle positions that haven't exited
  for name in all_actors when open_positions[name]?.length > 0
    dealer_data = from_cache name
    settings = dealer_data.settings
    dealer = dealers[name]
    continue if settings.series || settings.never_exits
    eval_exit_every_n_seconds = settings.eval_exit_every_n_seconds or config.eval_exit_every_n_seconds
    if tick.time - pusher.last_checked_exit[name] < eval_exit_every_n_seconds
      continue
    if dealer_data.locked
      locked_for = tick.time - dealer_data.locked
      # Warn (within a bounded window so it doesn't repeat forever) if a
      # dealer stays locked for over an hour.
      if locked_for > 60 * 60 && locked_for < 68 * 60
        log_error false, {message: "#{name} locked an excessive period of time.", dealer_data, locked_for}
      continue
    pusher.last_checked_exit[name] = tick.time
    # see if any open positions want to exit
    yyy = Date.now()
    for pos in open_positions[name] when !pos.series_data && !pos.exit
      opportunity = dealer.eval_whether_to_exit_position
        position: pos
        dealer: name
        open_positions: open_positions[name]
        balance: balance
      if opportunity
        if opportunity.action == 'exit'
          type = if pos.entry.type == 'buy' then 'sell' else 'buy'
          if type == 'sell'
            amt = opportunity.amount or pos.entry.amount
            # Bump undersized orders just above the exchange minimum.
            if amt < exchange.minimum_order_size()
              opportunity.amount = 1.02 * exchange.minimum_order_size()
            opportunity.required_c2 = opportunity.amount or pos.entry.amount
          else
            total = (opportunity.amount or pos.entry.amount) * opportunity.rate
            if total < exchange.minimum_order_size(config.c1)
              opportunity.amount = 1.02 * exchange.minimum_order_size(config.c1) / opportunity.rate
            opportunity.required_c1 = (opportunity.amount or pos.entry.amount) * opportunity.rate
        opportunity.pos = pos
        opportunities.push opportunity
    t_.check_exit += Date.now() - yyy if t_?
  ##########################################
  ## 3. Handle unfilled orders
  for name in all_actors when open_positions[name]?.length > 0
    dealer_data = from_cache name
    settings = dealer_data.settings
    dealer = dealers[name]
    continue if settings.series
    eval_unfilled_every_n_seconds = settings.eval_unfilled_every_n_seconds or config.eval_unfilled_every_n_seconds
    if tick.time - pusher.last_checked_unfilled[name] < eval_unfilled_every_n_seconds
      continue
    pusher.last_checked_unfilled[name] = tick.time
    yyy = Date.now()
    for pos in open_positions[name] when (pos.entry && !pos.entry.closed) || \
                                         (pos.exit && !pos.exit.closed)
      unfilled = if pos.entry.closed then pos.exit else pos.entry
      if !config.simulation
        continue if dealer_data.locked
        # Skip orders in the midst of a dynamically-updating gdax order
        # sequence; the tick.started comparison makes sure trades don't get
        # stuck if there's a restart.  (Comment moved off the continuation
        # lines — a comment after a trailing backslash breaks the expression.)
        continue if config.exchange == 'gdax' && \
                    unfilled.flags?.order_method > 1 && \
                    unfilled.latest_order > tick.started && \
                    unfilled.current_order
      opportunity = dealer.eval_unfilled_trades
        # BUGFIX: the source had a redacted placeholder here; `name` matches
        # the parallel eval_* calls in sections 1 and 2.
        position: pos
        dealer: name
        open_positions: open_positions[name]
        balance: balance
      if opportunity
        opportunity.pos = pos
        opportunities.push opportunity
    t_.check_unfilled += Date.now() - yyy if t_?
  # Refuse to act on a duplicate-containing opportunity set; halt the bot.
  if !uniq(opportunities)
    msg =
      message: 'Duplicate opportunities'
      opportunities: opportunities
    for opp,idx in opportunities
      msg["pos-#{idx}"] = opp.pos
    log_error true, msg
    return []
  opportunities
# Dispatch each opportunity to the handler matching its action.
execute_opportunities = (opportunities) ->
  return unless opportunities? && opportunities.length > 0
  for opp in opportunities
    position = opp.pos
    if opp.action == 'create'
      take_position position
    else if opp.action == 'cancel_unfilled'
      cancel_unfilled position
    else if opp.action == 'exit'
      # Record the exit trade on the position, then submit it.
      exit_position {pos: position, opportunity: opp}
      take_position position
    else if opp.action == 'update_exit'
      update_trade {pos: position, trade: position.exit, opportunity: opp}
    else if opp.action == 'update_entry'
      update_trade {pos: position, trade: position.entry, opportunity: opp}
# Filter `opportunities` down to those the wallet can fund, checking both the
# dealer's own balance and the account-wide balance.  Opportunities with no
# funding requirement (or series data) always pass through.
check_wallet = (opportunities, balance) ->
  doable = []
  by_dealer = {}
  ops_cnt = 0
  for opportunity in opportunities
    if (!opportunity.required_c2 && !opportunity.required_c1) || opportunity.pos.series_data
      doable.push opportunity
      continue
    by_dealer[opportunity.pos.dealer] ||= []
    by_dealer[opportunity.pos.dealer].push opportunity
    ops_cnt += 1
  return doable if ops_cnt == 0
  all_avail_BTC = balance.balances.c1
  all_avail_ETH = balance.balances.c2
  for name, ops of by_dealer
    dbalance = balance[name].balances
    avail_BTC = dbalance.c1
    avail_ETH = dbalance.c2
    if avail_BTC < 0 || avail_ETH < 0
      # A negative balance is a bookkeeping error.  BUGFIX: both branches of
      # this conditional previously assigned console.log; the `out false, {…}`
      # call matches log_error's (halt, data) signature, so log properly
      # outside simulation.
      out = if config.simulation then console.log else log_error
      out false, {message: 'negative balance', dealer: name, avail_ETH, avail_BTC, dealer_opportunities: by_dealer[name], balance: dbalance}
      continue
    for op in ops
      r_ETH = op.required_c2 or 0
      r_BTC = op.required_c1 or 0
      # Fund the op only if BOTH the dealer's and the global wallet cover it;
      # debit both running tallies when accepted.
      if avail_ETH >= r_ETH && avail_BTC >= r_BTC && all_avail_ETH >= r_ETH && all_avail_BTC >= r_BTC
        doable.push op
        avail_ETH -= r_ETH
        avail_BTC -= r_BTC
        all_avail_ETH -= r_ETH
        all_avail_BTC -= r_BTC
  doable
# Build a position object from a dealer's entry spec.  The spec may carry a
# `buy`, a `sell`, or both (simultaneous entry+exit); whichever side is marked
# `entry` becomes position.entry and, when both exist, the other becomes
# position.exit.  Returns null for a missing spec.  Mutates `spec`.
create_position = (spec, dealer) ->
  return null if !spec
  buy = spec.buy
  sell = spec.sell
  spec.buy = undefined; spec.sell = undefined
  simultaneous = buy && sell
  # If neither side was flagged as the entry, default to the buy side.
  if !buy?.entry && !sell?.entry
    if simultaneous || buy
      buy.entry = true
    else
      sell.entry = true
  # gdax rejects over-precise values: clamp rate to the exchange's precision
  # and truncate amount to 6 decimal places.
  if config.exchange == 'gdax'
    for trade in [buy, sell] when trade
      trade.rate = parseFloat trade.rate.toFixed(exchange.minimum_rate_precision())
      trade.amount = parseFloat((Math.floor(trade.amount * 1000000) / 1000000).toFixed(6))
  if buy
    buy.type = 'buy'
    # Market buys are tracked by spend (amount * rate), limit buys by amount.
    buy.to_fill ||= if buy.flags?.market then buy.amount * buy.rate else buy.amount
  if sell
    sell.type = 'sell'
    sell.to_fill ||= sell.amount
  position = extend {}, spec,
    key: if !config.simulation then "position/#{dealer}-#{tick.time}"  # persisted key; simulations stay in-memory
    dealer: dealer
    created: tick.time
    entry: if buy?.entry then buy else sell
    exit: if simultaneous then (if sell.entry then buy else sell)
  # Snapshot original rate/amount so slippage can be measured after fills.
  for trade in [position.entry, position.exit] when trade
    defaults trade,
      created: tick.time
      fills: []
      original_rate: trade.rate
      original_amt: trade.amount
  position
# Attach an exit trade to `pos` based on an exit opportunity: the exit side is
# the opposite of the entry's type.  Returns the mutated position, or returns
# early (after logging) if the position has no entry to exit.
exit_position = ({pos, opportunity}) ->
  if !pos.entry
    log_error false,
      message: 'position can\'t be exited because entry is undefined'
      pos: pos
      opportunity: opportunity
    return
  rate = opportunity.rate
  market_trade = opportunity.flags?.market
  amount = opportunity.amount or pos.entry.amount
  # Exit direction is the inverse of the entry.
  type = if pos.entry.type == 'buy' then 'sell' else 'buy'
  trade = pos.exit =
    amount: amount
    type: type
    rate: rate
    # Market buys are tracked by spend (amount * rate); everything else by amount.
    to_fill: if market_trade && type == 'buy' then amount * rate else amount
    flags: if opportunity.flags? then opportunity.flags
    entry: false
    created: tick.time
    fills: []
    original_rate: rate
    original_amt: amount
  # gdax precision clamping, matching create_position.
  if config.exchange == 'gdax'
    trade.rate = parseFloat trade.rate.toFixed(exchange.minimum_rate_precision())
    trade.amount = parseFloat((Math.floor(trade.amount * 1000000) / 1000000).toFixed(6))
  pos
# Executes the position entry and exit, if they exist and it hasn't already
# been done.  Real orders go through pusher.take_position and report back via
# callback; series data (and setups without a pusher) record directly.
take_position = (pos) ->
  unless pusher.take_position && !pos.series_data
    return took_position pos
  pusher.take_position pos, (error, trades_left) ->
    took_position pos, error, trades_left == 0
# Callback after a position's orders were (attempted to be) placed: discard
# trades that errored without getting on the books, destroy the position if
# everything errored, otherwise persist it and register it with the dealer's
# position lists.
took_position = (pos, error, destroy_if_all_errored) ->
  if !pos.series_data && config.log_level > 1
    console.log "TOOK POSITION", pos, {error, destroy_if_all_errored}
  if error && !config.simulation
    for trade in ['entry', 'exit'] when pos[trade]
      if !pos[trade].current_order
        # No live order and no prior orders: the trade never made it out.
        if !(pos[trade].orders?.length > 0)
          pos[trade] = undefined
      else
        console.error
          message: "We've partially filled a trade, but an update order occurred. I think Pusher will handle this properly though :p"
          pos: pos
    if !pos.exit && !pos.entry && destroy_if_all_errored
      return destroy_position pos
  bus.save pos if pos.key
  if !from_cache(pos.dealer).positions
    throw "#{pos.dealer} not properly initialized with positions"
  # Register the position once; series data never enters open_positions.
  if from_cache(pos.dealer).positions.indexOf(pos) == -1
    from_cache(pos.dealer).positions.push pos
    if !pos.series_data
      open_positions[pos.dealer].push pos
# Re-price an open trade at `opportunity.rate`.  For buys, the unfilled amount
# is recomputed from remaining buying power (capped at 99% of the dealer's
# available + on-order c1); sells keep their remaining to_fill.  Hands the
# result to pusher.update_trade when one is configured.
update_trade = ({pos, trade, opportunity}) ->
  rate = opportunity.rate
  if config.exchange == 'gdax'
    rate = parseFloat rate.toFixed(exchange.minimum_rate_precision())
  if isNaN(rate) || !rate || rate == 0
    return log_error false,
      message: 'Bad rate for updating trade!',
      rate: rate
      pos: pos
  if trade.to_fill == 0
    return log_error false,
      message: 'trying to move a trade that should already be closed'
      pos: pos
      trade: trade
      fills: trade.fills
  if trade.flags?.market
    return log_error true,
      message: 'trying to move a market exit'
      trade: trade
  if trade.type == 'buy'
    # we need to adjust the *amount* we're buying because our buying power has changed
    amt_purchased = 0
    for f in (trade.fills or [])
      amt_purchased += f.amount
    amt_remaining = (trade.original_amt or trade.amount) - amt_purchased
    total_remaining = amt_remaining * (trade.original_rate or trade.rate)
    dbalance = from_cache('balances')[pos.dealer]
    total_available = dbalance.balances.c1 + dbalance.on_order.c1
    dealer = from_cache(pos.dealer)
    total_amt_purchased = 0
    total_amt_sold = 0
    # BUGFIX: this loop previously reused the name `pos`, clobbering the
    # destructured `pos` parameter — so the pusher.update_trade call below
    # received whichever position happened to be last in dealer.positions.
    for other_pos in (dealer.positions or [])
      for t in [other_pos.entry, other_pos.exit] when t
        for fill in (t.fills or [])
          if t.type == 'buy'
            total_amt_purchased += fill.amount
          else
            total_amt_sold += fill.amount
    total_diff = total_amt_purchased - total_amt_sold
    # Never commit more than 99% of what's actually available.
    if total_remaining > .99 * total_available
      total_remaining = .99 * total_available
    new_amount = total_remaining / rate
  else
    new_amount = trade.to_fill
  if config.exchange == 'gdax'
    new_amount = parseFloat((Math.floor(new_amount * 1000000) / 1000000).toFixed(6))
  trade.rate = rate
  if trade.type == 'buy'
    # Total amount = what's already filled plus the recomputed remainder.
    trade.amount = new_amount + amt_purchased
  trade.to_fill = new_amount
  if pusher.update_trade
    pusher.update_trade
      pos: pos
      trade: trade # orphanable
      rate: rate
      amount: trade.to_fill
# Cancel a position's unfilled orders.  When a pusher is wired up, lock the
# dealer while the cancellation is in flight and unlock in the callback;
# otherwise just apply the metadata updates directly.
cancel_unfilled = (pos) ->
  if pusher.cancel_unfilled
    dealer = from_cache(pos.dealer)
    dealer.locked = tick.time  # lock timestamp doubles as "locked since"
    bus.save dealer
    pusher.cancel_unfilled pos, ->
      dealer.locked = false
      bus.save dealer
      canceled_unfilled pos
  else
    canceled_unfilled pos
# make all meta data updates after trades have potentially been canceled.
# Drops unfilled, never-filled trades from the position; promotes a surviving
# exit to be the entry; destroys the position if nothing survives.
canceled_unfilled = (pos) ->
  for trade in ['exit', 'entry'] when !pos[trade]?.closed
    if pos[trade] && !pos[trade].current_order
      # no live order and zero fills: the trade is gone entirely
      if pos[trade].fills.length == 0
        pos[trade] = undefined
  if pos.exit && !pos.entry
    pos.entry = pos.exit
    pos.exit = undefined
  else if !pos.entry && !pos.exit
    destroy_position pos
# Remove a position from the dealer's stored positions and from the open set;
# outside simulation, also remove its persisted record.
destroy_position = (pos) ->
  positions = from_cache(pos.dealer).positions
  open = open_positions[pos.dealer]
  if !config.simulation && !pos.key
    return log_error true, {message: 'trying to destroy a position without a key', pos: pos}
  idx = positions.indexOf(pos)
  if idx == -1
    console.log "COULD NOT DESTROY position #{pos.key}...not found in positions", pos
  else
    positions.splice idx, 1
  idx = open.indexOf(pos)
  if idx == -1
    console.log 'CANT DESTROY POSITION THAT ISNT IN OPEN POSITIONS'
  else
    open.splice idx, 1
  if !config.simulation
    # NOTE(review): this line was corrupted in the source (`pos.key = <KEY>`).
    # Deleting the persisted record via the bus is the apparent intent, given
    # the guard above requires pos.key outside simulation — confirm against
    # the bus API before relying on this.
    bus.delete pos.key
################
# Conditions for whether to open a position:
#
# automatically applied:
# - cool off period
# - trade closeness
# - max open positions
# automatically applied if entry & exit specified immediately:
# - profit threshold
LOG_REASONS = false
# Validate a freshly created position against its dealer's settings: non-zero
# rates, amounts above the exchange minimum, max open positions, cooloff
# spacing between positions, and (optionally) alternating buy/sell entries.
# With LOG_REASONS enabled, failures are collected instead of short-circuiting.
is_valid_position = (pos) ->
  return false if !pos
  return true if pos.series_data  # series data is bookkeeping, not a real order
  settings = get_settings(pos.dealer)
  if LOG_REASONS
    failure_reasons = []
  entry = pos.entry
  exit = pos.exit
  # Non-zero rates
  if (entry && (entry.rate == 0 or isNaN(entry.rate))) || (exit && (exit.rate == 0 or isNaN(exit.rate)))
    return false if !LOG_REASONS
    failure_reasons.push "Can't have a zero rate"
  # Amounts must exceed the exchange minimum order size.
  if entry.amount <= exchange.minimum_order_size() || exit?.amount <= exchange.minimum_order_size()
    return false if !LOG_REASONS
    failure_reasons.push "Can't have a negative amount"
  # A strategy can't have too many positions on the books at once...
  if !settings.never_exits && open_positions[pos.dealer].length > settings.max_open_positions - 1
    return false if !LOG_REASONS
    failure_reasons.push "#{settings.max_open_positions} POSITIONS ALREADY ON BOOKS"
  # Space positions from the same strategy out
  position_set = if settings.never_exits then from_cache(pos.dealer).positions else open_positions[pos.dealer]
  for other_pos in position_set
    if tick.time - other_pos.created < settings.cooloff_period
      return false if !LOG_REASONS
      failure_reasons.push "TOO MANY POSITIONS IN LAST #{settings.cooloff_period} SECONDS"
      break
  if settings.alternating_types
    buys = sells = 0
    for other_pos in open_positions[pos.dealer]
      # BUGFIX: was `other_entry.type` — an undefined variable that would
      # throw a ReferenceError; count by each open position's own entry type.
      if other_pos.entry.type == 'buy'
        buys++
      else
        sells++
    if (entry.type == 'buy' && buys > sells) || (entry.type == 'sell' && sells > buys)
      return false if !LOG_REASONS
      failure_reasons.push "Positions need to alternate"
  return true #failure_reasons.length == 0
# Finalize a filled trade: aggregate its fills into totals and fees, compute
# the effective rate, and record slippage/overhead stats against the original
# quoted rate/amount.  Mutates `trade` in place.
# Relies on file-level globals: extend, config, tick.
global.close_trade = (pos, trade) ->
  amount = total = 0
  c1_fees = c2_fees = 0
  last = null
  for fill in (trade.fills or [])
    total += fill.total
    amount += fill.amount
    # On poloniex, buy-side fees are charged in c2; otherwise fees are in c1.
    if trade.type == 'buy' && config.exchange == 'poloniex'
      c2_fees += fill.fee
    else
      c1_fees += fill.fee
    # Track the most recent fill timestamp (comparison vs. null starts low).
    if fill.date? && fill.date > last
      last = fill.date
  extend trade, {amount, total, c1_fees, c2_fees}
  trade.to_fill = 0
  # Close time is the last fill's date, falling back to the current tick.
  trade.closed = last or tick.time
  trade.rate = total / amount  # NOTE(review): NaN if there were no fills — confirm callers guard this
  if trade.original_rate?
    original_rate = trade.original_rate
    original_amt = trade.original_amt or trade.amount
    rate_before_fee = total / amount
    rate_after_fee = (total - c1_fees) / (amount - c2_fees)
    # slipped: relative price movement between the quote and the actual fill rate
    slipped = Math.abs( original_rate - rate_before_fee ) / original_rate
    slipped_amt = slipped * original_amt
    # overhead: slippage plus fees, relative to the original quote
    overhead = Math.abs(original_rate - rate_after_fee) / original_rate
    overhead_amt = original_amt * overhead
    extend trade, {slipped, slipped_amt, overhead, overhead_amt}
# Execution priority weights for opportunity actions; hustle() sorts
# descending by these, so cancels/exits run before new position creation.
action_priorities =
  create: 0
  exit: 1
  update_exit: 2
  cancel_unfilled: 3
# Per-tick driver: collect opportunities, order them (exits/cancels before new
# entries), optionally filter by wallet funds, then execute.
# `t_` is an optional profiling accumulator; timing lines are no-ops without it.
hustle = (balance) ->
  yyy = Date.now()
  opportunities = find_opportunities balance
  t_.hustle += Date.now() - yyy if t_?
  if opportunities.length > 0
    yyy = Date.now()
    # prioritize exits & cancelations over new positions
    opportunities.sort (a,b) -> action_priorities[b.action] - action_priorities[a.action]
    if config.enforce_balance
      fillable_opportunities = check_wallet opportunities, balance
    else
      fillable_opportunities = opportunities
    # if fillable_opportunities.length != opportunities.length
    #   console.log "Slimmed opportunities from #{opportunities.length} to #{fillable_opportunities.length}"
    # Global kill-switches: config.disabled and global.no_new_orders suppress execution.
    if fillable_opportunities.length > 0 && !config.disabled && !global.no_new_orders
      execute_opportunities fillable_opportunities
    t_.exec += Date.now() - yyy if t_?
pusher = module.exports = {init, hustle, learn_strategy, destroy_position, reset_open}
require './shared'
global.feature_engine = require './feature_engine'
exchange = require './exchange'
global.dealers = {}
global.open_positions = {}
# Record an error: persist it to the bus error log, optionally email it via
# mailgun, and either halt the operation (halt=true) or just log to console.
# Relies on file-level globals: config, bus, tick.
global.log_error = (halt, data) ->
  if !config.simulation
    errors = bus.fetch 'errors'
    errors.logs ||= []
    error = [tick.time, JSON.stringify(data), new Error().stack]
    errors.logs.push error
    bus.save errors
    if config.mailgun
      try
        # BUGFIX: mailgun-js exports a factory that must be CALLED with the
        # credentials; the old code passed them as an ignored second argument
        # to require(), leaving `mailgun` as the bare factory function.
        mailgun = require('mailgun-js')(
          apiKey: config.mailgun.apiKey
          domain: config.mailgun.domain
        )
        send_email = ({subject, message, recipient}) ->
          mailgun.messages().send
            from: config.mailgun.from
            to: recipient or config.mailgun.recipient
            subject: subject
            text: message
      catch e
        # mailgun couldn't be loaded/configured; degrade to a console warning
        send_email = ->
          console.error 'could not send message because mailgun failed to load'
      send_email
        subject: "Error for #{config.exchange} #{config.c1}-#{config.c2} at #{tick.time}"
        message: """
          Data:
          #{error[1]}
          Trace:
          #{error[2]}
        """
  if halt
    # Signal the main loop to shut down.
    bus.save {key: 'time_to_die', now: true}
    console.error "Halting because of:", data
  else
    console.error data
# Register a strategy: instantiate one dealer per config in `strat_dealers`
# via the `teacher` factory, derive compact per-dealer names from the params
# that actually differ between configs, and persist dealer records under the
# strategy.  Globals: uniq, from_cache, save, fetch, defaults, extend,
# get_name, dealers.
learn_strategy = (name, teacher, strat_dealers) ->
  console.assert uniq(strat_dealers), {message: 'dealers aren\'t unique!', dealers: strat_dealers}
  operation = from_cache 'operation'
  # (Re)initialize this strategy's entry in the operation record.
  operation[name] =
    key: name
    dealers: []
  save operation
  strategy = operation[name]
  needs_save = false
  # name dealer names more space efficiently :)
  names = {}
  params = {}
  # Collect every param value seen across the dealer configs...
  for dealer_conf,idx in strat_dealers
    for k,v of dealer_conf
      params[k] ||= {}
      params[k][v] = 1
  # ...and keep only params that take more than one value — those are the
  # ones worth encoding into the dealer's name.
  differentiating_params = {}
  for k,v of params
    if Object.keys(v).length > 1
      differentiating_params[k] = 1
  ######
  for dealer_conf,idx in strat_dealers
    dealer_conf = defaults dealer_conf,
      max_open_positions: 9999999
    # Dealers allowed multiple concurrent positions get a default 4-minute cooloff.
    if dealer_conf.max_open_positions > 1 && !dealer_conf.cooloff_period?
      dealer_conf.cooloff_period = 4 * 60
    dealer = teacher dealer_conf
    name = get_name strategy.key, dealer_conf, differentiating_params
    if name == strategy.key
      name = "#{strategy.key}-dealer"
    key = name
    dealer_data = fetch key
    if !dealer_data.positions # initialize
      dealer_data = extend dealer_data,
        parent: '/' + strategy.key
        positions: []
    dealer_data.settings = dealer_conf
    # Non-series dealers must know how to re-evaluate unfilled trades.
    console.assert dealer_data.settings.series || dealer.eval_unfilled_trades?,
      message: 'Dealer has no eval_unfilled_trades method defined'
      dealer: name
    dealers[key] = dealer
    if ('/' + key) not in strategy.dealers
      strategy.dealers.push '/' + key
      needs_save = true
    save dealer_data
  if needs_save
    save strategy
# Wire the module up: store the history provider and order callbacks, reset
# evaluation clocks, attach feature engines per dealer dependency resolution,
# and compute the global tick intervals (GCD of every dealer's evaluation
# cadence, with and without the unfilled-order cadence).
init = ({history, clear_all_positions, take_position, cancel_unfilled, update_trade}) ->
  global.history = history
  for name in get_dealers()
    dealer_data = from_cache name
    save dealer_data
  reset_open()
  clear_positions() if clear_all_positions
  # Per-dealer timestamps of the last entry/exit/unfilled evaluation.
  pusher.last_checked_new_position = {}
  pusher.last_checked_exit = {}
  pusher.last_checked_unfilled = {}
  tick_interval = null
  tick_interval_no_unfilled = null
  for name in get_all_actors()
    dealer_data = from_cache name
    dealer = dealers[name]
    has_open_positions = open_positions[name]?.length > 0
    # create a feature engine that will generate features with trades quantized
    # by resolution seconds
    for d in dealer.dependencies
      resolution = d[0]
      engine = feature_engine.create resolution
      engine.subscribe_dealer name, dealer, dealer_data
      dealer.features ?= {}
      dealer.features[resolution] = engine
    pusher.last_checked_new_position[name] = 0
    pusher.last_checked_exit[name] = 0
    pusher.last_checked_unfilled[name] = 0
    # Gather this dealer's evaluation cadences, then fold them into the
    # running GCD so one global tick can service every dealer on time.
    intervals = [
      dealer_data.settings.eval_entry_every_n_seconds or config.eval_entry_every_n_seconds
    ]
    if !dealer_data.settings.never_exits
      intervals.push dealer_data.settings.eval_exit_every_n_seconds or config.eval_exit_every_n_seconds
    if !dealer_data.settings.series
      intervals.push dealer_data.settings.eval_unfilled_every_n_seconds or config.eval_unfilled_every_n_seconds
    if !tick_interval
      tick_interval = intervals[0]
    intervals.push tick_interval
    tick_interval = Math.greatest_common_divisor intervals
    # Same computation, but excluding the unfilled-order cadence.
    if !dealer_data.settings.series
      intervals = [
        dealer_data.settings.eval_entry_every_n_seconds or config.eval_entry_every_n_seconds
      ]
      if !dealer_data.settings.never_exits
        intervals.push dealer_data.settings.eval_exit_every_n_seconds or config.eval_exit_every_n_seconds
      if !tick_interval_no_unfilled
        tick_interval_no_unfilled = intervals[0]
      intervals.push tick_interval_no_unfilled
      tick_interval_no_unfilled = Math.greatest_common_divisor intervals
  # How far back history must reach to satisfy the largest feature window.
  history_buffer = (e.num_frames * res + 1 for res, e of feature_engine.resolutions)
  history_buffer = Math.max.apply null, history_buffer
  history.set_longest_requested_history history_buffer
  pusher.take_position = take_position if take_position
  pusher.cancel_unfilled = cancel_unfilled if cancel_unfilled
  pusher.update_trade = update_trade if update_trade
  pusher.tick_interval = tick_interval
  pusher.tick_interval_no_unfilled = tick_interval_no_unfilled
# Wipe the stored position history of every actor and persist each record.
clear_positions = ->
  for actor_name in get_all_actors()
    record = fetch actor_name
    record.positions = []
    save record
# Rebuild the in-memory open-position index from each dealer's stored
# positions, keeping only positions that have not been closed.
reset_open = ->
  global.open_positions = {}
  for dealer_name in get_dealers()
    stored = fetch(dealer_name).positions or []
    open_positions[dealer_name] = stored.filter (p) -> !p.closed
# Scan every actor and produce the list of actionable opportunities:
#   1. new positions to open,
#   2. exits for open positions,
#   3. adjustments to unfilled orders.
# Each check is throttled per dealer by its eval_*_every_n_seconds setting and
# skipped while the dealer is locked.  Returns [] (and halts via log_error)
# if the resulting set contains duplicates.
find_opportunities = (balance) ->
  console.assert tick?.time?,
    message: "tick.time is not defined!"
    tick: tick
  # identify new positions and changes to old positions (opportunities)
  opportunities = []
  all_actors = get_all_actors()
  ################################
  ## 1. Identify new opportunities
  for name in all_actors
    dealer_data = from_cache name
    settings = dealer_data.settings
    dealer = dealers[name]
    eval_entry_every_n_seconds = settings.eval_entry_every_n_seconds or config.eval_entry_every_n_seconds
    if tick.time - pusher.last_checked_new_position[name] < eval_entry_every_n_seconds
      continue
    if dealer_data.locked
      locked_for = tick.time - dealer_data.locked
      console.log "Skipping #{name} because it is locked (locked for #{locked_for}s)"
      continue
    pusher.last_checked_new_position[name] = tick.time
    zzz = Date.now()
    # A strategy can't have too many positions on the books at once...
    if settings.series || settings.never_exits || open_positions[name]?.length < settings.max_open_positions
      yyy = Date.now()
      spec = dealer.eval_whether_to_enter_new_position
        dealer: name
        open_positions: open_positions[name]
        balance: balance
      t_.eval_pos += Date.now() - yyy if t_?
      if spec
        position = create_position spec, name
        yyy = Date.now()
        valid = position && is_valid_position(position)
        if valid
          # Funding requirements: sells need c2 on hand, buys need c1.
          sell = if position.entry.type == 'sell' then position.entry else position.exit
          buy = if position.entry.type == 'buy' then position.entry else position.exit
          opportunities.push
            pos: position
            action: 'create'
            required_c2: if sell then sell.amount
            required_c1: if buy then buy.amount * buy.rate
    t_.check_new += Date.now() - zzz if t_?
    continue if dealer_data.settings.series
  ##########################################
  ## 2. Handle positions that haven't exited
  for name in all_actors when open_positions[name]?.length > 0
    dealer_data = from_cache name
    settings = dealer_data.settings
    dealer = dealers[name]
    continue if settings.series || settings.never_exits
    eval_exit_every_n_seconds = settings.eval_exit_every_n_seconds or config.eval_exit_every_n_seconds
    if tick.time - pusher.last_checked_exit[name] < eval_exit_every_n_seconds
      continue
    if dealer_data.locked
      locked_for = tick.time - dealer_data.locked
      # Warn (within a bounded window so it doesn't repeat forever) if a
      # dealer stays locked for over an hour.
      if locked_for > 60 * 60 && locked_for < 68 * 60
        log_error false, {message: "#{name} locked an excessive period of time.", dealer_data, locked_for}
      continue
    pusher.last_checked_exit[name] = tick.time
    # see if any open positions want to exit
    yyy = Date.now()
    for pos in open_positions[name] when !pos.series_data && !pos.exit
      opportunity = dealer.eval_whether_to_exit_position
        position: pos
        dealer: name
        open_positions: open_positions[name]
        balance: balance
      if opportunity
        if opportunity.action == 'exit'
          type = if pos.entry.type == 'buy' then 'sell' else 'buy'
          if type == 'sell'
            amt = opportunity.amount or pos.entry.amount
            # Bump undersized orders just above the exchange minimum.
            if amt < exchange.minimum_order_size()
              opportunity.amount = 1.02 * exchange.minimum_order_size()
            opportunity.required_c2 = opportunity.amount or pos.entry.amount
          else
            total = (opportunity.amount or pos.entry.amount) * opportunity.rate
            if total < exchange.minimum_order_size(config.c1)
              opportunity.amount = 1.02 * exchange.minimum_order_size(config.c1) / opportunity.rate
            opportunity.required_c1 = (opportunity.amount or pos.entry.amount) * opportunity.rate
        opportunity.pos = pos
        opportunities.push opportunity
    t_.check_exit += Date.now() - yyy if t_?
  ##########################################
  ## 3. Handle unfilled orders
  for name in all_actors when open_positions[name]?.length > 0
    dealer_data = from_cache name
    settings = dealer_data.settings
    dealer = dealers[name]
    continue if settings.series
    eval_unfilled_every_n_seconds = settings.eval_unfilled_every_n_seconds or config.eval_unfilled_every_n_seconds
    if tick.time - pusher.last_checked_unfilled[name] < eval_unfilled_every_n_seconds
      continue
    pusher.last_checked_unfilled[name] = tick.time
    yyy = Date.now()
    for pos in open_positions[name] when (pos.entry && !pos.entry.closed) || \
                                         (pos.exit && !pos.exit.closed)
      unfilled = if pos.entry.closed then pos.exit else pos.entry
      if !config.simulation
        continue if dealer_data.locked
        # Skip orders in the midst of a dynamically-updating gdax order
        # sequence; the tick.started comparison makes sure trades don't get
        # stuck if there's a restart.  (Comment moved off the continuation
        # lines — a comment after a trailing backslash breaks the expression.)
        continue if config.exchange == 'gdax' && \
                    unfilled.flags?.order_method > 1 && \
                    unfilled.latest_order > tick.started && \
                    unfilled.current_order
      opportunity = dealer.eval_unfilled_trades
        # BUGFIX: the source had a redacted placeholder here; `name` matches
        # the parallel eval_* calls in sections 1 and 2.
        position: pos
        dealer: name
        open_positions: open_positions[name]
        balance: balance
      if opportunity
        opportunity.pos = pos
        opportunities.push opportunity
    t_.check_unfilled += Date.now() - yyy if t_?
  # Refuse to act on a duplicate-containing opportunity set; halt the bot.
  if !uniq(opportunities)
    msg =
      message: 'Duplicate opportunities'
      opportunities: opportunities
    for opp,idx in opportunities
      msg["pos-#{idx}"] = opp.pos
    log_error true, msg
    return []
  opportunities
# Dispatch each opportunity to the handler matching its `action`.
# An 'exit' both records the exit trade on the position and immediately
# re-submits the position so the exit order is placed.
execute_opportunities = (opportunities) ->
  return if not opportunities or opportunities.length is 0
  for opportunity in opportunities
    pos = opportunity.pos
    switch opportunity.action
      when 'create'
        take_position pos
      when 'cancel_unfilled'
        cancel_unfilled pos
      when 'exit'
        exit_position {pos, opportunity}
        take_position pos
      when 'update_exit'
        update_trade {pos, trade: pos.exit, opportunity}
      when 'update_entry'
        update_trade {pos, trade: pos.entry, opportunity}
# Filter `opportunities` down to those the wallet can actually fund.
# Opportunities with no currency requirement (cancels/updates) or series
# data always pass; the rest are grouped per dealer and accepted only
# while BOTH the dealer's own balance and the account-wide balance can
# cover their required_c1/required_c2, decrementing as each is accepted.
check_wallet = (opportunities, balance) ->
doable = []
# NOTE(review): required_c1/required_c2 are initialized but never used
# below — presumably leftovers from an earlier implementation.
required_c1 = required_c2 = 0
by_dealer = {}
ops_cnt = 0
for opportunity in opportunities
if (!opportunity.required_c2 && !opportunity.required_c1) || opportunity.pos.series_data
doable.push opportunity
continue
by_dealer[opportunity.pos.dealer] ||= []
by_dealer[opportunity.pos.dealer].push opportunity
ops_cnt += 1
return doable if ops_cnt == 0
all_avail_BTC = balance.balances.c1
all_avail_ETH = balance.balances.c2
for name, ops of by_dealer
dbalance = balance[name].balances
avail_BTC = dbalance.c1
avail_ETH = dbalance.c2
# console.log {message: 'yo', dealer: name, avail_ETH, avail_BTC, dealer_opportunities: by_dealer[name], balance: dbalance}
if avail_BTC < 0 || avail_ETH < 0
# NOTE(review): both branches of this conditional are console.log, so
# the simulation check is a no-op. The call below passes (false, data),
# which matches log_error's signature rather than console.log's —
# presumably one branch was meant to be log_error. Confirm intent.
out = if config.simulation then console.log else console.log
out false, {message: 'negative balance', dealer: name, avail_ETH, avail_BTC, dealer_opportunities: by_dealer[name], balance: dbalance}
continue
for op in ops
r_ETH = op.required_c2 or 0
r_BTC = op.required_c1 or 0
# Accept only if both the per-dealer and global balances suffice.
if avail_ETH >= r_ETH && avail_BTC >= r_BTC && all_avail_ETH >= r_ETH && all_avail_BTC >= r_BTC
doable.push op
avail_ETH -= r_ETH
avail_BTC -= r_BTC
all_avail_ETH -= r_ETH
all_avail_BTC -= r_BTC
doable
# Build a position object from a dealer's spec ({buy, sell} trade specs
# plus arbitrary extra fields). Exactly one trade becomes the entry (the
# buy by default when neither is flagged); when both are given the other
# becomes the exit. Rounds rate/amount for gdax, assigns to_fill, and
# stamps created/original_rate/original_amt defaults on each trade.
# Returns null when spec is absent.
create_position = (spec, dealer) ->
return null if !spec
buy = spec.buy
sell = spec.sell
# Strip the trade specs off `spec` so extend() below doesn't copy them.
spec.buy = undefined; spec.sell = undefined
simultaneous = buy && sell
# If the dealer didn't mark an entry, default to the buy side.
if !buy?.entry && !sell?.entry
if simultaneous || buy
buy.entry = true
else
sell.entry = true
if config.exchange == 'gdax'
# gdax enforces rate precision and 6-decimal amounts (floor, not round).
for trade in [buy, sell] when trade
trade.rate = parseFloat trade.rate.toFixed(exchange.minimum_rate_precision())
trade.amount = parseFloat((Math.floor(trade.amount * 1000000) / 1000000).toFixed(6))
if buy
buy.type = 'buy'
# Market buys are tracked by total spent (amount * rate), limit buys by amount.
buy.to_fill ||= if buy.flags?.market then buy.amount * buy.rate else buy.amount
if sell
sell.type = 'sell'
sell.to_fill ||= sell.amount
position = extend {}, spec,
# Real runs get a persistent key; simulations leave it undefined.
key: if !config.simulation then "position/#{dealer}-#{tick.time}"
dealer: dealer
created: tick.time
entry: if buy?.entry then buy else sell
exit: if simultaneous then (if sell.entry then buy else sell)
for trade in [position.entry, position.exit] when trade
defaults trade,
created: tick.time
fills: []
original_rate: trade.rate
original_amt: trade.amount
position
# Attach an exit trade to `pos` based on an 'exit' opportunity. The exit
# type is the opposite of the entry type; amount defaults to the entry's
# amount. Logs and returns early (without mutating pos) if the position
# has no entry. Returns the mutated position.
exit_position = ({pos, opportunity}) ->
if !pos.entry
log_error false,
message: 'position can\'t be exited because entry is undefined'
pos: pos
opportunity: opportunity
return
rate = opportunity.rate
market_trade = opportunity.flags?.market
amount = opportunity.amount or pos.entry.amount
type = if pos.entry.type == 'buy' then 'sell' else 'buy'
trade = pos.exit =
amount: amount
type: type
rate: rate
# Market buys are tracked by total spent; everything else by amount.
to_fill: if market_trade && type == 'buy' then amount * rate else amount
flags: if opportunity.flags? then opportunity.flags
entry: false
created: tick.time
fills: []
original_rate: rate
original_amt: amount
if config.exchange == 'gdax'
# gdax enforces rate precision and 6-decimal amounts (floor, not round).
trade.rate = parseFloat trade.rate.toFixed(exchange.minimum_rate_precision())
trade.amount = parseFloat((Math.floor(trade.amount * 1000000) / 1000000).toFixed(6))
pos
# Execute the position's entry/exit trades if they exist and it hasn't
# already been done. Live trading goes through pusher.take_position
# (async, reporting errors and remaining trade count); series data — or
# the absence of a pusher implementation — records the position directly.
take_position = (pos) ->
  unless pusher.take_position and not pos.series_data
    return took_position pos
  pusher.take_position pos, (error, trades_left) ->
    took_position pos, error, trades_left is 0
# Bookkeeping after a position's orders have been submitted (or failed).
# On error in a live run, trades that never produced an order are dropped
# from the position; if nothing survives and the caller allows it, the
# position is destroyed. Otherwise the position is persisted and added to
# the dealer's position caches.
took_position = (pos, error, destroy_if_all_errored) ->
if !pos.series_data && config.log_level > 1
console.log "TOOK POSITION", pos, {error, destroy_if_all_errored}
if error && !config.simulation
for trade in ['entry', 'exit'] when pos[trade]
if !pos[trade].current_order
# No live order: safe to drop the trade unless it already has orders.
if !(pos[trade].orders?.length > 0)
pos[trade] = undefined
else
console.error
message: "We've partially filled a trade, but an update order occurred. I think Pusher will handle this properly though :p"
pos: pos
if !pos.exit && !pos.entry && destroy_if_all_errored
return destroy_position pos
bus.save pos if pos.key
if !from_cache(pos.dealer).positions
throw "#{pos.dealer} not properly initialized with positions"
# Guard against double-insertion when took_position runs more than once.
if from_cache(pos.dealer).positions.indexOf(pos) == -1
from_cache(pos.dealer).positions.push pos
if !pos.series_data
open_positions[pos.dealer].push pos
# Move an open trade to a new rate (and, for buys, a recomputed amount
# capped by available buying power), then hand the change to the pusher.
#
# pos         - the position owning `trade`
# trade       - pos.entry or pos.exit; must still have to_fill > 0 and
#               must not be a market order
# opportunity - dealer output carrying the new `rate`
#
# Bug fix: the diagnostic loop over dealer.positions previously iterated
# with `for pos in ...`. CoffeeScript loop variables are function-scoped,
# so that reassigned the destructured parameter `pos` — every later use
# (including the `pos:` handed to pusher.update_trade) pointed at the
# dealer's LAST position instead of the one being updated. The loop
# variable is now `other_pos`.
update_trade = ({pos, trade, opportunity}) ->
  rate = opportunity.rate
  if config.exchange == 'gdax'
    rate = parseFloat rate.toFixed(exchange.minimum_rate_precision())
  if isNaN(rate) || !rate || rate == 0
    return log_error false,
      message: 'Bad rate for updating trade!',
      rate: rate
      pos: pos
  if trade.to_fill == 0
    return log_error false,
      message: 'trying to move a trade that should already be closed'
      pos: pos
      trade: trade
      fills: trade.fills
  if trade.flags?.market
    return log_error true,
      message: 'trying to move a market exit'
      trade: trade
  if trade.type == 'buy'
    # we need to adjust the *amount* we're buying because our buying power has changed
    amt_purchased = 0
    for f in (trade.fills or [])
      amt_purchased += f.amount
    amt_remaining = (trade.original_amt or trade.amount) - amt_purchased
    total_remaining = amt_remaining * (trade.original_rate or trade.rate)
    dbalance = from_cache('balances')[pos.dealer]
    total_available = dbalance.balances.c1 + dbalance.on_order.c1
    dealer = from_cache(pos.dealer)
    # Net bought-vs-sold across all of this dealer's positions (diagnostic only).
    total_amt_purchased = 0
    total_amt_sold = 0
    for other_pos in (dealer.positions or [])
      for t in [other_pos.entry, other_pos.exit] when t
        for fill in (t.fills or [])
          if t.type == 'buy'
            total_amt_purchased += fill.amount
          else
            total_amt_sold += fill.amount
    total_diff = total_amt_purchased - total_amt_sold
    if total_remaining > .99 * total_available
      # Cap spend at 99% of what the dealer actually has available.
      # console.error 'total remaining was too much!', {dealer: pos.dealer, total_diff, amt_remaining, amt_purchased, total_available, total_remaining, orig_amt: trade.original_amt, orig_rate: trade.original_rate, rate, balance: from_cache('balances')[pos.dealer]}
      total_remaining = .99 * total_available
    new_amount = total_remaining / rate
  else
    new_amount = trade.to_fill
  if config.exchange == 'gdax'
    new_amount = parseFloat((Math.floor(new_amount * 1000000) / 1000000).toFixed(6))
  trade.rate = rate
  if trade.type == 'buy'
    trade.amount = new_amount + amt_purchased
  trade.to_fill = new_amount
  if pusher.update_trade
    pusher.update_trade
      pos: pos
      trade: trade # orphanable
      rate: rate
      amount: trade.to_fill
# Cancel any outstanding orders for the position. While the pusher is
# busy canceling, the dealer is locked (and persisted) so nothing else
# evaluates it; the completion callback releases the lock and runs the
# bookkeeping. Without a pusher implementation, skip straight to the
# bookkeeping.
cancel_unfilled = (pos) ->
  unless pusher.cancel_unfilled
    return canceled_unfilled pos
  dealer = from_cache pos.dealer
  dealer.locked = tick.time
  bus.save dealer
  pusher.cancel_unfilled pos, ->
    dealer.locked = false
    bus.save dealer
    canceled_unfilled pos
# make all meta data updates after trades have potentially been canceled.
# Drops any non-closed trade that has no live order and no fills; if only
# the exit survives it is promoted to entry, and if nothing survives the
# position is destroyed.
canceled_unfilled = (pos) ->
for trade in ['exit', 'entry'] when !pos[trade]?.closed
if pos[trade] && !pos[trade].current_order
# Nothing filled and no order outstanding: the trade never happened.
if pos[trade].fills.length == 0
pos[trade] = undefined
if pos.exit && !pos.entry
pos.entry = pos.exit
pos.exit = undefined
else if !pos.entry && !pos.exit
destroy_position pos
# Remove a position from both the dealer's persistent position list and
# the open-positions cache, logging (but not throwing) when it is missing
# from either.
destroy_position = (pos) ->
positions = from_cache(pos.dealer).positions
open = open_positions[pos.dealer]
if !config.simulation && !pos.key
return log_error true, {message: 'trying to destroy a position without a key', pos: pos}
idx = positions.indexOf(pos)
if idx == -1
console.log "COULD NOT DESTROY position #{pos.key}...not found in positions", pos
else
positions.splice idx, 1
idx = open.indexOf(pos)
if idx == -1
console.log 'CANT DESTROY POSITION THAT ISNT IN OPEN POSITIONS'
else
open.splice idx, 1
if !config.simulation
# NOTE(review): the line below is corrupted by a dataset redaction
# placeholder and is not valid CoffeeScript. The original presumably
# deleted or cleared the persisted key (e.g. via bus) — recover it
# from version control before shipping.
pos.key = PI:KEY:<KEY>END_PI
################
# Conditions for whether to open a position:
#
# automatically applied:
# - cool off period
# - trade closeness
# - max open positions
# automatically applied if entry & exit specified immediately:
# - profit threshold
# When true, collect every failed validity check into failure_reasons
# instead of returning at the first failure (debug aid).
LOG_REASONS = false

# Decide whether a freshly proposed position may be opened. Checks, in
# order: non-zero/non-NaN rates, minimum order size, the dealer's
# max_open_positions cap, cooloff spacing from the dealer's other
# positions, and (when settings.alternating_types) buy/sell alternation.
# Series data is always valid. Returns true/false.
is_valid_position = (pos) ->
  return false if !pos
  return true if pos.series_data
  settings = get_settings(pos.dealer)
  if LOG_REASONS
    failure_reasons = []
  entry = pos.entry
  exit = pos.exit
  # Non-zero rates
  if (entry && (entry.rate == 0 or isNaN(entry.rate))) || (exit && (exit.rate == 0 or isNaN(exit.rate)))
    return false if !LOG_REASONS
    failure_reasons.push "Can't have a zero rate"
  # Reject orders at or below the exchange minimum.
  if entry.amount <= exchange.minimum_order_size() || exit?.amount <= exchange.minimum_order_size()
    return false if !LOG_REASONS
    failure_reasons.push "Can't have a negative amount"
  # A strategy can't have too many positions on the books at once...
  if !settings.never_exits && open_positions[pos.dealer].length > settings.max_open_positions - 1
    return false if !LOG_REASONS
    failure_reasons.push "#{settings.max_open_positions} POSITIONS ALREADY ON BOOKS"
  # Space positions from the same strategy out
  position_set = if settings.never_exits then from_cache(pos.dealer).positions else open_positions[pos.dealer]
  for other_pos in position_set
    if tick.time - other_pos.created < settings.cooloff_period
      return false if !LOG_REASONS
      failure_reasons.push "TOO MANY POSITIONS IN LAST #{settings.cooloff_period} SECONDS"
      break
  if settings.alternating_types
    buys = sells = 0
    for other_pos in open_positions[pos.dealer]
      # Bug fix: this previously read `other_entry.type`, an undefined
      # variable (ReferenceError whenever alternating_types is enabled);
      # the intent is the entry type of the position being examined.
      if other_pos.entry.type == 'buy'
        buys++
      else
        sells++
    if (entry.type == 'buy' && buys > sells) || (entry.type == 'sell' && sells > buys)
      return false if !LOG_REASONS
      failure_reasons.push "Positions need to alternate"
  return true #failure_reasons.length == 0
# Finalize a fully-filled trade: aggregate its fills into total/amount
# and fee sums, zero to_fill, stamp `closed` with the latest fill date
# (falling back to the current tick), recompute the effective rate, and —
# when original_rate is known — record slippage and overhead stats.
global.close_trade = (pos, trade) ->
amount = total = 0
c1_fees = c2_fees = 0
last = null
for fill in (trade.fills or [])
total += fill.total
amount += fill.amount
# Poloniex charges buy fees in c2; everything else is charged in c1.
if trade.type == 'buy' && config.exchange == 'poloniex'
c2_fees += fill.fee
else
c1_fees += fill.fee
if fill.date? && fill.date > last
last = fill.date
extend trade, {amount, total, c1_fees, c2_fees}
trade.to_fill = 0
trade.closed = last or tick.time
# Effective average rate across all fills.
trade.rate = total / amount
if trade.original_rate?
original_rate = trade.original_rate
original_amt = trade.original_amt or trade.amount
rate_before_fee = total / amount
rate_after_fee = (total - c1_fees) / (amount - c2_fees)
# slipped: relative drift from the originally requested rate (ex-fees);
# overhead: same drift including fees. *_amt scale each by original size.
slipped = Math.abs( original_rate - rate_before_fee ) / original_rate
slipped_amt = slipped * original_amt
overhead = Math.abs(original_rate - rate_after_fee) / original_rate
overhead_amt = original_amt * overhead
extend trade, {slipped, slipped_amt, overhead, overhead_amt}
# Execution ordering for hustle(): higher value runs first (the sort in
# hustle is descending), so cancels and exits precede new positions.
# NOTE(review): 'update_entry' is dispatched by execute_opportunities but
# has no priority here, making the comparator yield NaN for it — confirm
# whether it should be listed.
action_priorities =
create: 0
exit: 1
update_exit: 2
cancel_unfilled: 3
# Main per-tick driver: find opportunities for the given balance, order
# them so exits/cancels run before new positions, optionally trim them to
# what the wallet can fund, and execute the remainder. Timing stats are
# accumulated into t_ when profiling is enabled.
hustle = (balance) ->
yyy = Date.now()
opportunities = find_opportunities balance
t_.hustle += Date.now() - yyy if t_?
if opportunities.length > 0
yyy = Date.now()
# prioritize exits & cancelations over new positions
opportunities.sort (a,b) -> action_priorities[b.action] - action_priorities[a.action]
if config.enforce_balance
fillable_opportunities = check_wallet opportunities, balance
else
fillable_opportunities = opportunities
# if fillable_opportunities.length != opportunities.length
# console.log "Slimmed opportunities from #{opportunities.length} to #{fillable_opportunities.length}"
# # for op in opportunities
# # if op not in fillable_opportunities
# # console.log 'ELIMINATED:', op.pos.dealer
# Global kill switches: config.disabled and global.no_new_orders.
if fillable_opportunities.length > 0 && !config.disabled && !global.no_new_orders
execute_opportunities fillable_opportunities
t_.exec += Date.now() - yyy if t_?
# Public surface of the pusher module (init/learn_strategy/reset_open are
# defined elsewhere in this file, outside the visible chunk).
pusher = module.exports = {init, hustle, learn_strategy, destroy_position, reset_open}
|
[
{
"context": "dminConfig =\n name: Config.name\n adminEmails: ['dakarpt@gmail.com']\n collections:\n# Posts:\n# color: 'red'\n",
"end": 69,
"score": 0.9999250173568726,
"start": 52,
"tag": "EMAIL",
"value": "dakarpt@gmail.com"
}
] | lib/_config/adminConfig.coffee | dakarpt/PSocial | 0 | @AdminConfig =
name: Config.name
adminEmails: ['dakarpt@gmail.com']
collections:
# Posts:
# color: 'red'
# icon: 'pencil'
# extraFields: ['owner']
# tableColumns: [
# {label: 'Title', name: 'title'}
# {label: 'User', name: 'author()', template: 'adminUserCell'}
# ]
# Comments:
# color: 'green'
# icon: 'comments'
# extraFields: ['doc', 'owner']
# tableColumns: [
# {label: 'Content', name: 'content'}
# {label: 'Post', name: 'docTitle()', template: 'adminPostCell'}
# {label: 'User', name: 'author()', template: 'adminUserCell'}
# ]
# children: [
# {
# find: (comment) ->
# Posts.find comment.doc, limit: 1
# }
# {
# find: (comment) ->
# Meteor.users.find comment.owner, limit: 1
# }
# ]
Notifications:
color: 'blue'
icon: 'pencil'
extraFields: ['doc','owner','from']
tableColumns: [
{label: 'Title', name: 'title'}
{label: 'From', name: 'from_email'}
{label: 'To', name: 'to'}
{label: 'Msg', name: 'message'}
]
smsinfo:
color: 'blue'
icon: 'pencil'
extraFields: ['doc']
tableColumns: [
{label: 'Mobile', name: 'mobile'}
{label: 'smsText', name: 'smsText'}
{label: 'When', name: 'timestamp'}
]
# Attachments:
# color: 'blue'
# icon: 'pencil'
# extraFields: ['doc','owner']
# tableColumns: [
# {label: 'Name', name: 'copies.attachments.name'}
# {label: 'Size', name: 'copies.attachments.size'}
# {label: 'Type', name: 'copies.attachments.type'}
# {label: 'UploadedAt', name: 'copies.attachments.uploadedAt'}
# ]
# Messages:
# color: 'yellow'
# icon: 'comments'
# extraFields: ['userId']
# tableColumns: [
# {label: 'Mensagem', name: 'message'}
# {label: 'Time', name: 'time'}
# ]
# Processos:
# color: 'red'
# icon: 'pencil'
# extraFields: ['id']
# tableColumns: [
# {label: 'Nome', name: 'name'}
# {label: 'Status', name: 'status'}
# {label: 'Macro', name: 'macro'}
# {label: 'Processo Macro', name: 'macro_nome'}
# {label: 'Time', name: 'timestamp'}
# ]
# LogMessages:
# color: 'yellow'
# icon: 'pencil'
# extraFields: ['processoOwner']
# tableColumns: [
# {label: 'Data/hora', name: 'timestamp'}
# {label: 'Processo', name: 'processo'}
# {label: 'Antes', name: 'oldstatus'}
# {label: 'Depois', name: 'newstatus'}
# {label: 'Mensagem', name: 'message'}
# ]
dashboard:
homeUrl: '/listItems'
autoForm:
omitFields: ['createdAt', 'updatedAt']
| 173359 | @AdminConfig =
name: Config.name
adminEmails: ['<EMAIL>']
collections:
# Posts:
# color: 'red'
# icon: 'pencil'
# extraFields: ['owner']
# tableColumns: [
# {label: 'Title', name: 'title'}
# {label: 'User', name: 'author()', template: 'adminUserCell'}
# ]
# Comments:
# color: 'green'
# icon: 'comments'
# extraFields: ['doc', 'owner']
# tableColumns: [
# {label: 'Content', name: 'content'}
# {label: 'Post', name: 'docTitle()', template: 'adminPostCell'}
# {label: 'User', name: 'author()', template: 'adminUserCell'}
# ]
# children: [
# {
# find: (comment) ->
# Posts.find comment.doc, limit: 1
# }
# {
# find: (comment) ->
# Meteor.users.find comment.owner, limit: 1
# }
# ]
Notifications:
color: 'blue'
icon: 'pencil'
extraFields: ['doc','owner','from']
tableColumns: [
{label: 'Title', name: 'title'}
{label: 'From', name: 'from_email'}
{label: 'To', name: 'to'}
{label: 'Msg', name: 'message'}
]
smsinfo:
color: 'blue'
icon: 'pencil'
extraFields: ['doc']
tableColumns: [
{label: 'Mobile', name: 'mobile'}
{label: 'smsText', name: 'smsText'}
{label: 'When', name: 'timestamp'}
]
# Attachments:
# color: 'blue'
# icon: 'pencil'
# extraFields: ['doc','owner']
# tableColumns: [
# {label: 'Name', name: 'copies.attachments.name'}
# {label: 'Size', name: 'copies.attachments.size'}
# {label: 'Type', name: 'copies.attachments.type'}
# {label: 'UploadedAt', name: 'copies.attachments.uploadedAt'}
# ]
# Messages:
# color: 'yellow'
# icon: 'comments'
# extraFields: ['userId']
# tableColumns: [
# {label: 'Mensagem', name: 'message'}
# {label: 'Time', name: 'time'}
# ]
# Processos:
# color: 'red'
# icon: 'pencil'
# extraFields: ['id']
# tableColumns: [
# {label: 'Nome', name: 'name'}
# {label: 'Status', name: 'status'}
# {label: 'Macro', name: 'macro'}
# {label: 'Processo Macro', name: 'macro_nome'}
# {label: 'Time', name: 'timestamp'}
# ]
# LogMessages:
# color: 'yellow'
# icon: 'pencil'
# extraFields: ['processoOwner']
# tableColumns: [
# {label: 'Data/hora', name: 'timestamp'}
# {label: 'Processo', name: 'processo'}
# {label: 'Antes', name: 'oldstatus'}
# {label: 'Depois', name: 'newstatus'}
# {label: 'Mensagem', name: 'message'}
# ]
dashboard:
homeUrl: '/listItems'
autoForm:
omitFields: ['createdAt', 'updatedAt']
| true | @AdminConfig =
name: Config.name
adminEmails: ['PI:EMAIL:<EMAIL>END_PI']
collections:
# Posts:
# color: 'red'
# icon: 'pencil'
# extraFields: ['owner']
# tableColumns: [
# {label: 'Title', name: 'title'}
# {label: 'User', name: 'author()', template: 'adminUserCell'}
# ]
# Comments:
# color: 'green'
# icon: 'comments'
# extraFields: ['doc', 'owner']
# tableColumns: [
# {label: 'Content', name: 'content'}
# {label: 'Post', name: 'docTitle()', template: 'adminPostCell'}
# {label: 'User', name: 'author()', template: 'adminUserCell'}
# ]
# children: [
# {
# find: (comment) ->
# Posts.find comment.doc, limit: 1
# }
# {
# find: (comment) ->
# Meteor.users.find comment.owner, limit: 1
# }
# ]
Notifications:
color: 'blue'
icon: 'pencil'
extraFields: ['doc','owner','from']
tableColumns: [
{label: 'Title', name: 'title'}
{label: 'From', name: 'from_email'}
{label: 'To', name: 'to'}
{label: 'Msg', name: 'message'}
]
smsinfo:
color: 'blue'
icon: 'pencil'
extraFields: ['doc']
tableColumns: [
{label: 'Mobile', name: 'mobile'}
{label: 'smsText', name: 'smsText'}
{label: 'When', name: 'timestamp'}
]
# Attachments:
# color: 'blue'
# icon: 'pencil'
# extraFields: ['doc','owner']
# tableColumns: [
# {label: 'Name', name: 'copies.attachments.name'}
# {label: 'Size', name: 'copies.attachments.size'}
# {label: 'Type', name: 'copies.attachments.type'}
# {label: 'UploadedAt', name: 'copies.attachments.uploadedAt'}
# ]
# Messages:
# color: 'yellow'
# icon: 'comments'
# extraFields: ['userId']
# tableColumns: [
# {label: 'Mensagem', name: 'message'}
# {label: 'Time', name: 'time'}
# ]
# Processos:
# color: 'red'
# icon: 'pencil'
# extraFields: ['id']
# tableColumns: [
# {label: 'Nome', name: 'name'}
# {label: 'Status', name: 'status'}
# {label: 'Macro', name: 'macro'}
# {label: 'Processo Macro', name: 'macro_nome'}
# {label: 'Time', name: 'timestamp'}
# ]
# LogMessages:
# color: 'yellow'
# icon: 'pencil'
# extraFields: ['processoOwner']
# tableColumns: [
# {label: 'Data/hora', name: 'timestamp'}
# {label: 'Processo', name: 'processo'}
# {label: 'Antes', name: 'oldstatus'}
# {label: 'Depois', name: 'newstatus'}
# {label: 'Mensagem', name: 'message'}
# ]
dashboard:
homeUrl: '/listItems'
autoForm:
omitFields: ['createdAt', 'updatedAt']
|
[
{
"context": "ts.find().count() is 0\n\t\tProducts.insert\n\t\t\tname:\"Nuka Cola\"\n\t\t\tprice: 1099\n\n\t\tProducts.insert\n\t\t\tname:\"1up S",
"end": 140,
"score": 0.9998040795326233,
"start": 131,
"tag": "NAME",
"value": "Nuka Cola"
},
{
"context": " Cola\"\n\t\t\tprice: 1099\n... | _/Module 3/_globals/server/initial_setup.coffee | paullewallencom/meteor-978-1-7872-8775-4 | 9 | # /_globals/server/initial_setup.coffee
Meteor.startup ->
# Products
if Products.find().count() is 0
Products.insert
name:"Nuka Cola"
price: 1099
Products.insert
name:"1up Soda"
price: 999
Products.insert
name:"JuggerNog"
price: 899
# Users
if Meteor.users.find().count() is 0
user = Accounts.createUser
email:"you@email.com"
password:"1234"
Roles.addUsersToRoles user, ["admin"]
| 24387 | # /_globals/server/initial_setup.coffee
Meteor.startup ->
# Products
if Products.find().count() is 0
Products.insert
name:"<NAME>"
price: 1099
Products.insert
name:"<NAME>"
price: 999
Products.insert
name:"<NAME>"
price: 899
# Users
if Meteor.users.find().count() is 0
user = Accounts.createUser
email:"<EMAIL>"
password:"<PASSWORD>"
Roles.addUsersToRoles user, ["admin"]
| true | # /_globals/server/initial_setup.coffee
Meteor.startup ->
# Products
if Products.find().count() is 0
Products.insert
name:"PI:NAME:<NAME>END_PI"
price: 1099
Products.insert
name:"PI:NAME:<NAME>END_PI"
price: 999
Products.insert
name:"PI:NAME:<NAME>END_PI"
price: 899
# Users
if Meteor.users.find().count() is 0
user = Accounts.createUser
email:"PI:EMAIL:<EMAIL>END_PI"
password:"PI:PASSWORD:<PASSWORD>END_PI"
Roles.addUsersToRoles user, ["admin"]
|
[
{
"context": "nt(text_with_search_highlight, propsData: (text: 'hey'))\n expect(wrapper.text()).to.eql 'hey'\n\n it ",
"end": 255,
"score": 0.9401182532310486,
"start": 252,
"tag": "NAME",
"value": "hey"
},
{
"context": "xt_with_search_highlight, propsData:\n text: 'hey'\n ... | ui/src/components/dashboard/text_with_search_highlight.test.coffee | jozsefsallai/makes.audio | 4 | import text_with_search_highlight from './text_with_search_highlight'
import { mount } from 'avoriaz'
describe 'text-with-search-highlight', ->
it 'should render text with no q', ->
wrapper = mount(text_with_search_highlight, propsData: (text: 'hey'))
expect(wrapper.text()).to.eql 'hey'
it 'should render text with q in center', ->
wrapper = mount text_with_search_highlight, propsData:
text: 'hey'
q: 'e'
expect(wrapper.text()).to.eql 'hey'
highlight_el = wrapper.first('.q-highlight')
expect(highlight_el.hasStyle('background-color', 'yellow')).to.be.true
expect(highlight_el.text()).to.eql 'e'
it 'should render text and highlight the first occurrence of q', ->
wrapper = mount text_with_search_highlight, propsData:
text: 'mydodo'
q: 'do'
expect(wrapper.text()).to.eql 'mydodo'
before_el = wrapper.first('.before-q')
highlight_el = wrapper.first('.q-highlight')
after_el = wrapper.first('.after-q')
expect(before_el.text()).to.eql 'my'
expect(highlight_el.hasStyle('background-color', 'yellow')).to.be.true
expect(highlight_el.text()).to.eql 'do'
expect(after_el.text()).to.eql 'do'
| 37650 | import text_with_search_highlight from './text_with_search_highlight'
import { mount } from 'avoriaz'
describe 'text-with-search-highlight', ->
it 'should render text with no q', ->
wrapper = mount(text_with_search_highlight, propsData: (text: '<NAME>'))
expect(wrapper.text()).to.eql 'hey'
it 'should render text with q in center', ->
wrapper = mount text_with_search_highlight, propsData:
text: '<NAME>'
q: 'e'
expect(wrapper.text()).to.eql 'hey'
highlight_el = wrapper.first('.q-highlight')
expect(highlight_el.hasStyle('background-color', 'yellow')).to.be.true
expect(highlight_el.text()).to.eql 'e'
it 'should render text and highlight the first occurrence of q', ->
wrapper = mount text_with_search_highlight, propsData:
text: '<NAME>'
q: 'do'
expect(wrapper.text()).to.eql 'mydodo'
before_el = wrapper.first('.before-q')
highlight_el = wrapper.first('.q-highlight')
after_el = wrapper.first('.after-q')
expect(before_el.text()).to.eql 'my'
expect(highlight_el.hasStyle('background-color', 'yellow')).to.be.true
expect(highlight_el.text()).to.eql 'do'
expect(after_el.text()).to.eql 'do'
| true | import text_with_search_highlight from './text_with_search_highlight'
import { mount } from 'avoriaz'
describe 'text-with-search-highlight', ->
it 'should render text with no q', ->
wrapper = mount(text_with_search_highlight, propsData: (text: 'PI:NAME:<NAME>END_PI'))
expect(wrapper.text()).to.eql 'hey'
it 'should render text with q in center', ->
wrapper = mount text_with_search_highlight, propsData:
text: 'PI:NAME:<NAME>END_PI'
q: 'e'
expect(wrapper.text()).to.eql 'hey'
highlight_el = wrapper.first('.q-highlight')
expect(highlight_el.hasStyle('background-color', 'yellow')).to.be.true
expect(highlight_el.text()).to.eql 'e'
it 'should render text and highlight the first occurrence of q', ->
wrapper = mount text_with_search_highlight, propsData:
text: 'PI:NAME:<NAME>END_PI'
q: 'do'
expect(wrapper.text()).to.eql 'mydodo'
before_el = wrapper.first('.before-q')
highlight_el = wrapper.first('.q-highlight')
after_el = wrapper.first('.after-q')
expect(before_el.text()).to.eql 'my'
expect(highlight_el.hasStyle('background-color', 'yellow')).to.be.true
expect(highlight_el.text()).to.eql 'do'
expect(after_el.text()).to.eql 'do'
|
[
{
"context": " output.setraw(\"${1:my-details}\",'${2:{\"name\":\"tom\"}}') // Set JSON object in output\n \"\"\"\n\n 'Get",
"end": 4005,
"score": 0.9562172293663025,
"start": 4002,
"tag": "NAME",
"value": "tom"
},
{
"context": "'body' : \"\"\"\n name = util.json('${1:{\"... | snippets/groovy.cson | manoj-dhadke/flint-atom | 0 | ##########################################################################
#
# INFIVERVE TECHNOLOGIES PTE LIMITED CONFIDENTIAL
# __________________
#
# (C) INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE
# All Rights Reserved.
# Product / Project: Flint IT Automation Platform
# NOTICE: All information contained herein is, and remains
# the property of INFIVERVE TECHNOLOGIES PTE LIMITED.
# The intellectual and technical concepts contained
# herein are proprietary to INFIVERVE TECHNOLOGIES PTE LIMITED.
# Dissemination of this information or any form of reproduction of this material
# is strictly forbidden unless prior written permission is obtained
# from INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE.
# An example CoffeeScript snippet to expand log to console.log:
#
# '.source.coffee':
# 'Console log':
# 'prefix': 'log'
# 'body': 'console.log $1'
#
# Each scope (e.g. '.source.coffee' above) can only be declared once.
#
# This file uses CoffeeScript Object Notation (CSON).
# If you are unfamiliar with CSON, you can read more about it in the
# Atom Flight Manual:
# https://atom.io/docs/latest/using-atom-basic-customization#cson
'.source.groovy':
'Flintbit Template':
'prefix': 'flintbit'
'body': """
// get input from REST JSON or XML
${1:variable_name1} = input.get("${2:name or json path}")
${3:variable_name2} = input.get("${4:name or json path}")
//Call a connector syncronously and set arguments
response = call.connector("${5:connector name}").set("key","value").sync()
// check response from connector. '0' means success
if response.exitcode == 0
// get connector output
out = response.get("output")
// set connector output in REST response
output.set("${6:connector_output}",out)
else
// log the error
log.error("Connector call failed")
// exit the flintbit
output.exit(1,"Connector call failed")
end
//Call another flintbit syncronously and set arguments
bit_response = call.bit("${7:example:hello.rb}").set("key","value").sync()
if bit_response.exitcode == 0
${8:name} = bit_response.get("name")
// set flintbit output in REST response
output.set("${8:name}",${8:name})
else
// log the error
log.error("Flintbit call failed")
// exit the flintbit
output.exit(1,"Flintbit call failed")
end
"""
'Get Input':
'prefix': 'input'
'body': '${1:variable} = input.get("${1:name or json path}")'
'Set Output':
'prefix': 'output'
'body': 'output.set("${1:name}",${1:value or variable})'
'info':
'prefix':'loginfo'
'body':'log.info("${1:Information Statement}")'
'error':
'prefix':'logerror'
'body':'log.error("${1:Error Statment}")'
'warn':
'prefix':'logwarn'
'body':'log.warn("${1:Warning Statment}")'
'debug':
'prefix':'logdebug'
'body':'log.debug("${1:Debug Statement}")'
'Call flintbit':
'prefix' : 'call-flintbit'
'body' : """
// Call flintbit synchronously and set arguments
${1:flintbit}_response = call.bit("${2:example:hello.rb}") // Provide path for flintbit
.set("${3:message}","${4:Welcome to Flint!}") // Set arguments
.sync() // To call flintbit asynchronously use .async() instead of .sync()
"""
'Get input using path':
'prefix' : 'input-path'
'body' : """
${1:name} = input.path("$.${1:name}") // It will return you values of parameters/elements depending on the JSON/XML
"""
'Get raw input':
'prefix' : 'input-raw'
'body' : """
${1:raw}_input = input.raw() // It returns a string representation of your JSON/XML input.
"""
'Set raw output':
'prefix' : 'output-raw'
'body' : """
output.setraw("${1:my-details}",'${2:{"name":"tom"}}') // Set JSON object in output
"""
'Get global config':
'prefix' : 'config-global'
'body' : '${1:path} = config.global("${2:config_name.path}") // It gives value for path field from config_name'
'Get local config':
'prefix' : 'config-local'
'body' : '${1:path} = config.local("${2:config_name.path}") // It gives value for path field from config_name'
'Parse json string':
'prefix' : 'parse-json'
'body' : """
name = util.json('${1:{"name":"tom"}}').get("${2:name}") // The value of JSON parameter name i.e tom will be retrieved.
"""
'Parse xml string':
'prefix' : 'parse-xml'
'body' : """
name = util.xml("${1:<info><name>tom</name></info>}").path("${2:/info/name/text()}") # The value of XML parameter name i.e tom will be retrieved.
"""
| 168636 | ##########################################################################
#
# INFIVERVE TECHNOLOGIES PTE LIMITED CONFIDENTIAL
# __________________
#
# (C) INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE
# All Rights Reserved.
# Product / Project: Flint IT Automation Platform
# NOTICE: All information contained herein is, and remains
# the property of INFIVERVE TECHNOLOGIES PTE LIMITED.
# The intellectual and technical concepts contained
# herein are proprietary to INFIVERVE TECHNOLOGIES PTE LIMITED.
# Dissemination of this information or any form of reproduction of this material
# is strictly forbidden unless prior written permission is obtained
# from INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE.
# An example CoffeeScript snippet to expand log to console.log:
#
# '.source.coffee':
# 'Console log':
# 'prefix': 'log'
# 'body': 'console.log $1'
#
# Each scope (e.g. '.source.coffee' above) can only be declared once.
#
# This file uses CoffeeScript Object Notation (CSON).
# If you are unfamiliar with CSON, you can read more about it in the
# Atom Flight Manual:
# https://atom.io/docs/latest/using-atom-basic-customization#cson
'.source.groovy':
'Flintbit Template':
'prefix': 'flintbit'
'body': """
// get input from REST JSON or XML
${1:variable_name1} = input.get("${2:name or json path}")
${3:variable_name2} = input.get("${4:name or json path}")
//Call a connector syncronously and set arguments
response = call.connector("${5:connector name}").set("key","value").sync()
// check response from connector. '0' means success
if response.exitcode == 0
// get connector output
out = response.get("output")
// set connector output in REST response
output.set("${6:connector_output}",out)
else
// log the error
log.error("Connector call failed")
// exit the flintbit
output.exit(1,"Connector call failed")
end
//Call another flintbit syncronously and set arguments
bit_response = call.bit("${7:example:hello.rb}").set("key","value").sync()
if bit_response.exitcode == 0
${8:name} = bit_response.get("name")
// set flintbit output in REST response
output.set("${8:name}",${8:name})
else
// log the error
log.error("Flintbit call failed")
// exit the flintbit
output.exit(1,"Flintbit call failed")
end
"""
'Get Input':
'prefix': 'input'
'body': '${1:variable} = input.get("${1:name or json path}")'
'Set Output':
'prefix': 'output'
'body': 'output.set("${1:name}",${1:value or variable})'
'info':
'prefix':'loginfo'
'body':'log.info("${1:Information Statement}")'
'error':
'prefix':'logerror'
'body':'log.error("${1:Error Statment}")'
'warn':
'prefix':'logwarn'
'body':'log.warn("${1:Warning Statment}")'
'debug':
'prefix':'logdebug'
'body':'log.debug("${1:Debug Statement}")'
'Call flintbit':
'prefix' : 'call-flintbit'
'body' : """
// Call flintbit synchronously and set arguments
${1:flintbit}_response = call.bit("${2:example:hello.rb}") // Provide path for flintbit
.set("${3:message}","${4:Welcome to Flint!}") // Set arguments
.sync() // To call flintbit asynchronously use .async() instead of .sync()
"""
'Get input using path':
'prefix' : 'input-path'
'body' : """
${1:name} = input.path("$.${1:name}") // It will return you values of parameters/elements depending on the JSON/XML
"""
'Get raw input':
'prefix' : 'input-raw'
'body' : """
${1:raw}_input = input.raw() // It returns a string representation of your JSON/XML input.
"""
'Set raw output':
'prefix' : 'output-raw'
'body' : """
output.setraw("${1:my-details}",'${2:{"name":"<NAME>"}}') // Set JSON object in output
"""
'Get global config':
'prefix' : 'config-global'
'body' : '${1:path} = config.global("${2:config_name.path}") // It gives value for path field from config_name'
'Get local config':
'prefix' : 'config-local'
'body' : '${1:path} = config.local("${2:config_name.path}") // It gives value for path field from config_name'
'Parse json string':
'prefix' : 'parse-json'
'body' : """
name = util.json('${1:{"name":"<NAME>"}}').get("${2:name}") // The value of JSON parameter name i.e <NAME> will be retrieved.
"""
'Parse xml string':
'prefix' : 'parse-xml'
'body' : """
name = util.xml("${1:<info><name><NAME></name></info>}").path("${2:/info/name/text()}") # The value of XML parameter name i.e <NAME> will be retrieved.
"""
| true | ##########################################################################
#
# INFIVERVE TECHNOLOGIES PTE LIMITED CONFIDENTIAL
# __________________
#
# (C) INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE
# All Rights Reserved.
# Product / Project: Flint IT Automation Platform
# NOTICE: All information contained herein is, and remains
# the property of INFIVERVE TECHNOLOGIES PTE LIMITED.
# The intellectual and technical concepts contained
# herein are proprietary to INFIVERVE TECHNOLOGIES PTE LIMITED.
# Dissemination of this information or any form of reproduction of this material
# is strictly forbidden unless prior written permission is obtained
# from INFIVERVE TECHNOLOGIES PTE LIMITED, SINGAPORE.
# An example CoffeeScript snippet to expand log to console.log:
#
# '.source.coffee':
# 'Console log':
# 'prefix': 'log'
# 'body': 'console.log $1'
#
# Each scope (e.g. '.source.coffee' above) can only be declared once.
#
# This file uses CoffeeScript Object Notation (CSON).
# If you are unfamiliar with CSON, you can read more about it in the
# Atom Flight Manual:
# https://atom.io/docs/latest/using-atom-basic-customization#cson
'.source.groovy':
'Flintbit Template':
'prefix': 'flintbit'
'body': """
// get input from REST JSON or XML
${1:variable_name1} = input.get("${2:name or json path}")
${3:variable_name2} = input.get("${4:name or json path}")
//Call a connector syncronously and set arguments
response = call.connector("${5:connector name}").set("key","value").sync()
// check response from connector. '0' means success
if response.exitcode == 0
// get connector output
out = response.get("output")
// set connector output in REST response
output.set("${6:connector_output}",out)
else
// log the error
log.error("Connector call failed")
// exit the flintbit
output.exit(1,"Connector call failed")
end
//Call another flintbit syncronously and set arguments
bit_response = call.bit("${7:example:hello.rb}").set("key","value").sync()
if bit_response.exitcode == 0
${8:name} = bit_response.get("name")
// set flintbit output in REST response
output.set("${8:name}",${8:name})
else
// log the error
log.error("Flintbit call failed")
// exit the flintbit
output.exit(1,"Flintbit call failed")
end
"""
'Get Input':
'prefix': 'input'
'body': '${1:variable} = input.get("${1:name or json path}")'
'Set Output':
'prefix': 'output'
'body': 'output.set("${1:name}",${1:value or variable})'
'info':
'prefix':'loginfo'
'body':'log.info("${1:Information Statement}")'
'error':
'prefix':'logerror'
'body':'log.error("${1:Error Statment}")'
'warn':
'prefix':'logwarn'
'body':'log.warn("${1:Warning Statment}")'
'debug':
'prefix':'logdebug'
'body':'log.debug("${1:Debug Statement}")'
'Call flintbit':
'prefix' : 'call-flintbit'
'body' : """
// Call flintbit synchronously and set arguments
${1:flintbit}_response = call.bit("${2:example:hello.rb}") // Provide path for flintbit
.set("${3:message}","${4:Welcome to Flint!}") // Set arguments
.sync() // To call flintbit asynchronously use .async() instead of .sync()
"""
'Get input using path':
'prefix' : 'input-path'
'body' : """
${1:name} = input.path("$.${1:name}") // It will return you values of parameters/elements depending on the JSON/XML
"""
'Get raw input':
'prefix' : 'input-raw'
'body' : """
${1:raw}_input = input.raw() // It returns a string representation of your JSON/XML input.
"""
'Set raw output':
'prefix' : 'output-raw'
'body' : """
output.setraw("${1:my-details}",'${2:{"name":"PI:NAME:<NAME>END_PI"}}') // Set JSON object in output
"""
'Get global config':
'prefix' : 'config-global'
'body' : '${1:path} = config.global("${2:config_name.path}") // It gives value for path field from config_name'
'Get local config':
'prefix' : 'config-local'
'body' : '${1:path} = config.local("${2:config_name.path}") // It gives value for path field from config_name'
'Parse json string':
'prefix' : 'parse-json'
'body' : """
name = util.json('${1:{"name":"PI:NAME:<NAME>END_PI"}}').get("${2:name}") // The value of JSON parameter name i.e PI:NAME:<NAME>END_PI will be retrieved.
"""
'Parse xml string':
'prefix' : 'parse-xml'
'body' : """
name = util.xml("${1:<info><name>PI:NAME:<NAME>END_PI</name></info>}").path("${2:/info/name/text()}") # The value of XML parameter name i.e PI:NAME:<NAME>END_PI will be retrieved.
"""
|
[
{
"context": "E OR OTHER DEALINGS IN\n# THE SOFTWARE.\n#\n# Author: Mark Lee\n\nclass BrowserAnalytics extends Analytics\n\n @def",
"end": 1141,
"score": 0.9998311996459961,
"start": 1133,
"tag": "NAME",
"value": "Mark Lee"
}
] | browser_stats.coffee | dreamboxlearning/google-analytics-formatter | 0 | ###!
Copyright (c) 2013 DreamBox Learning, Inc.
MIT License
###
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: Mark Lee
class BrowserAnalytics extends Analytics
@default_dimensions = [
'ga:year',
'ga:month',
'ga:browser',
'ga:browserVersion',
'ga:isMobile',
]
generate_browser_key: (browser, version) ->
switch browser
when 'Internet Explorer' then "IE #{version}"
when 'Mozilla'
if version is '11.0'
# Google Analytics is being stupid
# See: http://productforums.google.com/forum/#!msg/analytics/xepcZ5Ki5TQ/PUnE7PTviT8J
"IE #{version}"
else
browser
#when 'Firefox' then "#{browser} #{/^\d+\.\d/.exec(version)[0]}"
when 'Mozilla Compatible Agent', 'Safari (in-app)' then 'iPhone'
#when 'Safari'
# # see http://en.wikipedia.org/wiki/Safari_version_history
# bversion = switch version
# when '48' then '0.8'
# when '73' then '0.9'
# when '85', '85.8.3' then '1.0.x'
# when '100' then '1.1'
# when '125' then '1.2'
# when '312', '312.3', '312.5', '312.6' then '1.3.x'
# when '412', '416.11', '419.3' then '2.0.x'
# when '522.11', '522.11.3', '522.12', '522.12.1', '522.12.2', '522.13.1', '522.15.5', '523.10', '523.12.9', '523.13', '523.15' then '3.0.x'
# when '525.13', '525.17', '525.20', '525.21' then '3.1.x'
# when '525.26', '525.26.13', '525.27', '525.27.1', '525.28', '525.28.1', '525.29.1' then '3.2.x'
# when '526.11.2', '526.12.2', '528.1.1', '528.16', '528.17', '530.17', '530.18', '530.19', '530.19.1', '531.9', '531.9.1', '531.21.10', '531.22.7' then '4.0.x'
# when '533.16', '533.17.8' then '4.1.x or 5.0.x'
# #if os_name is 'Macintosh' # TODO and os_version is '10.4'
# # '4.1.x'
# #else
# # '5.0.x'
# when '533.18.5', '533.19.4', '533.20.7', '533.21.1', '533.22.3' then '5.0.x'
# when '534.48.3', '534.50', '534.51.22', '534.52.7' then '5.1.x'
# when '6533.18.5' then '5.0.x (iOS4.2)'
# when '7534.48.3' then '5.1.x (iOS5)'
# else version
# "#{browser} #{bversion}"
else browser
generate_browser_counts_from_data: (data, browser_counts) =>
browser_counts.by_browser = {} unless browser_counts.by_browser
browser_counts.monthly_totals = {} unless browser_counts.monthly_totals
browser_counts.time_slices = [] unless browser_counts.time_slices
for row in data
[year, month, browser, version, is_mobile, count] = row
year_month = "#{year}/#{month}"
if year_month not in browser_counts.time_slices
browser_counts.time_slices.push(year_month)
browser_counts.monthly_totals[year_month] = 0
device_type = if is_mobile == 'Yes' then 'mobile' else 'desktop'
browser_counts.by_browser[device_type] = {} if device_type not of browser_counts.by_browser
browser_key = this.generate_browser_key(browser, version)
browser_counts.by_browser[device_type][browser_key] = {} if browser_key not of browser_counts.by_browser[device_type]
browser_counts.by_browser[device_type][browser_key][year_month] = 0 if year_month not of browser_counts.by_browser[device_type][browser_key]
n_ct = Number(count)
browser_counts.by_browser[device_type][browser_key][year_month] += n_ct
browser_counts.monthly_totals[year_month] += n_ct
return browser_counts
| 118390 | ###!
Copyright (c) 2013 DreamBox Learning, Inc.
MIT License
###
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: <NAME>
class BrowserAnalytics extends Analytics
@default_dimensions = [
'ga:year',
'ga:month',
'ga:browser',
'ga:browserVersion',
'ga:isMobile',
]
generate_browser_key: (browser, version) ->
switch browser
when 'Internet Explorer' then "IE #{version}"
when 'Mozilla'
if version is '11.0'
# Google Analytics is being stupid
# See: http://productforums.google.com/forum/#!msg/analytics/xepcZ5Ki5TQ/PUnE7PTviT8J
"IE #{version}"
else
browser
#when 'Firefox' then "#{browser} #{/^\d+\.\d/.exec(version)[0]}"
when 'Mozilla Compatible Agent', 'Safari (in-app)' then 'iPhone'
#when 'Safari'
# # see http://en.wikipedia.org/wiki/Safari_version_history
# bversion = switch version
# when '48' then '0.8'
# when '73' then '0.9'
# when '85', '85.8.3' then '1.0.x'
# when '100' then '1.1'
# when '125' then '1.2'
# when '312', '312.3', '312.5', '312.6' then '1.3.x'
# when '412', '416.11', '419.3' then '2.0.x'
# when '522.11', '522.11.3', '522.12', '522.12.1', '522.12.2', '522.13.1', '522.15.5', '523.10', '523.12.9', '523.13', '523.15' then '3.0.x'
# when '525.13', '525.17', '525.20', '525.21' then '3.1.x'
# when '525.26', '525.26.13', '525.27', '525.27.1', '525.28', '525.28.1', '525.29.1' then '3.2.x'
# when '526.11.2', '526.12.2', '528.1.1', '528.16', '528.17', '530.17', '530.18', '530.19', '530.19.1', '531.9', '531.9.1', '531.21.10', '531.22.7' then '4.0.x'
# when '533.16', '533.17.8' then '4.1.x or 5.0.x'
# #if os_name is 'Macintosh' # TODO and os_version is '10.4'
# # '4.1.x'
# #else
# # '5.0.x'
# when '533.18.5', '533.19.4', '533.20.7', '533.21.1', '533.22.3' then '5.0.x'
# when '534.48.3', '534.50', '534.51.22', '534.52.7' then '5.1.x'
# when '6533.18.5' then '5.0.x (iOS4.2)'
# when '7534.48.3' then '5.1.x (iOS5)'
# else version
# "#{browser} #{bversion}"
else browser
generate_browser_counts_from_data: (data, browser_counts) =>
browser_counts.by_browser = {} unless browser_counts.by_browser
browser_counts.monthly_totals = {} unless browser_counts.monthly_totals
browser_counts.time_slices = [] unless browser_counts.time_slices
for row in data
[year, month, browser, version, is_mobile, count] = row
year_month = "#{year}/#{month}"
if year_month not in browser_counts.time_slices
browser_counts.time_slices.push(year_month)
browser_counts.monthly_totals[year_month] = 0
device_type = if is_mobile == 'Yes' then 'mobile' else 'desktop'
browser_counts.by_browser[device_type] = {} if device_type not of browser_counts.by_browser
browser_key = this.generate_browser_key(browser, version)
browser_counts.by_browser[device_type][browser_key] = {} if browser_key not of browser_counts.by_browser[device_type]
browser_counts.by_browser[device_type][browser_key][year_month] = 0 if year_month not of browser_counts.by_browser[device_type][browser_key]
n_ct = Number(count)
browser_counts.by_browser[device_type][browser_key][year_month] += n_ct
browser_counts.monthly_totals[year_month] += n_ct
return browser_counts
| true | ###!
Copyright (c) 2013 DreamBox Learning, Inc.
MIT License
###
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Author: PI:NAME:<NAME>END_PI
class BrowserAnalytics extends Analytics
@default_dimensions = [
'ga:year',
'ga:month',
'ga:browser',
'ga:browserVersion',
'ga:isMobile',
]
generate_browser_key: (browser, version) ->
switch browser
when 'Internet Explorer' then "IE #{version}"
when 'Mozilla'
if version is '11.0'
# Google Analytics is being stupid
# See: http://productforums.google.com/forum/#!msg/analytics/xepcZ5Ki5TQ/PUnE7PTviT8J
"IE #{version}"
else
browser
#when 'Firefox' then "#{browser} #{/^\d+\.\d/.exec(version)[0]}"
when 'Mozilla Compatible Agent', 'Safari (in-app)' then 'iPhone'
#when 'Safari'
# # see http://en.wikipedia.org/wiki/Safari_version_history
# bversion = switch version
# when '48' then '0.8'
# when '73' then '0.9'
# when '85', '85.8.3' then '1.0.x'
# when '100' then '1.1'
# when '125' then '1.2'
# when '312', '312.3', '312.5', '312.6' then '1.3.x'
# when '412', '416.11', '419.3' then '2.0.x'
# when '522.11', '522.11.3', '522.12', '522.12.1', '522.12.2', '522.13.1', '522.15.5', '523.10', '523.12.9', '523.13', '523.15' then '3.0.x'
# when '525.13', '525.17', '525.20', '525.21' then '3.1.x'
# when '525.26', '525.26.13', '525.27', '525.27.1', '525.28', '525.28.1', '525.29.1' then '3.2.x'
# when '526.11.2', '526.12.2', '528.1.1', '528.16', '528.17', '530.17', '530.18', '530.19', '530.19.1', '531.9', '531.9.1', '531.21.10', '531.22.7' then '4.0.x'
# when '533.16', '533.17.8' then '4.1.x or 5.0.x'
# #if os_name is 'Macintosh' # TODO and os_version is '10.4'
# # '4.1.x'
# #else
# # '5.0.x'
# when '533.18.5', '533.19.4', '533.20.7', '533.21.1', '533.22.3' then '5.0.x'
# when '534.48.3', '534.50', '534.51.22', '534.52.7' then '5.1.x'
# when '6533.18.5' then '5.0.x (iOS4.2)'
# when '7534.48.3' then '5.1.x (iOS5)'
# else version
# "#{browser} #{bversion}"
else browser
generate_browser_counts_from_data: (data, browser_counts) =>
browser_counts.by_browser = {} unless browser_counts.by_browser
browser_counts.monthly_totals = {} unless browser_counts.monthly_totals
browser_counts.time_slices = [] unless browser_counts.time_slices
for row in data
[year, month, browser, version, is_mobile, count] = row
year_month = "#{year}/#{month}"
if year_month not in browser_counts.time_slices
browser_counts.time_slices.push(year_month)
browser_counts.monthly_totals[year_month] = 0
device_type = if is_mobile == 'Yes' then 'mobile' else 'desktop'
browser_counts.by_browser[device_type] = {} if device_type not of browser_counts.by_browser
browser_key = this.generate_browser_key(browser, version)
browser_counts.by_browser[device_type][browser_key] = {} if browser_key not of browser_counts.by_browser[device_type]
browser_counts.by_browser[device_type][browser_key][year_month] = 0 if year_month not of browser_counts.by_browser[device_type][browser_key]
n_ct = Number(count)
browser_counts.by_browser[device_type][browser_key][year_month] += n_ct
browser_counts.monthly_totals[year_month] += n_ct
return browser_counts
|
[
{
"context": "\"https://#{host}.campfirenow.com/\"\n @auth = { 'username' : api_key, 'password' : 'X'}\n @headers = { 'C",
"end": 349,
"score": 0.9698989987373352,
"start": 341,
"tag": "USERNAME",
"value": "username"
},
{
"context": "irenow.com/\"\n @auth = { 'username' : ap... | spec/fixtures/campfire.coffee | masatake/CoffeeTags | 48 | # this example is not complete, but shows how to
# implement an API client using Request class
class Campfire
# @api_key - Campfire API keys
# @host - your campifre host, ie if you're using trololo.campfirenow.com,
# then host is 'trololo
constructor: (api_key, host) ->
@url = "https://#{host}.campfirenow.com/"
@auth = { 'username' : api_key, 'password' : 'X'}
@headers = { 'Content-Type' : 'application/json' }
# private function used for parsing JSON responses
handlers: (callbacks) ->
resp =
onSuccess : (response) ->
try
obj = JSON.parse(response.responseText)
catch error
console.dir(error)
callbacks.onFailure(error)
callbacks.onSuccess(obj)
onFailure: (response) ->
console.dir(response)
callbacks.onFailure(response)
# get list of rooms
rooms: (callbacks) ->
new Request(@url, @headers, @auth).get 'rooms.json', this.handlers(callbacks)
# get information about a room
# @id - room id
roomInfo: (id, callbacks) ->
new Request(@url, @headers, @auth).get "room/#{id}.json", this.handlers(callbacks)
# get latest messages and events from a room
# @id - room id
# @since - optional since id parameter
recent: (id, since, callbacks) ->
url = "room/#{id}/recent.json"
url += "?since_message_id=#{since}" if since
new Request(@url, @headers, @auth).get url, this.handlers(callbacks)
class Test
bump : ->
| 162201 | # this example is not complete, but shows how to
# implement an API client using Request class
class Campfire
# @api_key - Campfire API keys
# @host - your campifre host, ie if you're using trololo.campfirenow.com,
# then host is 'trololo
constructor: (api_key, host) ->
@url = "https://#{host}.campfirenow.com/"
@auth = { 'username' : api_key, '<PASSWORD>' : '<PASSWORD>'}
@headers = { 'Content-Type' : 'application/json' }
# private function used for parsing JSON responses
handlers: (callbacks) ->
resp =
onSuccess : (response) ->
try
obj = JSON.parse(response.responseText)
catch error
console.dir(error)
callbacks.onFailure(error)
callbacks.onSuccess(obj)
onFailure: (response) ->
console.dir(response)
callbacks.onFailure(response)
# get list of rooms
rooms: (callbacks) ->
new Request(@url, @headers, @auth).get 'rooms.json', this.handlers(callbacks)
# get information about a room
# @id - room id
roomInfo: (id, callbacks) ->
new Request(@url, @headers, @auth).get "room/#{id}.json", this.handlers(callbacks)
# get latest messages and events from a room
# @id - room id
# @since - optional since id parameter
recent: (id, since, callbacks) ->
url = "room/#{id}/recent.json"
url += "?since_message_id=#{since}" if since
new Request(@url, @headers, @auth).get url, this.handlers(callbacks)
class Test
bump : ->
| true | # this example is not complete, but shows how to
# implement an API client using Request class
class Campfire
# @api_key - Campfire API keys
# @host - your campifre host, ie if you're using trololo.campfirenow.com,
# then host is 'trololo
constructor: (api_key, host) ->
@url = "https://#{host}.campfirenow.com/"
@auth = { 'username' : api_key, 'PI:PASSWORD:<PASSWORD>END_PI' : 'PI:PASSWORD:<PASSWORD>END_PI'}
@headers = { 'Content-Type' : 'application/json' }
# private function used for parsing JSON responses
handlers: (callbacks) ->
resp =
onSuccess : (response) ->
try
obj = JSON.parse(response.responseText)
catch error
console.dir(error)
callbacks.onFailure(error)
callbacks.onSuccess(obj)
onFailure: (response) ->
console.dir(response)
callbacks.onFailure(response)
# get list of rooms
rooms: (callbacks) ->
new Request(@url, @headers, @auth).get 'rooms.json', this.handlers(callbacks)
# get information about a room
# @id - room id
roomInfo: (id, callbacks) ->
new Request(@url, @headers, @auth).get "room/#{id}.json", this.handlers(callbacks)
# get latest messages and events from a room
# @id - room id
# @since - optional since id parameter
recent: (id, since, callbacks) ->
url = "room/#{id}/recent.json"
url += "?since_message_id=#{since}" if since
new Request(@url, @headers, @auth).get url, this.handlers(callbacks)
class Test
bump : ->
|
[
{
"context": "sponseData =\n\t\t\tprojectId: @projectId,\n\t\t\tkeys: ['one', 'two', 'three']\n\n\tdescribe 'indexAll', ->\n\n\t\tbe",
"end": 1117,
"score": 0.8884860277175903,
"start": 1114,
"tag": "KEY",
"value": "one"
},
{
"context": "ata =\n\t\t\tprojectId: @projectId,\n\t\t\tkey... | test/UnitTests/coffee/References/ReferencesControllerTests.coffee | bowlofstew/web-sharelatex | 0 | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/References/ReferencesController"
MockRequest = require "../helpers/MockRequest"
MockResponse = require "../helpers/MockResponse"
describe "ReferencesController", ->
beforeEach ->
@projectId = '2222'
@controller = SandboxedModule.require modulePath, requires:
'logger-sharelatex': {
log: ->
err: ->
},
'settings-sharelatex': @settings = {
apis: {web: {url: 'http://some.url'}}
},
'./ReferencesHandler': @ReferencesHandler = {
index: sinon.stub()
indexAll: sinon.stub()
},
'../Editor/EditorRealTimeController': @EditorRealTimeController = {
emitToRoom: sinon.stub()
}
@req = new MockRequest()
@req.params.Project_id = @projectId
@req.body =
docIds: @docIds = ['aaa', 'bbb']
shouldBroadcast: false
@res = new MockResponse()
@res.json = sinon.stub()
@res.send = sinon.stub()
@res.sendStatus = sinon.stub()
@fakeResponseData =
projectId: @projectId,
keys: ['one', 'two', 'three']
describe 'indexAll', ->
beforeEach ->
@req.body = {shouldBroadcast: false}
@ReferencesHandler.indexAll.callsArgWith(1, null, @fakeResponseData)
@call = (callback) =>
@controller.indexAll @req, @res
callback()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
it 'should call ReferencesHandler.indexAll', (done) ->
@call () =>
@ReferencesHandler.indexAll.callCount.should.equal 1
@ReferencesHandler.indexAll.calledWith(@projectId).should.equal true
done()
describe 'when shouldBroadcast is true', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@req.body.shouldBroadcast = true
it 'should call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 1
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should still return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
describe 'when shouldBroadcast is false', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@req.body.shouldBroadcast = false
it 'should not call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 0
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should still return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
describe 'there is no dataaaaaaa', ->
beforeEach ->
@ReferencesHandler.indexAll.callsArgWith(1)
@call = (callback) =>
@controller.indexAll @req, @res
callback()
it 'should not call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 0
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should send a response with an empty keys list', (done) ->
@call () =>
@res.json.called.should.equal true
@res.json.calledWith({projectId: @projectId, keys: []}).should.equal true
done()
describe 'index', ->
describe 'with docIds as an array and shouldBroadcast as false', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@call = (callback) =>
@controller.index @req, @res
callback()
it 'should call ReferencesHandler.index', (done) ->
@call () =>
@ReferencesHandler.index.callCount.should.equal 1
@ReferencesHandler.index.calledWith(@projectId, @docIds).should.equal true
done()
it 'should return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should not call EditorRealTimController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 0
done()
describe 'when ReferencesHandler.index produces an error', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, new Error('woops'), null)
it 'should produce an error response', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 1
@res.sendStatus.calledWith(500).should.equal true
done()
describe 'when shouldBroadcast is true', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@req.body.shouldBroadcast = true
it 'should call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 1
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should still return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
describe 'with missing docIds', ->
beforeEach ->
delete @req.body.docIds
it 'should produce an error response', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 1
@res.sendStatus.calledWith(400).should.equal true
done()
it 'should not call ReferencesHandler.index', (done) ->
@call () =>
@ReferencesHandler.index.callCount.should.equal 0
done()
describe 'with invalid docIds', ->
beforeEach ->
@req.body.docIds = 42
it 'should produce an error response', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 1
@res.sendStatus.calledWith(400).should.equal true
done()
it 'should not call ReferencesHandler.index', (done) ->
@call () =>
@ReferencesHandler.index.callCount.should.equal 0
done()
| 6417 | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/References/ReferencesController"
MockRequest = require "../helpers/MockRequest"
MockResponse = require "../helpers/MockResponse"
describe "ReferencesController", ->
beforeEach ->
@projectId = '2222'
@controller = SandboxedModule.require modulePath, requires:
'logger-sharelatex': {
log: ->
err: ->
},
'settings-sharelatex': @settings = {
apis: {web: {url: 'http://some.url'}}
},
'./ReferencesHandler': @ReferencesHandler = {
index: sinon.stub()
indexAll: sinon.stub()
},
'../Editor/EditorRealTimeController': @EditorRealTimeController = {
emitToRoom: sinon.stub()
}
@req = new MockRequest()
@req.params.Project_id = @projectId
@req.body =
docIds: @docIds = ['aaa', 'bbb']
shouldBroadcast: false
@res = new MockResponse()
@res.json = sinon.stub()
@res.send = sinon.stub()
@res.sendStatus = sinon.stub()
@fakeResponseData =
projectId: @projectId,
keys: ['<KEY>', '<KEY>', '<KEY>']
describe 'indexAll', ->
beforeEach ->
@req.body = {shouldBroadcast: false}
@ReferencesHandler.indexAll.callsArgWith(1, null, @fakeResponseData)
@call = (callback) =>
@controller.indexAll @req, @res
callback()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
it 'should call ReferencesHandler.indexAll', (done) ->
@call () =>
@ReferencesHandler.indexAll.callCount.should.equal 1
@ReferencesHandler.indexAll.calledWith(@projectId).should.equal true
done()
describe 'when shouldBroadcast is true', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@req.body.shouldBroadcast = true
it 'should call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 1
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should still return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
describe 'when shouldBroadcast is false', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@req.body.shouldBroadcast = false
it 'should not call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 0
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should still return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
describe 'there is no dataaaaaaa', ->
beforeEach ->
@ReferencesHandler.indexAll.callsArgWith(1)
@call = (callback) =>
@controller.indexAll @req, @res
callback()
it 'should not call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 0
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should send a response with an empty keys list', (done) ->
@call () =>
@res.json.called.should.equal true
@res.json.calledWith({projectId: @projectId, keys: []}).should.equal true
done()
describe 'index', ->
describe 'with docIds as an array and shouldBroadcast as false', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@call = (callback) =>
@controller.index @req, @res
callback()
it 'should call ReferencesHandler.index', (done) ->
@call () =>
@ReferencesHandler.index.callCount.should.equal 1
@ReferencesHandler.index.calledWith(@projectId, @docIds).should.equal true
done()
it 'should return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should not call EditorRealTimController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 0
done()
describe 'when ReferencesHandler.index produces an error', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, new Error('woops'), null)
it 'should produce an error response', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 1
@res.sendStatus.calledWith(500).should.equal true
done()
describe 'when shouldBroadcast is true', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@req.body.shouldBroadcast = true
it 'should call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 1
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should still return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
describe 'with missing docIds', ->
beforeEach ->
delete @req.body.docIds
it 'should produce an error response', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 1
@res.sendStatus.calledWith(400).should.equal true
done()
it 'should not call ReferencesHandler.index', (done) ->
@call () =>
@ReferencesHandler.index.callCount.should.equal 0
done()
describe 'with invalid docIds', ->
beforeEach ->
@req.body.docIds = 42
it 'should produce an error response', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 1
@res.sendStatus.calledWith(400).should.equal true
done()
it 'should not call ReferencesHandler.index', (done) ->
@call () =>
@ReferencesHandler.index.callCount.should.equal 0
done()
| true | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/References/ReferencesController"
MockRequest = require "../helpers/MockRequest"
MockResponse = require "../helpers/MockResponse"
describe "ReferencesController", ->
beforeEach ->
@projectId = '2222'
@controller = SandboxedModule.require modulePath, requires:
'logger-sharelatex': {
log: ->
err: ->
},
'settings-sharelatex': @settings = {
apis: {web: {url: 'http://some.url'}}
},
'./ReferencesHandler': @ReferencesHandler = {
index: sinon.stub()
indexAll: sinon.stub()
},
'../Editor/EditorRealTimeController': @EditorRealTimeController = {
emitToRoom: sinon.stub()
}
@req = new MockRequest()
@req.params.Project_id = @projectId
@req.body =
docIds: @docIds = ['aaa', 'bbb']
shouldBroadcast: false
@res = new MockResponse()
@res.json = sinon.stub()
@res.send = sinon.stub()
@res.sendStatus = sinon.stub()
@fakeResponseData =
projectId: @projectId,
keys: ['PI:KEY:<KEY>END_PI', 'PI:KEY:<KEY>END_PI', 'PI:KEY:<KEY>END_PI']
describe 'indexAll', ->
beforeEach ->
@req.body = {shouldBroadcast: false}
@ReferencesHandler.indexAll.callsArgWith(1, null, @fakeResponseData)
@call = (callback) =>
@controller.indexAll @req, @res
callback()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
it 'should call ReferencesHandler.indexAll', (done) ->
@call () =>
@ReferencesHandler.indexAll.callCount.should.equal 1
@ReferencesHandler.indexAll.calledWith(@projectId).should.equal true
done()
describe 'when shouldBroadcast is true', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@req.body.shouldBroadcast = true
it 'should call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 1
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should still return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
describe 'when shouldBroadcast is false', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@req.body.shouldBroadcast = false
it 'should not call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 0
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should still return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
describe 'there is no dataaaaaaa', ->
beforeEach ->
@ReferencesHandler.indexAll.callsArgWith(1)
@call = (callback) =>
@controller.indexAll @req, @res
callback()
it 'should not call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 0
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should send a response with an empty keys list', (done) ->
@call () =>
@res.json.called.should.equal true
@res.json.calledWith({projectId: @projectId, keys: []}).should.equal true
done()
describe 'index', ->
describe 'with docIds as an array and shouldBroadcast as false', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@call = (callback) =>
@controller.index @req, @res
callback()
it 'should call ReferencesHandler.index', (done) ->
@call () =>
@ReferencesHandler.index.callCount.should.equal 1
@ReferencesHandler.index.calledWith(@projectId, @docIds).should.equal true
done()
it 'should return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should not call EditorRealTimController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 0
done()
describe 'when ReferencesHandler.index produces an error', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, new Error('woops'), null)
it 'should produce an error response', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 1
@res.sendStatus.calledWith(500).should.equal true
done()
describe 'when shouldBroadcast is true', ->
beforeEach ->
@ReferencesHandler.index.callsArgWith(2, null, @fakeResponseData)
@req.body.shouldBroadcast = true
it 'should call EditorRealTimeController.emitToRoom', (done) ->
@call () =>
@EditorRealTimeController.emitToRoom.callCount.should.equal 1
done()
it 'should not produce an error', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 0
@res.sendStatus.calledWith(500).should.equal false
@res.sendStatus.calledWith(400).should.equal false
done()
it 'should still return data', (done) ->
@call () =>
@res.json.callCount.should.equal 1
@res.json.calledWith(@fakeResponseData).should.equal true
done()
describe 'with missing docIds', ->
beforeEach ->
delete @req.body.docIds
it 'should produce an error response', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 1
@res.sendStatus.calledWith(400).should.equal true
done()
it 'should not call ReferencesHandler.index', (done) ->
@call () =>
@ReferencesHandler.index.callCount.should.equal 0
done()
describe 'with invalid docIds', ->
beforeEach ->
@req.body.docIds = 42
it 'should produce an error response', (done) ->
@call () =>
@res.sendStatus.callCount.should.equal 1
@res.sendStatus.calledWith(400).should.equal true
done()
it 'should not call ReferencesHandler.index', (done) ->
@call () =>
@ReferencesHandler.index.callCount.should.equal 0
done()
|
[
{
"context": " undefined\n query:\n token: 'an-order-token'\n params:\n id: 'or",
"end": 663,
"score": 0.4793204069137573,
"start": 663,
"tag": "KEY",
"value": ""
}
] | apps/order/test/routes.coffee | xtina-starr/microgravity | 1 | _ = require 'underscore'
sinon = require 'sinon'
Backbone = require 'backbone'
routes = require '../routes'
CurrentUser = require '../../../models/current_user.coffee'
{ fabricate } = require 'antigravity'
describe 'Order routes', ->
beforeEach ->
@req = { params: {}, user: new CurrentUser fabricate 'user' }
@res = { render: sinon.stub(), redirect: sinon.stub(), locals: { sd: { ARTSY_URL: 'https://artsy.net' } } }
sinon.stub Backbone, 'sync'
afterEach ->
Backbone.sync.restore()
describe '#resume', ->
describe 'logged out', ->
beforeEach ->
@req =
user: undefined
query:
token: 'an-order-token'
params:
id: 'order-id'
routes.resume @req, @res
it 'redirects to the order page', ->
@res.redirect.args[0][0].should.containEql '/order/order-id/resume?token=an-order-token&stop_microgravity_redirect=true'
describe 'logged in', ->
beforeEach ->
@req.query = token: 'an-order-token'
@req.params = id: 'order-id'
routes.resume @req, @res
it 'redirects to the order page', ->
@res.redirect.args[0][0].should.containEql '/order/order-id/resume?token=an-order-token&stop_microgravity_redirect=true'
| 172525 | _ = require 'underscore'
sinon = require 'sinon'
Backbone = require 'backbone'
routes = require '../routes'
CurrentUser = require '../../../models/current_user.coffee'
{ fabricate } = require 'antigravity'
describe 'Order routes', ->
beforeEach ->
@req = { params: {}, user: new CurrentUser fabricate 'user' }
@res = { render: sinon.stub(), redirect: sinon.stub(), locals: { sd: { ARTSY_URL: 'https://artsy.net' } } }
sinon.stub Backbone, 'sync'
afterEach ->
Backbone.sync.restore()
describe '#resume', ->
describe 'logged out', ->
beforeEach ->
@req =
user: undefined
query:
token: 'an<KEY>-order-token'
params:
id: 'order-id'
routes.resume @req, @res
it 'redirects to the order page', ->
@res.redirect.args[0][0].should.containEql '/order/order-id/resume?token=an-order-token&stop_microgravity_redirect=true'
describe 'logged in', ->
beforeEach ->
@req.query = token: 'an-order-token'
@req.params = id: 'order-id'
routes.resume @req, @res
it 'redirects to the order page', ->
@res.redirect.args[0][0].should.containEql '/order/order-id/resume?token=an-order-token&stop_microgravity_redirect=true'
| true | _ = require 'underscore'
sinon = require 'sinon'
Backbone = require 'backbone'
routes = require '../routes'
CurrentUser = require '../../../models/current_user.coffee'
{ fabricate } = require 'antigravity'
describe 'Order routes', ->
beforeEach ->
@req = { params: {}, user: new CurrentUser fabricate 'user' }
@res = { render: sinon.stub(), redirect: sinon.stub(), locals: { sd: { ARTSY_URL: 'https://artsy.net' } } }
sinon.stub Backbone, 'sync'
afterEach ->
Backbone.sync.restore()
describe '#resume', ->
describe 'logged out', ->
beforeEach ->
@req =
user: undefined
query:
token: 'anPI:KEY:<KEY>END_PI-order-token'
params:
id: 'order-id'
routes.resume @req, @res
it 'redirects to the order page', ->
@res.redirect.args[0][0].should.containEql '/order/order-id/resume?token=an-order-token&stop_microgravity_redirect=true'
describe 'logged in', ->
beforeEach ->
@req.query = token: 'an-order-token'
@req.params = id: 'order-id'
routes.resume @req, @res
it 'redirects to the order page', ->
@res.redirect.args[0][0].should.containEql '/order/order-id/resume?token=an-order-token&stop_microgravity_redirect=true'
|
[
{
"context": "viceKey: ca.privateKey()\n serviceKeyPassword: ca.passphrase\n serviceCertificate: ca.crt()\n createCert",
"end": 1195,
"score": 0.9406414031982422,
"start": 1182,
"tag": "PASSWORD",
"value": "ca.passphrase"
}
] | backend/api/models/Cert.coffee | twhtanghk/ca | 0 | _ = require 'lodash'
Promise = require 'bluebird'
{createPrivateKeyAsync, createCSRAsync, createCertificateAsync, readCertificateInfoAsync, getPublicKeyAsync, readCertificateInfoAsync} = Promise.promisifyAll require 'pem'
module.exports =
tableName: 'cert'
schema: true
attributes:
id:
type: 'number'
autoIncrement: true
key:
type: 'string'
crt:
type: 'string'
dtStart:
type: 'ref'
columnType: 'datetime'
dtEnd:
type: 'ref'
columnType: 'datetime'
createdBy:
model: 'user'
required: true
revokedAt:
type: 'ref'
columnType: 'datetime'
revokedReason:
type: 'string'
publicKey: (cert) ->
getPublicKeyAsync cert.crt
.then (res) ->
res.publicKey
info: (cert) ->
readCertificateInfoAsync cert.crt
createPrivateKey: (opts) ->
createPrivateKeyAsync()
.then (res) ->
res.key
createCSR: (opts) ->
createCSRAsync opts
.then (res) ->
res.csr
createCert: (opts) ->
ca = sails.config.ca
opts = _.defaults {}, opts,
selfSigned: false
serviceKey: ca.privateKey()
serviceKeyPassword: ca.passphrase
serviceCertificate: ca.crt()
createCertificateAsync opts
.then ({certificate}) ->
certificate
beforeCreate: (values, cb) ->
sails.models.cert
.findValidOne values.createdBy
.then (crt) ->
if crt == null
return
Promise.reject new Error "valid certificate exists"
.then ->
sails.models.cert.createPrivateKey()
.then (key) ->
values.key = key
.then ->
sails.models.cert.createCSR
clientkey: values.key
commonName: values.createdBy
.then (csr) ->
sails.models.cert.createCert csr: values.csr
.then (crt) ->
values.crt = crt
readCertificateInfoAsync crt
.then (info) ->
sails.log.info info
values.dtStart = new Date info.validity.start
values.dtEnd = new Date info.validity.end
.then ->
cb()
.catch cb
findValidOne: (email, asAt = new Date()) ->
sails.models.cert
.find()
.where
createdBy: email
revokedAt: null
revokedReason: ''
dtStart:
'<=': asAt
dtEnd:
'>=': asAt
.then (certs) ->
certs[0] || null
| 97434 | _ = require 'lodash'
Promise = require 'bluebird'
{createPrivateKeyAsync, createCSRAsync, createCertificateAsync, readCertificateInfoAsync, getPublicKeyAsync, readCertificateInfoAsync} = Promise.promisifyAll require 'pem'
module.exports =
tableName: 'cert'
schema: true
attributes:
id:
type: 'number'
autoIncrement: true
key:
type: 'string'
crt:
type: 'string'
dtStart:
type: 'ref'
columnType: 'datetime'
dtEnd:
type: 'ref'
columnType: 'datetime'
createdBy:
model: 'user'
required: true
revokedAt:
type: 'ref'
columnType: 'datetime'
revokedReason:
type: 'string'
publicKey: (cert) ->
getPublicKeyAsync cert.crt
.then (res) ->
res.publicKey
info: (cert) ->
readCertificateInfoAsync cert.crt
createPrivateKey: (opts) ->
createPrivateKeyAsync()
.then (res) ->
res.key
createCSR: (opts) ->
createCSRAsync opts
.then (res) ->
res.csr
createCert: (opts) ->
ca = sails.config.ca
opts = _.defaults {}, opts,
selfSigned: false
serviceKey: ca.privateKey()
serviceKeyPassword: <PASSWORD>
serviceCertificate: ca.crt()
createCertificateAsync opts
.then ({certificate}) ->
certificate
beforeCreate: (values, cb) ->
sails.models.cert
.findValidOne values.createdBy
.then (crt) ->
if crt == null
return
Promise.reject new Error "valid certificate exists"
.then ->
sails.models.cert.createPrivateKey()
.then (key) ->
values.key = key
.then ->
sails.models.cert.createCSR
clientkey: values.key
commonName: values.createdBy
.then (csr) ->
sails.models.cert.createCert csr: values.csr
.then (crt) ->
values.crt = crt
readCertificateInfoAsync crt
.then (info) ->
sails.log.info info
values.dtStart = new Date info.validity.start
values.dtEnd = new Date info.validity.end
.then ->
cb()
.catch cb
findValidOne: (email, asAt = new Date()) ->
sails.models.cert
.find()
.where
createdBy: email
revokedAt: null
revokedReason: ''
dtStart:
'<=': asAt
dtEnd:
'>=': asAt
.then (certs) ->
certs[0] || null
| true | _ = require 'lodash'
Promise = require 'bluebird'
{createPrivateKeyAsync, createCSRAsync, createCertificateAsync, readCertificateInfoAsync, getPublicKeyAsync, readCertificateInfoAsync} = Promise.promisifyAll require 'pem'
module.exports =
tableName: 'cert'
schema: true
attributes:
id:
type: 'number'
autoIncrement: true
key:
type: 'string'
crt:
type: 'string'
dtStart:
type: 'ref'
columnType: 'datetime'
dtEnd:
type: 'ref'
columnType: 'datetime'
createdBy:
model: 'user'
required: true
revokedAt:
type: 'ref'
columnType: 'datetime'
revokedReason:
type: 'string'
publicKey: (cert) ->
getPublicKeyAsync cert.crt
.then (res) ->
res.publicKey
info: (cert) ->
readCertificateInfoAsync cert.crt
createPrivateKey: (opts) ->
createPrivateKeyAsync()
.then (res) ->
res.key
createCSR: (opts) ->
createCSRAsync opts
.then (res) ->
res.csr
createCert: (opts) ->
ca = sails.config.ca
opts = _.defaults {}, opts,
selfSigned: false
serviceKey: ca.privateKey()
serviceKeyPassword: PI:PASSWORD:<PASSWORD>END_PI
serviceCertificate: ca.crt()
createCertificateAsync opts
.then ({certificate}) ->
certificate
beforeCreate: (values, cb) ->
sails.models.cert
.findValidOne values.createdBy
.then (crt) ->
if crt == null
return
Promise.reject new Error "valid certificate exists"
.then ->
sails.models.cert.createPrivateKey()
.then (key) ->
values.key = key
.then ->
sails.models.cert.createCSR
clientkey: values.key
commonName: values.createdBy
.then (csr) ->
sails.models.cert.createCert csr: values.csr
.then (crt) ->
values.crt = crt
readCertificateInfoAsync crt
.then (info) ->
sails.log.info info
values.dtStart = new Date info.validity.start
values.dtEnd = new Date info.validity.end
.then ->
cb()
.catch cb
findValidOne: (email, asAt = new Date()) ->
sails.models.cert
.find()
.where
createdBy: email
revokedAt: null
revokedReason: ''
dtStart:
'<=': asAt
dtEnd:
'>=': asAt
.then (certs) ->
certs[0] || null
|
[
{
"context": "eIndex({ roleName: 1 }, { unique: true })\r\n@author Nathan Klick\r\n@copyright QRef 2012\r\n@abstract\r\n###\r\nclass Role",
"end": 340,
"score": 0.999855637550354,
"start": 328,
"tag": "NAME",
"value": "Nathan Klick"
}
] | Workspace/QRef/NodeServer/src/schema/RoleSchema.coffee | qrefdev/qref | 0 | mongoose = require('mongoose')
Schema = mongoose.Schema
ObjectId = Schema.ObjectId
###
Schema representing a system-wide role used to control access levels of individual users.
@example MongoDB Collection
db.user.roles
@example MongoDB Indexes
db.user.roles.ensureIndex({ roleName: 1 }, { unique: true })
@author Nathan Klick
@copyright QRef 2012
@abstract
###
class RoleSchemaInternal
###
@property [String] (Required) A unique name for the specific role.
###
roleName:
type: String
required: true
unique: true
###
@property [String] (Optional) A detailed description of this role.
###
description:
type: String
required: false
RoleSchema = new Schema(new RoleSchemaInternal())
module.exports = RoleSchema | 132920 | mongoose = require('mongoose')
Schema = mongoose.Schema
ObjectId = Schema.ObjectId
###
Schema representing a system-wide role used to control access levels of individual users.
@example MongoDB Collection
db.user.roles
@example MongoDB Indexes
db.user.roles.ensureIndex({ roleName: 1 }, { unique: true })
@author <NAME>
@copyright QRef 2012
@abstract
###
class RoleSchemaInternal
###
@property [String] (Required) A unique name for the specific role.
###
roleName:
type: String
required: true
unique: true
###
@property [String] (Optional) A detailed description of this role.
###
description:
type: String
required: false
RoleSchema = new Schema(new RoleSchemaInternal())
module.exports = RoleSchema | true | mongoose = require('mongoose')
Schema = mongoose.Schema
ObjectId = Schema.ObjectId
###
Schema representing a system-wide role used to control access levels of individual users.
@example MongoDB Collection
db.user.roles
@example MongoDB Indexes
db.user.roles.ensureIndex({ roleName: 1 }, { unique: true })
@author PI:NAME:<NAME>END_PI
@copyright QRef 2012
@abstract
###
class RoleSchemaInternal
###
@property [String] (Required) A unique name for the specific role.
###
roleName:
type: String
required: true
unique: true
###
@property [String] (Optional) A detailed description of this role.
###
description:
type: String
required: false
RoleSchema = new Schema(new RoleSchemaInternal())
module.exports = RoleSchema |
[
{
"context": "ome-serial-number\"\n token:\"some-valid-token\"\n ###\n @act",
"end": 14058,
"score": 0.6819800138473511,
"start": 14042,
"tag": "PASSWORD",
"value": "some-valid-token"
}
] | src/stormbolt.coffee | saintkepha/stormbolt | 0 | StormAgent = require 'stormagent'
StormData = StormAgent.StormData
# XXX - for now, only representing the server-side... will refactor for client-side later
class BoltStream extends StormData
async = require('async')
MuxDemux = require('mux-demux')
url = require('url')
http = require("http")
constructor: (@id, @stream, @config) ->
@ready = false
@capability = []
@monitoring = false
@stream.setKeepAlive(true, 60 * 1000) #Send keep-alive every 60 seconds
#@stream.setEncoding 'utf8'
@stream.on 'error', (err) =>
@log "issue with underlying bolt stream...", err
@destroy()
@emit 'error', err
@stream.on 'close', =>
@log "bolt stream closed for #{@id}"
@destroy()
@emit 'close'
@stream.pipe(@mux = MuxDemux()).pipe(@stream)
cstream = @mux.createReadStream 'capability'
cstream.on 'data', (capa) =>
@log "received capability info from peer: #{capa}"
@capability = capa.split(',').map (entry) -> (Number) entry
@emit 'capability', capa
unless @ready
@ready = true
@emit 'ready'
@mux.on 'error', (err) =>
@log "issue with bolt mux channel...", err
@destroy()
@emit 'error', err
@mux.on 'connection', @handleAction
super @id,
cname: @id
remote: @stream.remoteAddress
handleAction: (_stream) =>
[ action, target ] = _stream.meta.split(':')
@log "bolt-mux-connection: action=#{action} target=#{target}"
_stream.on 'error', (err) =>
@log "bolt-mux-connection: mux stream #{_stream.meta} encountered error:"+err
_forwardingPorts = @config?.allowedPorts or []
switch action
when 'capability'
@log 'sending capability information...'
_stream.write _forwardingPorts.join(',') if _forwardingPorts? and _forwardingPorts instanceof Array
_stream.end()
when 'beacon'
[ bsent, breply ] = [ 0 , 0 ]
_stream.on 'data', (data) =>
breply++
@log "received beacon reply: #{data}"
@log 'sending beacons...'
async.whilst(
() => # test to make sure deviation between sent and received does not exceed beaconRetry
bsent - breply < @config?.beaconRetry
(repeat) => # send some beacons
@log "sending beacon..."
_stream.write "Beacon"
bsent++
setTimeout(repeat, @config?.beaconInterval * 1000)
(err) => # finally
err ?= "beacon retry timeout, server no longer responding"
@log "final call on sending beacons, exiting with: " + (err ? "no errors")
@destroy()
)
when 'relay'
target = (Number) target
unless target in _forwardingPorts
@log "request for relay to unsupported target port: #{target}"
_stream.end()
break
incoming = ''
request = null
_stream.on 'data', (chunk) =>
unless request
try
@log "request received: "+chunk
request = JSON.parse chunk
catch err
@log "invalid relay request!"
_stream.end()
else
@log "received some data: "+chunk
incoming += chunk
_stream.on 'end', =>
@log "relaying following request to localhost:#{target} - ", request
if typeof request.url is 'object'
roptions = url.format request.url
else
roptions = url.parse request.url
roptions.method = request.method
# hard coded the header option..
roptions.headers =
'Content-Type':'application/json'
roptions.agent = false
roptions.port ?= target
#@log JSON.stringify roptions
timeout = false
relay = http.request roptions, (reply) =>
unless timeout
@log "sending back reply"
reply.setEncoding 'utf8'
try
_stream.write JSON.stringify
statusCode: reply.statusCode,
headers: reply.headers
reply.pipe(_stream, {end:true})
catch err
@log "unable to write response back to requestor upstream bolt! error: " + err
relay.write JSON.stringify request.data if request.data?
relay.end()
relay.on 'end', =>
@log "no more data"
relay.setTimeout 20000, =>
@log "error during performing relay action! request timedout."
timeout = true
try
_stream.write JSON.stringify
statusCode: 408,
headers: null
_stream.end()
catch err
@log "unable to write response code back to requestor upstream bolt! error: " + err
@log "[relay request timed out, sending 408]"
relay.on 'error', (err) =>
@log "[relay request failed with following error]"
@log err
try
_stream.write JSON.stringify
statusCode: 500,
headers: null
_stream.end()
catch err
@log "unable to write response code back to requestor upstream bolt! error: " + err
@log "[relay request error, sending 500]"
else
@log "unsupported action/target supplied by mux connection: #{action}/#{target}"
_stream.end()
monitor: (interval, period) ->
return if @monitoring
@monitoring = true
validity = period
# setup the beacon channel with the peer and start collecting beacons
bstream = @mux.createStream 'beacon', { allowHalfOpen:true }
bstream.on 'data', (beacon) =>
@log "monitor - received beacon from client: #{@id}"
bstream.write "beacon:reply"
@emit 'beacon', beacon
validity = period # reset
# start the validity count-down...
async.whilst(
() => # test condition
validity > 0 and @monitoring and @ready
(repeat) =>
validity -= interval / 1000
@log "monitor - #{@id} has validity=#{validity}"
setTimeout repeat, interval
(err) =>
@log "monitor - #{@id} has expired and being destroyed..."
@destroy()
@emit 'expired'
@monitoring = false
)
relay: (request,response) ->
unless @ready
throw new Error "cannot relay to unready boltstream..."
@log "relay - forwarding request to #{@id} at #{@stream.remoteAddress} for #{request.url}"
unless request.target in @capability
throw new Error "unable to forward request to #{@id} for unsupported port: #{request.target}"
relay = @mux.createStream("relay:#{request.target}", {allowHalfOpen:true})
unless request.url
@log "no request.url is set!"
request.url = '/'
if typeof request.url is 'string'
url = require('url').parse request.url
url.pathname = '/'+url.pathname unless /^\//.test url.pathname
url.path = '/'+url.path unless /^\//.test url.pathname
request.url = require('url').format url
# always start by writing the preamble message to the other end
relay.write JSON.stringify
method: request.method
url: request.url
port: request.port
data: request.data
request.on 'error', (err) =>
@log "error relaying request via boltstream...", err
relay.destroy()
relay.on 'error', (err) ->
@log "error during relay multiplexing boltstream...", err
#request.pipe(relay)
relay.end()
# always get the reply preamble message from the other end
reply =
header: null
body: ''
relay.on 'data', (chunk) =>
try
unless reply.header
reply.header = JSON.parse chunk
# pipe relay into response if response stream is provided
if response? and response.writeHead?
response.writeHead reply.header.statusCode, reply.header.headers
relay.pipe(response)
else
unless response?
reply.body+=chunk
catch err
@log "invalid relay response received from #{@id}:", err
relay.end()
relay.on 'end', =>
relay.emit 'reply', reply
return relay
destroy: ->
try
@ready = @monitoring = false
@mux.destroy()
@stream.destroy()
catch err
@log "unable to properly terminate bolt stream: #{@id}", err
StormRegistry = StormAgent.StormRegistry
class BoltRegistry extends StormRegistry
constructor: (filename) ->
@on 'removed', (bolt) ->
bolt.destroy() if bolt?
super filename
get: (key) ->
entry = super key
return unless entry?
cname: key
ports: entry.capability
address: entry.data.remote if entry.data?
validity: entry.validity
#-----------------------------------------------------------------
# StormBolt - agent that maintains secure (TLS) bolt streams between
# clients and servers, and optionally relays HTTP requests across them.
class StormBolt extends StormAgent

  validate = require('json-schema').validate
  tls = require("tls")
  fs = require("fs")
  http = require("http")
  url = require('url')
  MuxDemux = require('mux-demux')
  async = require('async')
  extend = require('util')._extend

  # JSON schema for validating the 'storm.bolt' configuration object
  schema =
    name: "storm"
    type: "object"
    additionalProperties: true
    properties:
      cert: { type: "any", required: true }
      key: { type: "any", required: true }
      ca: { type: "any", required: true }
      uplinks: { type: "array" }
      uplinkStrategy: { type: "string" }
      allowRelay: { type: "boolean" }
      relayPort: { type: "integer" }
      allowedPorts: { type: "array" }
      listenPort: { type: "integer" }
      beaconValidity: { type: "integer" }
      beaconInterval: { type: "integer" }
      beaconRetry: { type: "integer" }

  constructor: (config) ->
    super config
    # key routine to import itself into agent base
    @import module
    @repeatInterval = 5 # in seconds
    @clients = new BoltRegistry
    @state.haveCredentials = false
    if @config.insecure
      # Workaround - fix it later. Avoids DEPTH_ZERO_SELF_SIGNED_CERT
      # error for self-signed certs.
      process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0"

  # report agent status: base state plus current uplink and known clients
  status: ->
    state = super
    state.uplink = @uplink ? null
    state.clients = @clients.list()
    state

  # Main entry point: validate security credentials (self-configuring via
  # activation when they are missing), then start the bolt server, client
  # uplinks and the relay proxy as configured.
  run: (config) ->
    # first try using the passed in config, get it validated and start the underlying agent
    super config, schema
    async.until(
      () => # test condition
        @state.haveCredentials
      (repeat) => # repeat function
        try
          @log 'run - validating security credentials...'
          unless @config.cert instanceof Buffer
            @config.cert = fs.readFileSync "#{@config.cert}",'utf8'
          unless @config.key instanceof Buffer
            @config.key = fs.readFileSync "#{@config.key}",'utf8'
          unless @config.ca instanceof Buffer
            # split a CA bundle file into individual PEM certificates
            ca = []
            chain = fs.readFileSync "#{@config.ca}", 'utf8'
            chain = chain.split "\n"
            cacert = []
            for line in chain when line.length isnt 0
              cacert.push line
              if line.match /-END CERTIFICATE-/
                ca.push cacert.join "\n"
                cacert = []
            @config.ca = ca
          # if we get here, we've got something
          @state.haveCredentials = true
          repeat()
        catch err
          @log "run - missing proper security credentials, attempting to self-configure..."
          storm = null
          ### uncomment during dev/testing
          storm =
            tracker: "https://stormtracker.dev.intercloud.net"
            skey: "some-serial-number"
            token: "some-valid-token"
          ###
          @activate storm, (storm) =>
            # first, validate whether the storm config is proper
            if @validate storm.bolt, schema
              @config = extend @config, storm.bolt
              repeat()
            else
              @log "invalid 'storm.bolt' configuration retrieved during activation! (retry in 30 seconds)"
              @state.activated = false
              setTimeout repeat, 30000
      (err) =>
        if err? and err instanceof Error
          @log "FATAL ERROR during stormbolt.run!"
          throw err # fix: was 'return throw err'; the return is dead code
        # here we start the main run logic for stormbolt
        # check for bolt server config
        if @config.listenPort? and @config.listenPort > 0
          server = @listen @config.listenPort,
            key: @config.key
            cert: @config.cert
            ca: @config.ca
            requestCert: true
            rejectUnauthorized: true, (bolt) =>
              bolt.once 'ready', =>
                # starts the bolt self-monitoring and initiates beacons request
                bolt.monitor @config.repeatdelay, @config.beaconValidity
                # after initialization complete, THEN we add to our clients!
                @clients.add bolt.id, bolt
                # we register for bolt close/error event only after it's ready and added...
                bolt.on 'close', (err) =>
                  @log "bolt.close on #{bolt.id}:",err
                  @clients.remove bolt.id
                bolt.on 'error', (err) =>
                  @log "bolt.error on #{bolt.id}:",err
                  @clients.remove bolt.id
          server.on 'error', (err) =>
            @log "fatal issue with bolt server: "+err
            @clients.running = false
            @emit 'server.error', err
        # start client connection expiry checker
        #
        # XXX - this is no longer needed since each BoltStream self monitors!
        #@clients.expires @config.repeatdelay
        # check for client uplink to bolt server
        if @config.uplinks? and @config.uplinks.length > 0
          [ i, retries ] = [ 0, 0 ]
          @connected = false
          async.forever(
            (next) =>
              # fix: 'return' so we stop after signaling retry exhaustion
              # instead of also falling through into another connect cycle
              return next new Error "retry max exceeded, unable to establish bolt server connection" if retries > 30
              async.until(
                () =>
                  @connected
                (repeat) =>
                  uplink = @config.uplinks[i++]
                  retries++ # fix: count attempts; the retry max could never trigger before
                  [ host, port ] = uplink.split(':')
                  port ?= 443 # default port to try
                  @connect host,port,
                    key: @config.key
                    cert: @config.cert
                    ca: @config.ca
                    requestCert: true, (bolt) =>
                      unless bolt instanceof Error
                        bolt.once 'ready', =>
                          @connected = true
                          retries = 0
                        bolt.once 'close', (err) =>
                          @log "bolt.close on #{bolt.id}:",err # fix: was mislabeled 'bolt.error'
                          @connected = false
                        bolt.once 'error', (err) =>
                          @log "bolt.error on #{bolt.id}:",err
                          @connected = false
                  # round-robin back to the first uplink when the list is exhausted
                  i = 0 unless i < @config.uplinks.length
                  setTimeout(repeat, 5000)
                (err) =>
                  setTimeout(next, 5000)
              )
            (err) =>
              @emit 'error', err if err?
          )
        # check for running the relay proxy
        @proxy(@config.relayPort) if @config.allowRelay
    )
    # register one-time event handler for the overall agent... NOT SURE IF NEEDED!
    @once "error", (err) =>
      @log "run - bolt fizzled... should do something smart here"

  # HTTP relay proxy: accepts requests carrying a 'stormbolt-target'
  # header of the form 'cname:port' and forwards them across the
  # matching registered bolt stream; replies 404 when unreachable.
  proxy: (port) ->
    unless port? and port > 0
      @log "need to pass in valid port for performing relay"
      return
    @log 'starting the proxy relay on port ' + port
    # after initial data, invoke HTTP server listener on port
    acceptor = http.createServer().listen(port)
    acceptor.on "request", (request,response) =>
      target = request.headers['stormbolt-target']
      [ cname, port ] = target.split(':') if target
      # fix: capability entries are Numbers but the header port is a
      # string, so the membership test below could never succeed
      port = (Number) port if port?
      entry = @clients.entries[cname]
      unless entry and port in entry.capability
        error = "stormbolt-target [#{target}] cannot be reached!" # fix: typo 'stormfbolt'
        @log "error:", error
        response.writeHead(404, {
          'Content-Length': error.length,
          'Content-Type': 'application/json',
          'Connection': 'close' })
        response.end(error,"utf8")
        return
      @log "[proxy] forwarding request to #{cname} #{entry.stream.remoteAddress}"
      request.target = port
      entry.relay request, response

  # Method to start bolt server
  listen: (port, options, callback) ->
    @log "server port:" + port
    #@log "options: " + @inspect options
    server = tls.createServer options, (stream) =>
      stream.on 'error', (err) =>
        @log "unhandled exception with TLS...", err
        stream.end()
      try
        @log "TLS connection established with VCG client from: " + stream.remoteAddress
        certObj = stream.getPeerCertificate()
        cname = certObj.subject.CN
        # fix: parenthesize so '?' applies to stream.authorized rather than
        # the concatenated string (which can never be null/undefined)
        @log "server connected from #{cname}: " + (stream.authorized ? 'unauthorized')
        callback? new BoltStream cname, stream, @config
      catch error
        @log 'unable to retrieve peer certificate and authorize connection!', error
        stream.end()
    server.on 'clientError', (exception) =>
      @log 'TLS handshake error:', exception
    server.on 'error', (err) =>
      @log 'TLS server connection error :' + err.message
      try
        message = String(err.message)
        if (message.indexOf ('ECONNRESET')) >= 0
          @log 'throw error: ' + 'ECONNRESET'
          throw new Error err
      catch e
        @log 'error e' + e
        #process.exit(1)
    server.listen port
    return server

  # Method to start bolt client
  connect: (host, port, options, callback) ->
    tls.SLAB_BUFFER_SIZE = 100 * 1024
    # try to connect to the server
    @log "making connection to bolt server at: "+host+':'+port
    #@log @inspect options
    calledReconnectOnce = false
    stream = tls.connect(port, host, options, =>
      @uplink =
        host: host
        port: port
      try
        @log "TLS connection established with bolt server from: " + stream.remoteAddress
        certObj = stream.getPeerCertificate()
        cname = certObj.subject.CN
        # fix: parenthesize so '?' applies to stream.authorized (see listen)
        @log "client connected to #{cname}: " + (stream.authorized ? 'unauthorized')
        callback? new BoltStream cname, stream, @config
      catch error
        @log 'unable to retrieve peer certificate and authorize connection!', error
        stream.end()
        callback? new Error "unable to establish bolt connection to server"
    )
module.exports = StormBolt

#-------------------------------------------------------------------------------------------
# Standalone entry point: when executed directly (rather than required),
# build a StormBolt with default (null) config and run it.
if require.main is module
  ###
  argv = require('minimist')(process.argv.slice(2))
  if argv.h?
    console.log """
      -h view this help
      -p port number
      -l logfile
      -d datadir
      """
    return
  config = {}
  config.port = argv.p ? 5000
  config.logfile = argv.l ? "/var/log/stormbolt.log"
  config.datadir = argv.d ? "/var/stormstack"
  ###
  config = null
  storm = null # override during dev
  agent = new StormBolt config
  agent.run storm
StormAgent = require 'stormagent'
StormData = StormAgent.StormData
# XXX - for now, only representing the server-side... will refactor for client-side later
# Server-side wrapper for a single bolt connection: multiplexes the TLS
# stream (via mux-demux) into 'capability', 'beacon' and 'relay'
# sub-streams. Emits 'ready', 'capability', 'beacon', 'close', 'error'
# and 'expired'.
class BoltStream extends StormData
async = require('async')
MuxDemux = require('mux-demux')
url = require('url')
http = require("http")
# @id     - peer cname (taken from its TLS certificate CN by the caller)
# @id     - unique name for this bolt; @stream - established TLS socket;
# @config - bolt settings (allowedPorts, beaconInterval/Retry, ...)
constructor: (@id, @stream, @config) ->
@ready = false
@capability = []
@monitoring = false
@stream.setKeepAlive(true, 60 * 1000) #Send keep-alive every 60 seconds
#@stream.setEncoding 'utf8'
@stream.on 'error', (err) =>
@log "issue with underlying bolt stream...", err
@destroy()
@emit 'error', err
@stream.on 'close', =>
@log "bolt stream closed for #{@id}"
@destroy()
@emit 'close'
# bi-directional multiplexing over the single TLS stream
@stream.pipe(@mux = MuxDemux()).pipe(@stream)
cstream = @mux.createReadStream 'capability'
cstream.on 'data', (capa) =>
@log "received capability info from peer: #{capa}"
# capability arrives as a comma-separated list of port numbers
@capability = capa.split(',').map (entry) -> (Number) entry
@emit 'capability', capa
# the first capability report marks this bolt as ready for use
unless @ready
@ready = true
@emit 'ready'
@mux.on 'error', (err) =>
@log "issue with bolt mux channel...", err
@destroy()
@emit 'error', err
@mux.on 'connection', @handleAction
super @id,
cname: @id
remote: @stream.remoteAddress
# Dispatcher for incoming mux sub-streams; _stream.meta is
# 'action:target' with action one of capability/beacon/relay.
handleAction: (_stream) =>
[ action, target ] = _stream.meta.split(':')
@log "bolt-mux-connection: action=#{action} target=#{target}"
_stream.on 'error', (err) =>
@log "bolt-mux-connection: mux stream #{_stream.meta} encountered error:"+err
_forwardingPorts = @config?.allowedPorts or []
switch action
when 'capability'
# advertise which local ports this side accepts relays for
@log 'sending capability information...'
_stream.write _forwardingPorts.join(',') if _forwardingPorts? and _forwardingPorts instanceof Array
_stream.end()
when 'beacon'
[ bsent, breply ] = [ 0 , 0 ]
_stream.on 'data', (data) =>
breply++
@log "received beacon reply: #{data}"
@log 'sending beacons...'
# keep sending beacons until the peer falls beaconRetry replies behind
async.whilst(
() => # test to make sure deviation between sent and received does not exceed beaconRetry
bsent - breply < @config?.beaconRetry
(repeat) => # send some beacons
@log "sending beacon..."
_stream.write "Beacon"
bsent++
setTimeout(repeat, @config?.beaconInterval * 1000)
(err) => # finally
err ?= "beacon retry timeout, server no longer responding"
@log "final call on sending beacons, exiting with: " + (err ? "no errors")
@destroy()
)
when 'relay'
# perform an HTTP request to localhost:<target> on behalf of the peer
target = (Number) target
unless target in _forwardingPorts
@log "request for relay to unsupported target port: #{target}"
_stream.end()
break
incoming = ''
request = null
_stream.on 'data', (chunk) =>
# first chunk is the JSON request preamble; later chunks are body data
unless request
try
@log "request received: "+chunk
request = JSON.parse chunk
catch err
@log "invalid relay request!"
_stream.end()
else
@log "received some data: "+chunk
incoming += chunk
_stream.on 'end', =>
@log "relaying following request to localhost:#{target} - ", request
if typeof request.url is 'object'
roptions = url.format request.url
else
roptions = url.parse request.url
roptions.method = request.method
# hard coded the header option..
roptions.headers =
'Content-Type':'application/json'
roptions.agent = false
roptions.port ?= target
#@log JSON.stringify roptions
timeout = false
relay = http.request roptions, (reply) =>
unless timeout
@log "sending back reply"
reply.setEncoding 'utf8'
try
# write the JSON status/header preamble, then pipe the body back
_stream.write JSON.stringify
statusCode: reply.statusCode,
headers: reply.headers
reply.pipe(_stream, {end:true})
catch err
@log "unable to write response back to requestor upstream bolt! error: " + err
relay.write JSON.stringify request.data if request.data?
relay.end()
relay.on 'end', =>
@log "no more data"
# answer 408 if the local request takes longer than 20 seconds
relay.setTimeout 20000, =>
@log "error during performing relay action! request timedout."
timeout = true
try
_stream.write JSON.stringify
statusCode: 408,
headers: null
_stream.end()
catch err
@log "unable to write response code back to requestor upstream bolt! error: " + err
@log "[relay request timed out, sending 408]"
relay.on 'error', (err) =>
@log "[relay request failed with following error]"
@log err
try
_stream.write JSON.stringify
statusCode: 500,
headers: null
_stream.end()
catch err
@log "unable to write response code back to requestor upstream bolt! error: " + err
@log "[relay request error, sending 500]"
else
@log "unsupported action/target supplied by mux connection: #{action}/#{target}"
_stream.end()
# Count down this bolt's validity and destroy it unless the peer keeps
# sending beacons. interval is the check period (milliseconds, per the
# setTimeout below); period is the validity budget in seconds.
monitor: (interval, period) ->
return if @monitoring
@monitoring = true
validity = period
# setup the beacon channel with the peer and start collecting beacons
bstream = @mux.createStream 'beacon', { allowHalfOpen:true }
bstream.on 'data', (beacon) =>
@log "monitor - received beacon from client: #{@id}"
bstream.write "beacon:reply"
@emit 'beacon', beacon
validity = period # reset
# start the validity count-down...
async.whilst(
() => # test condition
validity > 0 and @monitoring and @ready
(repeat) =>
validity -= interval / 1000
@log "monitor - #{@id} has validity=#{validity}"
setTimeout repeat, interval
(err) =>
@log "monitor - #{@id} has expired and being destroyed..."
@destroy()
@emit 'expired'
@monitoring = false
)
# Forward an HTTP request (and optional response stream) across this
# bolt. Returns the relay mux stream; emits 'reply' on it when the far
# side finishes. Throws when the bolt is not ready or the target port
# is not in the peer's advertised capability.
relay: (request,response) ->
unless @ready
throw new Error "cannot relay to unready boltstream..."
@log "relay - forwarding request to #{@id} at #{@stream.remoteAddress} for #{request.url}"
unless request.target in @capability
throw new Error "unable to forward request to #{@id} for unsupported port: #{request.target}"
relay = @mux.createStream("relay:#{request.target}", {allowHalfOpen:true})
unless request.url
@log "no request.url is set!"
request.url = '/'
if typeof request.url is 'string'
# normalize to an absolute path before forwarding
url = require('url').parse request.url
url.pathname = '/'+url.pathname unless /^\//.test url.pathname
url.path = '/'+url.path unless /^\//.test url.pathname
request.url = require('url').format url
# always start by writing the preamble message to the other end
relay.write JSON.stringify
method: request.method
url: request.url
port: request.port
data: request.data
request.on 'error', (err) =>
@log "error relaying request via boltstream...", err
relay.destroy()
relay.on 'error', (err) ->
@log "error during relay multiplexing boltstream...", err
#request.pipe(relay)
relay.end()
# always get the reply preamble message from the other end
reply =
header: null
body: ''
relay.on 'data', (chunk) =>
try
unless reply.header
reply.header = JSON.parse chunk
# pipe relay into response if response stream is provided
if response? and response.writeHead?
response.writeHead reply.header.statusCode, reply.header.headers
relay.pipe(response)
else
unless response?
reply.body+=chunk
catch err
@log "invalid relay response received from #{@id}:", err
relay.end()
relay.on 'end', =>
relay.emit 'reply', reply
return relay
# Release mux and TLS resources and clear active flags; failures are
# logged but never propagated.
destroy: ->
try
@ready = @monitoring = false
@mux.destroy()
@stream.destroy()
catch err
@log "unable to properly terminate bolt stream: #{@id}", err
StormRegistry = StormAgent.StormRegistry
class BoltRegistry extends StormRegistry
constructor: (filename) ->
@on 'removed', (bolt) ->
bolt.destroy() if bolt?
super filename
get: (key) ->
entry = super key
return unless entry?
cname: key
ports: entry.capability
address: entry.data.remote if entry.data?
validity: entry.validity
#-----------------------------------------------------------------
class StormBolt extends StormAgent
validate = require('json-schema').validate
tls = require("tls")
fs = require("fs")
http = require("http")
url = require('url')
MuxDemux = require('mux-demux')
async = require('async')
extend = require('util')._extend
schema =
name: "storm"
type: "object"
additionalProperties: true
properties:
cert: { type: "any", required: true }
key: { type: "any", required: true }
ca: { type: "any", required: true }
uplinks: { type: "array" }
uplinkStrategy: { type: "string" }
allowRelay: { type: "boolean" }
relayPort: { type: "integer" }
allowedPorts: { type: "array" }
listenPort: { type: "integer" }
beaconValidity: { type: "integer" }
beaconInterval: { type: "integer" }
beaconRetry: { type: "integer" }
constructor: (config) ->
super config
# key routine to import itself into agent base
@import module
@repeatInterval = 5 # in seconds
@clients = new BoltRegistry
@state.haveCredentials = false
if @config.insecure
#Workaround - fix it later, Avoids DEPTH_ZERO_SELF_SIGNED_CERT error for self-signed certs
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0"
status: ->
state = super
state.uplink = @uplink ? null
state.clients = @clients.list()
state
run: (config) ->
# first try using the passed in config, get it validated and start the underlying agent
super config, schema
async.until(
() => # test condition
@state.haveCredentials
(repeat) => # repeat function
try
@log 'run - validating security credentials...'
unless @config.cert instanceof Buffer
@config.cert = fs.readFileSync "#{@config.cert}",'utf8'
unless @config.key instanceof Buffer
@config.key = fs.readFileSync "#{@config.key}",'utf8'
unless @config.ca instanceof Buffer
ca = []
chain = fs.readFileSync "#{@config.ca}", 'utf8'
chain = chain.split "\n"
cacert = []
for line in chain when line.length isnt 0
cacert.push line
if line.match /-END CERTIFICATE-/
ca.push cacert.join "\n"
cacert = []
@config.ca = ca
# if we get here, we've got something
@state.haveCredentials = true
repeat()
catch err
@log "run - missing proper security credentials, attempting to self-configure..."
storm = null
### uncomment during dev/testing
storm =
tracker: "https://stormtracker.dev.intercloud.net"
skey: "some-serial-number"
token:"some-valid-token"
###
@activate storm, (storm) =>
# first, validate whether the storm config is proper
if @validate storm.bolt, schema
@config = extend @config, storm.bolt
repeat()
else
@log "invalid 'storm.bolt' configuration retrieved during activation! (retry in 30 seconds)"
@state.activated = false
setTimeout repeat, 30000
(err) =>
if err? and err instanceof Error
@log "FATAL ERROR during stormbolt.run!"
return throw err
# here we start the main run logic for stormbolt
# check for bolt server config
if @config.listenPort? and @config.listenPort > 0
server = @listen @config.listenPort,
key: @config.key
cert: @config.cert
ca: @config.ca
requestCert: true
rejectUnauthorized: true, (bolt) =>
bolt.once 'ready', =>
# starts the bolt self-monitoring and initiates beacons request
bolt.monitor @config.repeatdelay, @config.beaconValidity
# after initialization complete, THEN we add to our clients!
@clients.add bolt.id, bolt
# we register for bolt close/error event only after it's ready and added...
bolt.on 'close', (err) =>
@log "bolt.close on #{bolt.id}:",err
@clients.remove bolt.id
bolt.on 'error', (err) =>
@log "bolt.error on #{bolt.id}:",err
@clients.remove bolt.id
server.on 'error', (err) =>
@log "fatal issue with bolt server: "+err
@clients.running = false
@emit 'server.error', err
# start client connection expiry checker
#
# XXX - this is no longer needed since each BoltStream self monitors!
#@clients.expires @config.repeatdelay
# check for client uplink to bolt server
if @config.uplinks? and @config.uplinks.length > 0
[ i, retries ] = [ 0, 0 ]
@connected = false
async.forever(
(next) =>
next new Error "retry max exceeded, unable to establish bolt server connection" if retries > 30
async.until(
() =>
@connected
(repeat) =>
uplink = @config.uplinks[i++]
[ host, port ] = uplink.split(':')
port ?= 443 # default port to try
@connect host,port,
key: @config.key
cert: @config.cert
ca: @config.ca
requestCert: true, (bolt) =>
unless bolt instanceof Error
bolt.once 'ready', =>
@connected = true
retries = 0
bolt.once 'close', (err) =>
@log "bolt.error on #{bolt.id}:",err
@connected = false
bolt.once 'error', (err) =>
@log "bolt.error on #{bolt.id}:",err
@connected = false
i = 0 unless i < @config.uplinks.length
setTimeout(repeat, 5000)
(err) =>
setTimeout(next, 5000)
)
(err) =>
@emit 'error', err if err?
)
# check for running the relay proxy
@proxy(@config.relayPort) if @config.allowRelay
)
# register one-time event handler for the overall agent... NOT SURE IF NEEDED!
@once "error", (err) =>
@log "run - bolt fizzled... should do something smart here"
proxy: (port) ->
unless port? and port > 0
@log "need to pass in valid port for performing relay"
return
@log 'starting the proxy relay on port ' + port
# after initial data, invoke HTTP server listener on port
acceptor = http.createServer().listen(port)
acceptor.on "request", (request,response) =>
target = request.headers['stormbolt-target']
[ cname, port ] = target.split(':') if target
entry = @clients.entries[cname]
unless entry and port in entry.capability
error = "stormfbolt-target [#{target}] cannot be reached!"
@log "error:", error
response.writeHead(404, {
'Content-Length': error.length,
'Content-Type': 'application/json',
'Connection': 'close' })
response.end(error,"utf8")
return
@log "[proxy] forwarding request to #{cname} #{entry.stream.remoteAddress}"
request.target = port
entry.relay request, response
# Method to start bolt server
listen: (port, options, callback) ->
@log "server port:" + port
#@log "options: " + @inspect options
server = tls.createServer options, (stream) =>
stream.on 'error', (err) =>
@log "unhandled exception with TLS...", err
stream.end()
try
@log "TLS connection established with VCG client from: " + stream.remoteAddress
certObj = stream.getPeerCertificate()
cname = certObj.subject.CN
@log "server connected from #{cname}: " + stream.authorized ? 'unauthorized'
callback? new BoltStream cname, stream, @config
catch error
@log 'unable to retrieve peer certificate and authorize connection!', error
stream.end()
server.on 'clientError', (exception) =>
@log 'TLS handshake error:', exception
server.on 'error', (err) =>
@log 'TLS server connection error :' + err.message
try
message = String(err.message)
if (message.indexOf ('ECONNRESET')) >= 0
@log 'throw error: ' + 'ECONNRESET'
throw new Error err
catch e
@log 'error e' + e
#process.exit(1)
server.listen port
return server
#Method to start bolt client
connect: (host, port, options, callback) ->
tls.SLAB_BUFFER_SIZE = 100 * 1024
# try to connect to the server
@log "making connection to bolt server at: "+host+':'+port
#@log @inspect options
calledReconnectOnce = false
stream = tls.connect(port, host, options, =>
@uplink =
host: host
port: port
try
@log "TLS connection established with bolt server from: " + stream.remoteAddress
certObj = stream.getPeerCertificate()
cname = certObj.subject.CN
@log "client connected to #{cname}: " + stream.authorized ? 'unauthorized'
callback? new BoltStream cname, stream, @config
catch error
@log 'unable to retrieve peer certificate and authorize connection!', error
stream.end()
callback? new Error "unable to establish bolt connection to server"
)
module.exports = StormBolt
#-------------------------------------------------------------------------------------------
if require.main is module
###
argv = require('minimist')(process.argv.slice(2))
if argv.h?
console.log """
-h view this help
-p port number
-l logfile
-d datadir
"""
return
config = {}
config.port = argv.p ? 5000
config.logfile = argv.l ? "/var/log/stormbolt.log"
config.datadir = argv.d ? "/var/stormstack"
###
config = null
storm = null # override during dev
agent = new StormBolt config
agent.run storm
StormAgent = require 'stormagent'
StormData = StormAgent.StormData
# XXX - for now, only representing the server-side... will refactor for client-side later
class BoltStream extends StormData
async = require('async')
MuxDemux = require('mux-demux')
url = require('url')
http = require("http")
constructor: (@id, @stream, @config) ->
@ready = false
@capability = []
@monitoring = false
@stream.setKeepAlive(true, 60 * 1000) #Send keep-alive every 60 seconds
#@stream.setEncoding 'utf8'
@stream.on 'error', (err) =>
@log "issue with underlying bolt stream...", err
@destroy()
@emit 'error', err
@stream.on 'close', =>
@log "bolt stream closed for #{@id}"
@destroy()
@emit 'close'
@stream.pipe(@mux = MuxDemux()).pipe(@stream)
cstream = @mux.createReadStream 'capability'
cstream.on 'data', (capa) =>
@log "received capability info from peer: #{capa}"
@capability = capa.split(',').map (entry) -> (Number) entry
@emit 'capability', capa
unless @ready
@ready = true
@emit 'ready'
@mux.on 'error', (err) =>
@log "issue with bolt mux channel...", err
@destroy()
@emit 'error', err
@mux.on 'connection', @handleAction
super @id,
cname: @id
remote: @stream.remoteAddress
handleAction: (_stream) =>
[ action, target ] = _stream.meta.split(':')
@log "bolt-mux-connection: action=#{action} target=#{target}"
_stream.on 'error', (err) =>
@log "bolt-mux-connection: mux stream #{_stream.meta} encountered error:"+err
_forwardingPorts = @config?.allowedPorts or []
switch action
when 'capability'
@log 'sending capability information...'
_stream.write _forwardingPorts.join(',') if _forwardingPorts? and _forwardingPorts instanceof Array
_stream.end()
when 'beacon'
[ bsent, breply ] = [ 0 , 0 ]
_stream.on 'data', (data) =>
breply++
@log "received beacon reply: #{data}"
@log 'sending beacons...'
async.whilst(
() => # test to make sure deviation between sent and received does not exceed beaconRetry
bsent - breply < @config?.beaconRetry
(repeat) => # send some beacons
@log "sending beacon..."
_stream.write "Beacon"
bsent++
setTimeout(repeat, @config?.beaconInterval * 1000)
(err) => # finally
err ?= "beacon retry timeout, server no longer responding"
@log "final call on sending beacons, exiting with: " + (err ? "no errors")
@destroy()
)
when 'relay'
target = (Number) target
unless target in _forwardingPorts
@log "request for relay to unsupported target port: #{target}"
_stream.end()
break
incoming = ''
request = null
_stream.on 'data', (chunk) =>
unless request
try
@log "request received: "+chunk
request = JSON.parse chunk
catch err
@log "invalid relay request!"
_stream.end()
else
@log "received some data: "+chunk
incoming += chunk
_stream.on 'end', =>
@log "relaying following request to localhost:#{target} - ", request
if typeof request.url is 'object'
roptions = url.format request.url
else
roptions = url.parse request.url
roptions.method = request.method
# hard coded the header option..
roptions.headers =
'Content-Type':'application/json'
roptions.agent = false
roptions.port ?= target
#@log JSON.stringify roptions
timeout = false
relay = http.request roptions, (reply) =>
unless timeout
@log "sending back reply"
reply.setEncoding 'utf8'
try
_stream.write JSON.stringify
statusCode: reply.statusCode,
headers: reply.headers
reply.pipe(_stream, {end:true})
catch err
@log "unable to write response back to requestor upstream bolt! error: " + err
relay.write JSON.stringify request.data if request.data?
relay.end()
relay.on 'end', =>
@log "no more data"
relay.setTimeout 20000, =>
@log "error during performing relay action! request timedout."
timeout = true
try
_stream.write JSON.stringify
statusCode: 408,
headers: null
_stream.end()
catch err
@log "unable to write response code back to requestor upstream bolt! error: " + err
@log "[relay request timed out, sending 408]"
relay.on 'error', (err) =>
@log "[relay request failed with following error]"
@log err
try
_stream.write JSON.stringify
statusCode: 500,
headers: null
_stream.end()
catch err
@log "unable to write response code back to requestor upstream bolt! error: " + err
@log "[relay request error, sending 500]"
else
@log "unsupported action/target supplied by mux connection: #{action}/#{target}"
_stream.end()
monitor: (interval, period) ->
return if @monitoring
@monitoring = true
validity = period
# setup the beacon channel with the peer and start collecting beacons
bstream = @mux.createStream 'beacon', { allowHalfOpen:true }
bstream.on 'data', (beacon) =>
@log "monitor - received beacon from client: #{@id}"
bstream.write "beacon:reply"
@emit 'beacon', beacon
validity = period # reset
# start the validity count-down...
async.whilst(
() => # test condition
validity > 0 and @monitoring and @ready
(repeat) =>
validity -= interval / 1000
@log "monitor - #{@id} has validity=#{validity}"
setTimeout repeat, interval
(err) =>
@log "monitor - #{@id} has expired and being destroyed..."
@destroy()
@emit 'expired'
@monitoring = false
)
relay: (request,response) ->
unless @ready
throw new Error "cannot relay to unready boltstream..."
@log "relay - forwarding request to #{@id} at #{@stream.remoteAddress} for #{request.url}"
unless request.target in @capability
throw new Error "unable to forward request to #{@id} for unsupported port: #{request.target}"
relay = @mux.createStream("relay:#{request.target}", {allowHalfOpen:true})
unless request.url
@log "no request.url is set!"
request.url = '/'
if typeof request.url is 'string'
url = require('url').parse request.url
url.pathname = '/'+url.pathname unless /^\//.test url.pathname
url.path = '/'+url.path unless /^\//.test url.pathname
request.url = require('url').format url
# always start by writing the preamble message to the other end
relay.write JSON.stringify
method: request.method
url: request.url
port: request.port
data: request.data
request.on 'error', (err) =>
@log "error relaying request via boltstream...", err
relay.destroy()
relay.on 'error', (err) ->
@log "error during relay multiplexing boltstream...", err
#request.pipe(relay)
relay.end()
# always get the reply preamble message from the other end
reply =
header: null
body: ''
relay.on 'data', (chunk) =>
try
unless reply.header
reply.header = JSON.parse chunk
# pipe relay into response if response stream is provided
if response? and response.writeHead?
response.writeHead reply.header.statusCode, reply.header.headers
relay.pipe(response)
else
unless response?
reply.body+=chunk
catch err
@log "invalid relay response received from #{@id}:", err
relay.end()
relay.on 'end', =>
relay.emit 'reply', reply
return relay
destroy: ->
try
@ready = @monitoring = false
@mux.destroy()
@stream.destroy()
catch err
@log "unable to properly terminate bolt stream: #{@id}", err
StormRegistry = StormAgent.StormRegistry
class BoltRegistry extends StormRegistry
constructor: (filename) ->
@on 'removed', (bolt) ->
bolt.destroy() if bolt?
super filename
get: (key) ->
entry = super key
return unless entry?
cname: key
ports: entry.capability
address: entry.data.remote if entry.data?
validity: entry.validity
#-----------------------------------------------------------------
class StormBolt extends StormAgent
validate = require('json-schema').validate
tls = require("tls")
fs = require("fs")
http = require("http")
url = require('url')
MuxDemux = require('mux-demux')
async = require('async')
extend = require('util')._extend
schema =
name: "storm"
type: "object"
additionalProperties: true
properties:
cert: { type: "any", required: true }
key: { type: "any", required: true }
ca: { type: "any", required: true }
uplinks: { type: "array" }
uplinkStrategy: { type: "string" }
allowRelay: { type: "boolean" }
relayPort: { type: "integer" }
allowedPorts: { type: "array" }
listenPort: { type: "integer" }
beaconValidity: { type: "integer" }
beaconInterval: { type: "integer" }
beaconRetry: { type: "integer" }
constructor: (config) ->
super config
# key routine to import itself into agent base
@import module
@repeatInterval = 5 # in seconds
@clients = new BoltRegistry
@state.haveCredentials = false
if @config.insecure
#Workaround - fix it later, Avoids DEPTH_ZERO_SELF_SIGNED_CERT error for self-signed certs
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0"
status: ->
state = super
state.uplink = @uplink ? null
state.clients = @clients.list()
state
run: (config) ->
# first try using the passed in config, get it validated and start the underlying agent
super config, schema
async.until(
() => # test condition
@state.haveCredentials
(repeat) => # repeat function
try
@log 'run - validating security credentials...'
unless @config.cert instanceof Buffer
@config.cert = fs.readFileSync "#{@config.cert}",'utf8'
unless @config.key instanceof Buffer
@config.key = fs.readFileSync "#{@config.key}",'utf8'
unless @config.ca instanceof Buffer
ca = []
chain = fs.readFileSync "#{@config.ca}", 'utf8'
chain = chain.split "\n"
cacert = []
for line in chain when line.length isnt 0
cacert.push line
if line.match /-END CERTIFICATE-/
ca.push cacert.join "\n"
cacert = []
@config.ca = ca
# if we get here, we've got something
@state.haveCredentials = true
repeat()
catch err
@log "run - missing proper security credentials, attempting to self-configure..."
storm = null
### uncomment during dev/testing
storm =
tracker: "https://stormtracker.dev.intercloud.net"
skey: "some-serial-number"
token:"some-valid-token"
###
@activate storm, (storm) =>
# first, validate whether the storm config is proper
if @validate storm.bolt, schema
@config = extend @config, storm.bolt
repeat()
else
@log "invalid 'storm.bolt' configuration retrieved during activation! (retry in 30 seconds)"
@state.activated = false
setTimeout repeat, 30000
(err) =>
if err? and err instanceof Error
@log "FATAL ERROR during stormbolt.run!"
return throw err
# here we start the main run logic for stormbolt
# check for bolt server config
if @config.listenPort? and @config.listenPort > 0
server = @listen @config.listenPort,
key: @config.key
cert: @config.cert
ca: @config.ca
requestCert: true
rejectUnauthorized: true, (bolt) =>
bolt.once 'ready', =>
# starts the bolt self-monitoring and initiates beacons request
bolt.monitor @config.repeatdelay, @config.beaconValidity
# after initialization complete, THEN we add to our clients!
@clients.add bolt.id, bolt
# we register for bolt close/error event only after it's ready and added...
bolt.on 'close', (err) =>
@log "bolt.close on #{bolt.id}:",err
@clients.remove bolt.id
bolt.on 'error', (err) =>
@log "bolt.error on #{bolt.id}:",err
@clients.remove bolt.id
server.on 'error', (err) =>
@log "fatal issue with bolt server: "+err
@clients.running = false
@emit 'server.error', err
# start client connection expiry checker
#
# XXX - this is no longer needed since each BoltStream self monitors!
#@clients.expires @config.repeatdelay
# check for client uplink to bolt server
if @config.uplinks? and @config.uplinks.length > 0
[ i, retries ] = [ 0, 0 ]
@connected = false
async.forever(
(next) =>
next new Error "retry max exceeded, unable to establish bolt server connection" if retries > 30
async.until(
() =>
@connected
(repeat) =>
uplink = @config.uplinks[i++]
[ host, port ] = uplink.split(':')
port ?= 443 # default port to try
@connect host,port,
key: @config.key
cert: @config.cert
ca: @config.ca
requestCert: true, (bolt) =>
unless bolt instanceof Error
bolt.once 'ready', =>
@connected = true
retries = 0
bolt.once 'close', (err) =>
@log "bolt.error on #{bolt.id}:",err
@connected = false
bolt.once 'error', (err) =>
@log "bolt.error on #{bolt.id}:",err
@connected = false
i = 0 unless i < @config.uplinks.length
setTimeout(repeat, 5000)
(err) =>
setTimeout(next, 5000)
)
(err) =>
@emit 'error', err if err?
)
# check for running the relay proxy
@proxy(@config.relayPort) if @config.allowRelay
)
# register one-time event handler for the overall agent... NOT SURE IF NEEDED!
@once "error", (err) =>
@log "run - bolt fizzled... should do something smart here"
proxy: (port) ->
unless port? and port > 0
@log "need to pass in valid port for performing relay"
return
@log 'starting the proxy relay on port ' + port
# after initial data, invoke HTTP server listener on port
acceptor = http.createServer().listen(port)
acceptor.on "request", (request,response) =>
target = request.headers['stormbolt-target']
[ cname, port ] = target.split(':') if target
entry = @clients.entries[cname]
unless entry and port in entry.capability
error = "stormfbolt-target [#{target}] cannot be reached!"
@log "error:", error
response.writeHead(404, {
'Content-Length': error.length,
'Content-Type': 'application/json',
'Connection': 'close' })
response.end(error,"utf8")
return
@log "[proxy] forwarding request to #{cname} #{entry.stream.remoteAddress}"
request.target = port
entry.relay request, response
# Method to start bolt server
listen: (port, options, callback) ->
@log "server port:" + port
#@log "options: " + @inspect options
server = tls.createServer options, (stream) =>
stream.on 'error', (err) =>
@log "unhandled exception with TLS...", err
stream.end()
try
@log "TLS connection established with VCG client from: " + stream.remoteAddress
certObj = stream.getPeerCertificate()
cname = certObj.subject.CN
@log "server connected from #{cname}: " + stream.authorized ? 'unauthorized'
callback? new BoltStream cname, stream, @config
catch error
@log 'unable to retrieve peer certificate and authorize connection!', error
stream.end()
server.on 'clientError', (exception) =>
@log 'TLS handshake error:', exception
server.on 'error', (err) =>
@log 'TLS server connection error :' + err.message
try
message = String(err.message)
if (message.indexOf ('ECONNRESET')) >= 0
@log 'throw error: ' + 'ECONNRESET'
throw new Error err
catch e
@log 'error e' + e
#process.exit(1)
server.listen port
return server
#Method to start bolt client
connect: (host, port, options, callback) ->
tls.SLAB_BUFFER_SIZE = 100 * 1024
# try to connect to the server
@log "making connection to bolt server at: "+host+':'+port
#@log @inspect options
calledReconnectOnce = false
stream = tls.connect(port, host, options, =>
@uplink =
host: host
port: port
try
@log "TLS connection established with bolt server from: " + stream.remoteAddress
certObj = stream.getPeerCertificate()
cname = certObj.subject.CN
@log "client connected to #{cname}: " + stream.authorized ? 'unauthorized'
callback? new BoltStream cname, stream, @config
catch error
@log 'unable to retrieve peer certificate and authorize connection!', error
stream.end()
callback? new Error "unable to establish bolt connection to server"
)
module.exports = StormBolt
#-------------------------------------------------------------------------------------------
if require.main is module
###
argv = require('minimist')(process.argv.slice(2))
if argv.h?
console.log """
-h view this help
-p port number
-l logfile
-d datadir
"""
return
config = {}
config.port = argv.p ? 5000
config.logfile = argv.l ? "/var/log/stormbolt.log"
config.datadir = argv.d ? "/var/stormstack"
###
config = null
storm = null # override during dev
agent = new StormBolt config
agent.run storm
|
[
{
"context": "cs/#info.info\n\nFramer.Info =\n\ttitle: \"\"\n\tauthor: \"Tony\"\n\ttwitter: \"\"\n\tdescription: \"\"\n\n\n# Video Setup\nvi",
"end": 146,
"score": 0.9985781311988831,
"start": 142,
"tag": "NAME",
"value": "Tony"
}
] | 67hyper.framer/app.coffee | gremjua-forks/100daysofframer | 26 | # Project Info
# This info is presented in a widget when you share.
# http://framerjs.com/docs/#info.info
Framer.Info =
title: ""
author: "Tony"
twitter: ""
description: ""
# Video Setup
vid3 = new VideoLayer
width: 1920 * (Screen.height/1080)
height: Screen.height
video: "images/trim1.mp4"
vid3.x = vid3.originalX = -vid3.width/3
vid3.player.play()
Events.wrap(vid3.player).on "ended", ->
vid3.player.play()
vid2 = new VideoLayer
width: 1920 * (Screen.height/1080)
height: Screen.height
video: "images/trim2.mp4"
vid2.x = vid2.originalX = -vid2.width/2.7
vid2.player.play()
Events.wrap(vid2.player).on "ended", ->
vid2.player.play()
vid1 = new VideoLayer
width: 1920 * (Screen.height/1080)
height: Screen.height
video: "images/trim3.mp4"
vid1.x = vid1.originalX = -vid1.width/1.9
vid1.player.play()
Events.wrap(vid1.player).on "ended", ->
vid1.player.play()
# Text Vars
Framer["copy1"] = "Auto-Storify Your Videos"
Framer["copy2"] = "Auto-Add Effects and MetaData"
Framer["copy3"] = "Share Your Video Stories Today"
textStyle =
fontSize: "4.15rem"
lineHeight: "4.5rem"
textTransform: "uppercase"
fontFamily: "Montserrat, Helvetica Neue, sans-serif"
fontWeight: 700
textAlign: "center"
padding: "3rem"
letterSpacing: "-1px"
zIndex: 100
paddingTop: "#{Screen.height/2.2}px"
logoStyle =
fontSize: "4.2rem", zIndex: 101, textAlign: "center"
fontFamily: "Garamond", fontWeight: 900, fontStyle: "italic"
mixBlendMode: "overlay"
buttonStyle =
fontSize: "2.6rem", textAlign: "center"
textTransform: "uppercase"
fontFamily: "Montserrat, Helvetica Neue, sans-serif"
letterSpacing: "5px", color: "#86C9FE", zIndex: 101
fontWeight: 300
# Setup
page = new PageComponent
size: Screen
scrollVertical: false
for layer, t in [vid1, vid2, vid3]
Framer["label#{t+1}"] = new Layer
name: "label#{t+1}", parent: page.content
backgroundColor: "rgba(50,50,50,0.6)"
size: Screen
x: t * Screen.width
html: Framer["copy#{t+1}"]
style: textStyle
Framer["mask#{t+1}"] = new Layer
name: "mask#{t+1}", size: Screen, backgroundColor: ""
x: t * Screen.width
parent: page.content
clip: true
Framer["mask#{t+1}"].addChild(layer)
layer.player.play()
Framer["progressBar#{t+1}"] = new Layer
name: "progressBar#{t+1}"
width: 25, height: 40, backgroundColor: "#fff"
opacity: 0.4, x: (Screen.width/2 + 22) - t * 35
maxY: Screen.height - 125, originY: 0
Framer["progressBar#{t+1}"].originalX = (Screen.width/2 + 22) - t * 35
Framer["progressBar#{t+1}"].states.add on: height: 70, opacity: 0.8, maxY: Screen.height - 120
Framer["progressBar#{t+1}"].states.animationOptions =
curve: "spring(200, 20, 0)"
if t == 2
button = new Layer
width: Screen.width, parent: page.content,
y: Screen.height - 220
html: "Tap To Begin"
backgroundColor: "", x: t * Screen.width
style: buttonStyle
if t == 0
logo = new Layer
width: Screen.width
y: Align.center(-410), opacity: 1
html: "prescient", backgroundColor: null
style: logoStyle
Framer["progressBar3"].states.switch("on")
page.content.on "change:x", ->
if Framer["label2"].screenFrame.x < -3
for i in [1..3]
Framer["progressBar#{i}"].x = Framer["progressBar#{i}"].originalX + Framer["label2"].screenFrame.x
else
for i in [1..3]
Framer["progressBar#{i}"].x = Framer["progressBar#{i}"].originalX
for layer, t in [vid1, vid2, vid3]
Framer["offsetX#{t+1}"] = Framer["mask#{t+1}"].screenFrame.x
layer.x = layer.originalX + (-Framer["offsetX#{t+1}"])
page.on "change:currentPage", ->
if page.currentPage == Framer["label1"]
Framer["progressBar3"].states.switch("on")
Framer["progressBar2"].states.switch("default")
Framer["progressBar1"].states.switch("default")
if page.currentPage == Framer["label2"]
Framer["progressBar2"].states.switch("on")
Framer["progressBar1"].states.switch("default")
Framer["progressBar3"].states.switch("default")
if page.currentPage == Framer["label3"]
Framer["progressBar1"].states.switch("on")
| 203723 | # Project Info
# This info is presented in a widget when you share.
# http://framerjs.com/docs/#info.info
Framer.Info =
title: ""
author: "<NAME>"
twitter: ""
description: ""
# Video Setup
vid3 = new VideoLayer
width: 1920 * (Screen.height/1080)
height: Screen.height
video: "images/trim1.mp4"
vid3.x = vid3.originalX = -vid3.width/3
vid3.player.play()
Events.wrap(vid3.player).on "ended", ->
vid3.player.play()
vid2 = new VideoLayer
width: 1920 * (Screen.height/1080)
height: Screen.height
video: "images/trim2.mp4"
vid2.x = vid2.originalX = -vid2.width/2.7
vid2.player.play()
Events.wrap(vid2.player).on "ended", ->
vid2.player.play()
vid1 = new VideoLayer
width: 1920 * (Screen.height/1080)
height: Screen.height
video: "images/trim3.mp4"
vid1.x = vid1.originalX = -vid1.width/1.9
vid1.player.play()
Events.wrap(vid1.player).on "ended", ->
vid1.player.play()
# Text Vars
Framer["copy1"] = "Auto-Storify Your Videos"
Framer["copy2"] = "Auto-Add Effects and MetaData"
Framer["copy3"] = "Share Your Video Stories Today"
textStyle =
fontSize: "4.15rem"
lineHeight: "4.5rem"
textTransform: "uppercase"
fontFamily: "Montserrat, Helvetica Neue, sans-serif"
fontWeight: 700
textAlign: "center"
padding: "3rem"
letterSpacing: "-1px"
zIndex: 100
paddingTop: "#{Screen.height/2.2}px"
logoStyle =
fontSize: "4.2rem", zIndex: 101, textAlign: "center"
fontFamily: "Garamond", fontWeight: 900, fontStyle: "italic"
mixBlendMode: "overlay"
buttonStyle =
fontSize: "2.6rem", textAlign: "center"
textTransform: "uppercase"
fontFamily: "Montserrat, Helvetica Neue, sans-serif"
letterSpacing: "5px", color: "#86C9FE", zIndex: 101
fontWeight: 300
# Setup
page = new PageComponent
size: Screen
scrollVertical: false
for layer, t in [vid1, vid2, vid3]
Framer["label#{t+1}"] = new Layer
name: "label#{t+1}", parent: page.content
backgroundColor: "rgba(50,50,50,0.6)"
size: Screen
x: t * Screen.width
html: Framer["copy#{t+1}"]
style: textStyle
Framer["mask#{t+1}"] = new Layer
name: "mask#{t+1}", size: Screen, backgroundColor: ""
x: t * Screen.width
parent: page.content
clip: true
Framer["mask#{t+1}"].addChild(layer)
layer.player.play()
Framer["progressBar#{t+1}"] = new Layer
name: "progressBar#{t+1}"
width: 25, height: 40, backgroundColor: "#fff"
opacity: 0.4, x: (Screen.width/2 + 22) - t * 35
maxY: Screen.height - 125, originY: 0
Framer["progressBar#{t+1}"].originalX = (Screen.width/2 + 22) - t * 35
Framer["progressBar#{t+1}"].states.add on: height: 70, opacity: 0.8, maxY: Screen.height - 120
Framer["progressBar#{t+1}"].states.animationOptions =
curve: "spring(200, 20, 0)"
if t == 2
button = new Layer
width: Screen.width, parent: page.content,
y: Screen.height - 220
html: "Tap To Begin"
backgroundColor: "", x: t * Screen.width
style: buttonStyle
if t == 0
logo = new Layer
width: Screen.width
y: Align.center(-410), opacity: 1
html: "prescient", backgroundColor: null
style: logoStyle
Framer["progressBar3"].states.switch("on")
page.content.on "change:x", ->
if Framer["label2"].screenFrame.x < -3
for i in [1..3]
Framer["progressBar#{i}"].x = Framer["progressBar#{i}"].originalX + Framer["label2"].screenFrame.x
else
for i in [1..3]
Framer["progressBar#{i}"].x = Framer["progressBar#{i}"].originalX
for layer, t in [vid1, vid2, vid3]
Framer["offsetX#{t+1}"] = Framer["mask#{t+1}"].screenFrame.x
layer.x = layer.originalX + (-Framer["offsetX#{t+1}"])
page.on "change:currentPage", ->
if page.currentPage == Framer["label1"]
Framer["progressBar3"].states.switch("on")
Framer["progressBar2"].states.switch("default")
Framer["progressBar1"].states.switch("default")
if page.currentPage == Framer["label2"]
Framer["progressBar2"].states.switch("on")
Framer["progressBar1"].states.switch("default")
Framer["progressBar3"].states.switch("default")
if page.currentPage == Framer["label3"]
Framer["progressBar1"].states.switch("on")
| true | # Project Info
# This info is presented in a widget when you share.
# http://framerjs.com/docs/#info.info
Framer.Info =
title: ""
author: "PI:NAME:<NAME>END_PI"
twitter: ""
description: ""
# Video Setup
vid3 = new VideoLayer
width: 1920 * (Screen.height/1080)
height: Screen.height
video: "images/trim1.mp4"
vid3.x = vid3.originalX = -vid3.width/3
vid3.player.play()
Events.wrap(vid3.player).on "ended", ->
vid3.player.play()
vid2 = new VideoLayer
width: 1920 * (Screen.height/1080)
height: Screen.height
video: "images/trim2.mp4"
vid2.x = vid2.originalX = -vid2.width/2.7
vid2.player.play()
Events.wrap(vid2.player).on "ended", ->
vid2.player.play()
vid1 = new VideoLayer
width: 1920 * (Screen.height/1080)
height: Screen.height
video: "images/trim3.mp4"
vid1.x = vid1.originalX = -vid1.width/1.9
vid1.player.play()
Events.wrap(vid1.player).on "ended", ->
vid1.player.play()
# Text Vars
Framer["copy1"] = "Auto-Storify Your Videos"
Framer["copy2"] = "Auto-Add Effects and MetaData"
Framer["copy3"] = "Share Your Video Stories Today"
textStyle =
fontSize: "4.15rem"
lineHeight: "4.5rem"
textTransform: "uppercase"
fontFamily: "Montserrat, Helvetica Neue, sans-serif"
fontWeight: 700
textAlign: "center"
padding: "3rem"
letterSpacing: "-1px"
zIndex: 100
paddingTop: "#{Screen.height/2.2}px"
logoStyle =
fontSize: "4.2rem", zIndex: 101, textAlign: "center"
fontFamily: "Garamond", fontWeight: 900, fontStyle: "italic"
mixBlendMode: "overlay"
buttonStyle =
fontSize: "2.6rem", textAlign: "center"
textTransform: "uppercase"
fontFamily: "Montserrat, Helvetica Neue, sans-serif"
letterSpacing: "5px", color: "#86C9FE", zIndex: 101
fontWeight: 300
# Setup
page = new PageComponent
size: Screen
scrollVertical: false
for layer, t in [vid1, vid2, vid3]
Framer["label#{t+1}"] = new Layer
name: "label#{t+1}", parent: page.content
backgroundColor: "rgba(50,50,50,0.6)"
size: Screen
x: t * Screen.width
html: Framer["copy#{t+1}"]
style: textStyle
Framer["mask#{t+1}"] = new Layer
name: "mask#{t+1}", size: Screen, backgroundColor: ""
x: t * Screen.width
parent: page.content
clip: true
Framer["mask#{t+1}"].addChild(layer)
layer.player.play()
Framer["progressBar#{t+1}"] = new Layer
name: "progressBar#{t+1}"
width: 25, height: 40, backgroundColor: "#fff"
opacity: 0.4, x: (Screen.width/2 + 22) - t * 35
maxY: Screen.height - 125, originY: 0
Framer["progressBar#{t+1}"].originalX = (Screen.width/2 + 22) - t * 35
Framer["progressBar#{t+1}"].states.add on: height: 70, opacity: 0.8, maxY: Screen.height - 120
Framer["progressBar#{t+1}"].states.animationOptions =
curve: "spring(200, 20, 0)"
if t == 2
button = new Layer
width: Screen.width, parent: page.content,
y: Screen.height - 220
html: "Tap To Begin"
backgroundColor: "", x: t * Screen.width
style: buttonStyle
if t == 0
logo = new Layer
width: Screen.width
y: Align.center(-410), opacity: 1
html: "prescient", backgroundColor: null
style: logoStyle
Framer["progressBar3"].states.switch("on")
page.content.on "change:x", ->
if Framer["label2"].screenFrame.x < -3
for i in [1..3]
Framer["progressBar#{i}"].x = Framer["progressBar#{i}"].originalX + Framer["label2"].screenFrame.x
else
for i in [1..3]
Framer["progressBar#{i}"].x = Framer["progressBar#{i}"].originalX
for layer, t in [vid1, vid2, vid3]
Framer["offsetX#{t+1}"] = Framer["mask#{t+1}"].screenFrame.x
layer.x = layer.originalX + (-Framer["offsetX#{t+1}"])
page.on "change:currentPage", ->
if page.currentPage == Framer["label1"]
Framer["progressBar3"].states.switch("on")
Framer["progressBar2"].states.switch("default")
Framer["progressBar1"].states.switch("default")
if page.currentPage == Framer["label2"]
Framer["progressBar2"].states.switch("on")
Framer["progressBar1"].states.switch("default")
Framer["progressBar3"].states.switch("default")
if page.currentPage == Framer["label3"]
Framer["progressBar1"].states.switch("on")
|
[
{
"context": "r: process.env.SIMPLEFIN_ACCESS_USER\n pass: process.env.SIMPLEFIN_ACCESS_PASSWORD\n }\n url: proce",
"end": 761,
"score": 0.8258737325668335,
"start": 750,
"tag": "PASSWORD",
"value": "process.env"
}
] | sync.coffee | jayjanssen/simplefin-ynab | 0 | #!/usr/bin/env coffee
util = require 'util'
request = require 'request-promise-native'
ynab = require 'ynab'
moment = require 'moment'
md5 = require 'md5'
do () -> try
required_envs = [
'SIMPLEFIN_ACCESS_USER'
'SIMPLEFIN_ACCESS_PASSWORD'
'SIMPLEFIN_ACCESS_URL'
'SIMPLEFIN_ACCOUNT_NAME'
'TRANSACTION_AGE_DAYS'
'YNAB_API_TOKEN'
'YNAB_BUDGET_NAME'
'YNAB_ACCOUNT_NAME'
]
missing_envs = required_envs.filter (env) -> not process.env[env]?
if missing_envs.length > 0
throw new Error "Missing envs: #{missing_envs.join ', '}"
start = moment().subtract(process.env.TRANSACTION_AGE_DAYS, 'days')
try
data = await request.get {
auth: {
user: process.env.SIMPLEFIN_ACCESS_USER
pass: process.env.SIMPLEFIN_ACCESS_PASSWORD
}
url: process.env.SIMPLEFIN_ACCESS_URL + "/accounts?start-date=#{start.format 'X'}"
json: true
}
catch err
throw new Error "Got status code: #{err.statusCode} from Simplefin, check your credentials!" if err.name == 'StatusCodeError'
throw err
simplefin_account = data.accounts.find (a) -> a.name == process.env.SIMPLEFIN_ACCOUNT_NAME
throw new Error "Account: #{process.env.SIMPLEFIN_ACCOUNT_NAME} not found, set SIMPLEFIN_ACCOUNT_NAME env to one of: [#{data.accounts.map((a) -> a.name).join ', '}]" unless simplefin_account?
transactions = simplefin_account.transactions
console.log util.inspect transactions, {depth: 5}
ynab_api = new ynab.API process.env.YNAB_API_TOKEN
budgets = await ynab_api.budgets.getBudgets()
budget = budgets.data.budgets.find (b) -> b.name == process.env.YNAB_BUDGET_NAME
throw new Error "Budget: #{process.env.YNAB_BUDGET_NAME} not found, set YNAB_BUDGET_NAME env to one of: [#{budgets.data.budgets.map((b) -> b.name).join ', '}]" unless budget?
accounts = await ynab_api.accounts.getAccounts budget.id
account = accounts.data.accounts.find (a) -> a.name == process.env.YNAB_ACCOUNT_NAME
throw new Error "Account: #{process.env.YNAB_ACCOUNT_NAME} not found, set YNAB_ACCOUNT_NAME env to one of: [#{accounts.data.accounts.map((a) -> a.name).join ', '}]" unless account?
# console.log util.inspect account
ynab_trx = transactions
# Entry Memos are temporary and replaced with the actual transaction later. Don't write them to YNAB.
.filter (t) -> !t.description.includes "Entry Memo Posted Today"
# Skip Pre-authoriations
.filter (t) -> !t.description.includes "Pre auth"
# Transform simplefin transaction into ynab transaction
.map (t) ->
{
account_id: account.id
date: moment.unix(t.posted).format 'YYYY-MM-DD'
amount: Math.round t.amount * 1000 # convert to milliunits
payee_name: t.payee
# payee_name: if payee = t.description.match(/Payee\:\s+(.+)$/) then payee[1].substr(0,50) else t.description.substr(0,50)
memo: t.description
import_id: md5 t.id
cleared: "cleared"
}
total = duplicate = 0
for transaction from ynab_trx
try
await ynab_api.transactions.createTransactions budget.id, {transaction}
total += 1
catch err
if err.error.name == 'conflict' and err.error.detail.includes 'same import_id'
duplicate += 1
else
console.error err
console.error util.inspect transaction
console.log "Imported #{total}, Duplicate #{duplicate}"
catch err
console.error err
process.exit 1
| 94008 | #!/usr/bin/env coffee
util = require 'util'
request = require 'request-promise-native'
ynab = require 'ynab'
moment = require 'moment'
md5 = require 'md5'
do () -> try
required_envs = [
'SIMPLEFIN_ACCESS_USER'
'SIMPLEFIN_ACCESS_PASSWORD'
'SIMPLEFIN_ACCESS_URL'
'SIMPLEFIN_ACCOUNT_NAME'
'TRANSACTION_AGE_DAYS'
'YNAB_API_TOKEN'
'YNAB_BUDGET_NAME'
'YNAB_ACCOUNT_NAME'
]
missing_envs = required_envs.filter (env) -> not process.env[env]?
if missing_envs.length > 0
throw new Error "Missing envs: #{missing_envs.join ', '}"
start = moment().subtract(process.env.TRANSACTION_AGE_DAYS, 'days')
try
data = await request.get {
auth: {
user: process.env.SIMPLEFIN_ACCESS_USER
pass: <PASSWORD>.SIMPLEFIN_ACCESS_PASSWORD
}
url: process.env.SIMPLEFIN_ACCESS_URL + "/accounts?start-date=#{start.format 'X'}"
json: true
}
catch err
throw new Error "Got status code: #{err.statusCode} from Simplefin, check your credentials!" if err.name == 'StatusCodeError'
throw err
simplefin_account = data.accounts.find (a) -> a.name == process.env.SIMPLEFIN_ACCOUNT_NAME
throw new Error "Account: #{process.env.SIMPLEFIN_ACCOUNT_NAME} not found, set SIMPLEFIN_ACCOUNT_NAME env to one of: [#{data.accounts.map((a) -> a.name).join ', '}]" unless simplefin_account?
transactions = simplefin_account.transactions
console.log util.inspect transactions, {depth: 5}
ynab_api = new ynab.API process.env.YNAB_API_TOKEN
budgets = await ynab_api.budgets.getBudgets()
budget = budgets.data.budgets.find (b) -> b.name == process.env.YNAB_BUDGET_NAME
throw new Error "Budget: #{process.env.YNAB_BUDGET_NAME} not found, set YNAB_BUDGET_NAME env to one of: [#{budgets.data.budgets.map((b) -> b.name).join ', '}]" unless budget?
accounts = await ynab_api.accounts.getAccounts budget.id
account = accounts.data.accounts.find (a) -> a.name == process.env.YNAB_ACCOUNT_NAME
throw new Error "Account: #{process.env.YNAB_ACCOUNT_NAME} not found, set YNAB_ACCOUNT_NAME env to one of: [#{accounts.data.accounts.map((a) -> a.name).join ', '}]" unless account?
# console.log util.inspect account
ynab_trx = transactions
# Entry Memos are temporary and replaced with the actual transaction later. Don't write them to YNAB.
.filter (t) -> !t.description.includes "Entry Memo Posted Today"
# Skip Pre-authoriations
.filter (t) -> !t.description.includes "Pre auth"
# Transform simplefin transaction into ynab transaction
.map (t) ->
{
account_id: account.id
date: moment.unix(t.posted).format 'YYYY-MM-DD'
amount: Math.round t.amount * 1000 # convert to milliunits
payee_name: t.payee
# payee_name: if payee = t.description.match(/Payee\:\s+(.+)$/) then payee[1].substr(0,50) else t.description.substr(0,50)
memo: t.description
import_id: md5 t.id
cleared: "cleared"
}
total = duplicate = 0
for transaction from ynab_trx
try
await ynab_api.transactions.createTransactions budget.id, {transaction}
total += 1
catch err
if err.error.name == 'conflict' and err.error.detail.includes 'same import_id'
duplicate += 1
else
console.error err
console.error util.inspect transaction
console.log "Imported #{total}, Duplicate #{duplicate}"
catch err
console.error err
process.exit 1
| true | #!/usr/bin/env coffee
util = require 'util'
request = require 'request-promise-native'
ynab = require 'ynab'
moment = require 'moment'
md5 = require 'md5'
do () -> try
required_envs = [
'SIMPLEFIN_ACCESS_USER'
'SIMPLEFIN_ACCESS_PASSWORD'
'SIMPLEFIN_ACCESS_URL'
'SIMPLEFIN_ACCOUNT_NAME'
'TRANSACTION_AGE_DAYS'
'YNAB_API_TOKEN'
'YNAB_BUDGET_NAME'
'YNAB_ACCOUNT_NAME'
]
missing_envs = required_envs.filter (env) -> not process.env[env]?
if missing_envs.length > 0
throw new Error "Missing envs: #{missing_envs.join ', '}"
start = moment().subtract(process.env.TRANSACTION_AGE_DAYS, 'days')
try
data = await request.get {
auth: {
user: process.env.SIMPLEFIN_ACCESS_USER
pass: PI:PASSWORD:<PASSWORD>END_PI.SIMPLEFIN_ACCESS_PASSWORD
}
url: process.env.SIMPLEFIN_ACCESS_URL + "/accounts?start-date=#{start.format 'X'}"
json: true
}
catch err
throw new Error "Got status code: #{err.statusCode} from Simplefin, check your credentials!" if err.name == 'StatusCodeError'
throw err
simplefin_account = data.accounts.find (a) -> a.name == process.env.SIMPLEFIN_ACCOUNT_NAME
throw new Error "Account: #{process.env.SIMPLEFIN_ACCOUNT_NAME} not found, set SIMPLEFIN_ACCOUNT_NAME env to one of: [#{data.accounts.map((a) -> a.name).join ', '}]" unless simplefin_account?
transactions = simplefin_account.transactions
console.log util.inspect transactions, {depth: 5}
ynab_api = new ynab.API process.env.YNAB_API_TOKEN
budgets = await ynab_api.budgets.getBudgets()
budget = budgets.data.budgets.find (b) -> b.name == process.env.YNAB_BUDGET_NAME
throw new Error "Budget: #{process.env.YNAB_BUDGET_NAME} not found, set YNAB_BUDGET_NAME env to one of: [#{budgets.data.budgets.map((b) -> b.name).join ', '}]" unless budget?
accounts = await ynab_api.accounts.getAccounts budget.id
account = accounts.data.accounts.find (a) -> a.name == process.env.YNAB_ACCOUNT_NAME
throw new Error "Account: #{process.env.YNAB_ACCOUNT_NAME} not found, set YNAB_ACCOUNT_NAME env to one of: [#{accounts.data.accounts.map((a) -> a.name).join ', '}]" unless account?
# console.log util.inspect account
ynab_trx = transactions
# Entry Memos are temporary and replaced with the actual transaction later. Don't write them to YNAB.
.filter (t) -> !t.description.includes "Entry Memo Posted Today"
# Skip Pre-authoriations
.filter (t) -> !t.description.includes "Pre auth"
# Transform simplefin transaction into ynab transaction
.map (t) ->
{
account_id: account.id
date: moment.unix(t.posted).format 'YYYY-MM-DD'
amount: Math.round t.amount * 1000 # convert to milliunits
payee_name: t.payee
# payee_name: if payee = t.description.match(/Payee\:\s+(.+)$/) then payee[1].substr(0,50) else t.description.substr(0,50)
memo: t.description
import_id: md5 t.id
cleared: "cleared"
}
total = duplicate = 0
for transaction from ynab_trx
try
await ynab_api.transactions.createTransactions budget.id, {transaction}
total += 1
catch err
if err.error.name == 'conflict' and err.error.detail.includes 'same import_id'
duplicate += 1
else
console.error err
console.error util.inspect transaction
console.log "Imported #{total}, Duplicate #{duplicate}"
catch err
console.error err
process.exit 1
|
[
{
"context": "VEL='error'\n process.env.FOURSQUARE_CLIENT_ID='foobar1'\n process.env.FOURSQUARE_CLIENT_SECRET='foobar",
"end": 478,
"score": 0.9260905981063843,
"start": 471,
"tag": "USERNAME",
"value": "foobar1"
},
{
"context": "oobar1'\n process.env.FOURSQUARE_CLIENT_SECRE... | test/foursquare-lunch-slack-test.coffee | stephenyeargin/hubot-foursquare-lunch | 0 | Helper = require('hubot-test-helper')
chai = require 'chai'
nock = require 'nock'
expect = chai.expect
helper = new Helper [
'adapters/slack.coffee',
'../src/foursquare-lunch.coffee'
]
# Alter time as test runs
originalDateNow = Date.now
mockDateNow = () ->
return Date.parse('Tue Mar 30 2018 14:10:00 GMT-0500 (CDT)')
describe 'hubot-foursquare-lunch for slack', ->
beforeEach ->
process.env.HUBOT_LOG_LEVEL='error'
process.env.FOURSQUARE_CLIENT_ID='foobar1'
process.env.FOURSQUARE_CLIENT_SECRET='foobar2'
process.env.HUBOT_DEFAULT_LATITUDE=36.1514179
process.env.HUBOT_DEFAULT_LONGITUDE=-86.8262359
Date.now = mockDateNow
nock.disableNetConnect()
@room = helper.createRoom()
afterEach ->
delete process.env.HUBOT_LOG_LEVEL
delete process.env.FOURSQUARE_CLIENT_ID
delete process.env.FOURSQUARE_CLIENT_SECRET
delete process.env.HUBOT_DEFAULT_LATITUDE
delete process.env.HUBOT_DEFAULT_LONGITUDE
Date.now = originalDateNow
nock.cleanAll()
@room.destroy()
# hubot lunch
it 'responds with a lunch location', (done) ->
nock('https://api.foursquare.com')
.get('/v2/venues/explore')
.query(
price: '1,2,3',
openNow: true,
query: 'lunch',
radius: 1600,
ll: '36.1514179,-86.8262359',
client_id: 'foobar1',
client_secret: 'foobar2',
v: '20140806'
)
.replyWithFile(200, __dirname + '/fixtures/venues-explore-single.json')
selfRoom = @room
selfRoom.user.say('alice', '@hubot lunch')
setTimeout(() ->
try
expect(selfRoom.messages).to.eql [
['alice', '@hubot lunch']
[
'hubot',
{
"attachments": [
{
"color": "good",
"fallback": "AVO Nashville (3001 Charlotte Ave Ste 200) - http://www.eatavo.com",
"title": "AVO Nashville",
"title_link": "https://foursquare.com/v/5592de25498e1053218edf29",
"thumb_url": "https://ss3.4sqi.net/img/categories_v2/food/vegetarian_bg_120.png",
"fields": [
{
"title": "Address",
"value": "3001 Charlotte Ave Ste 200",
"short": true
},
{
"title": "Menu",
"value": "<http://www.eatavo.com/menu/|View Menu>",
"short": true
},
{
"title": "Website",
"value": "http://www.eatavo.com",
"short": true
},
{
"title": "Rating",
"value": "8 out of 10",
"short": true
},
{
"title": "Category",
"value": "Vegetarian / Vegan",
"short": true
},
{
"title": "Price",
"value": "Moderate",
"short": true
}
]
}
],
"unfurl_links": false
}
]
]
done()
catch err
done err
return
, 1000)
# hubot lunch
it 'responds with a lunch location (random test)', (done) ->
nock('https://api.foursquare.com')
.get('/v2/venues/explore')
.query(
price: '1,2,3',
openNow: true,
query: 'lunch',
radius: 1600,
ll: '36.1514179,-86.8262359',
client_id: 'foobar1',
client_secret: 'foobar2',
v: '20140806'
)
.replyWithFile(200, __dirname + '/fixtures/venues-explore-full.json')
selfRoom = @room
selfRoom.user.say('alice', '@hubot lunch')
setTimeout(() ->
try
expect(selfRoom.messages[1][1]).to.be.a('object')
expect(selfRoom.messages[1][1]).to.have.property('attachments')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('title')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('title_link')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('fallback')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('fields')
done()
catch err
done err
return
, 1000)
| 28379 | Helper = require('hubot-test-helper')
chai = require 'chai'
nock = require 'nock'
expect = chai.expect
helper = new Helper [
'adapters/slack.coffee',
'../src/foursquare-lunch.coffee'
]
# Alter time as test runs
originalDateNow = Date.now
mockDateNow = () ->
return Date.parse('Tue Mar 30 2018 14:10:00 GMT-0500 (CDT)')
describe 'hubot-foursquare-lunch for slack', ->
beforeEach ->
process.env.HUBOT_LOG_LEVEL='error'
process.env.FOURSQUARE_CLIENT_ID='foobar1'
process.env.FOURSQUARE_CLIENT_SECRET='<KEY>'
process.env.HUBOT_DEFAULT_LATITUDE=36.1514179
process.env.HUBOT_DEFAULT_LONGITUDE=-86.8262359
Date.now = mockDateNow
nock.disableNetConnect()
@room = helper.createRoom()
afterEach ->
delete process.env.HUBOT_LOG_LEVEL
delete process.env.FOURSQUARE_CLIENT_ID
delete process.env.FOURSQUARE_CLIENT_SECRET
delete process.env.HUBOT_DEFAULT_LATITUDE
delete process.env.HUBOT_DEFAULT_LONGITUDE
Date.now = originalDateNow
nock.cleanAll()
@room.destroy()
# hubot lunch
it 'responds with a lunch location', (done) ->
nock('https://api.foursquare.com')
.get('/v2/venues/explore')
.query(
price: '1,2,3',
openNow: true,
query: 'lunch',
radius: 1600,
ll: '36.1514179,-86.8262359',
client_id: 'foobar1',
client_secret: '<KEY>',
v: '20140806'
)
.replyWithFile(200, __dirname + '/fixtures/venues-explore-single.json')
selfRoom = @room
selfRoom.user.say('alice', '@hubot lunch')
setTimeout(() ->
try
expect(selfRoom.messages).to.eql [
['alice', '@hubot lunch']
[
'hubot',
{
"attachments": [
{
"color": "good",
"fallback": "AVO Nashville (3001 Charlotte Ave Ste 200) - http://www.eatavo.com",
"title": "AVO Nashville",
"title_link": "https://foursquare.com/v/5592de25498e1053218edf29",
"thumb_url": "https://ss3.4sqi.net/img/categories_v2/food/vegetarian_bg_120.png",
"fields": [
{
"title": "Address",
"value": "3001 Charlotte Ave Ste 200",
"short": true
},
{
"title": "Menu",
"value": "<http://www.eatavo.com/menu/|View Menu>",
"short": true
},
{
"title": "Website",
"value": "http://www.eatavo.com",
"short": true
},
{
"title": "Rating",
"value": "8 out of 10",
"short": true
},
{
"title": "Category",
"value": "Vegetarian / Vegan",
"short": true
},
{
"title": "Price",
"value": "Moderate",
"short": true
}
]
}
],
"unfurl_links": false
}
]
]
done()
catch err
done err
return
, 1000)
# hubot lunch
it 'responds with a lunch location (random test)', (done) ->
nock('https://api.foursquare.com')
.get('/v2/venues/explore')
.query(
price: '1,2,3',
openNow: true,
query: 'lunch',
radius: 1600,
ll: '36.1514179,-86.8262359',
client_id: 'foobar1',
client_secret: '<KEY>',
v: '20140806'
)
.replyWithFile(200, __dirname + '/fixtures/venues-explore-full.json')
selfRoom = @room
selfRoom.user.say('alice', '@hubot lunch')
setTimeout(() ->
try
expect(selfRoom.messages[1][1]).to.be.a('object')
expect(selfRoom.messages[1][1]).to.have.property('attachments')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('title')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('title_link')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('fallback')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('fields')
done()
catch err
done err
return
, 1000)
| true | Helper = require('hubot-test-helper')
chai = require 'chai'
nock = require 'nock'
expect = chai.expect
helper = new Helper [
'adapters/slack.coffee',
'../src/foursquare-lunch.coffee'
]
# Alter time as test runs
originalDateNow = Date.now
mockDateNow = () ->
return Date.parse('Tue Mar 30 2018 14:10:00 GMT-0500 (CDT)')
describe 'hubot-foursquare-lunch for slack', ->
beforeEach ->
process.env.HUBOT_LOG_LEVEL='error'
process.env.FOURSQUARE_CLIENT_ID='foobar1'
process.env.FOURSQUARE_CLIENT_SECRET='PI:KEY:<KEY>END_PI'
process.env.HUBOT_DEFAULT_LATITUDE=36.1514179
process.env.HUBOT_DEFAULT_LONGITUDE=-86.8262359
Date.now = mockDateNow
nock.disableNetConnect()
@room = helper.createRoom()
afterEach ->
delete process.env.HUBOT_LOG_LEVEL
delete process.env.FOURSQUARE_CLIENT_ID
delete process.env.FOURSQUARE_CLIENT_SECRET
delete process.env.HUBOT_DEFAULT_LATITUDE
delete process.env.HUBOT_DEFAULT_LONGITUDE
Date.now = originalDateNow
nock.cleanAll()
@room.destroy()
# hubot lunch
it 'responds with a lunch location', (done) ->
nock('https://api.foursquare.com')
.get('/v2/venues/explore')
.query(
price: '1,2,3',
openNow: true,
query: 'lunch',
radius: 1600,
ll: '36.1514179,-86.8262359',
client_id: 'foobar1',
client_secret: 'PI:KEY:<KEY>END_PI',
v: '20140806'
)
.replyWithFile(200, __dirname + '/fixtures/venues-explore-single.json')
selfRoom = @room
selfRoom.user.say('alice', '@hubot lunch')
setTimeout(() ->
try
expect(selfRoom.messages).to.eql [
['alice', '@hubot lunch']
[
'hubot',
{
"attachments": [
{
"color": "good",
"fallback": "AVO Nashville (3001 Charlotte Ave Ste 200) - http://www.eatavo.com",
"title": "AVO Nashville",
"title_link": "https://foursquare.com/v/5592de25498e1053218edf29",
"thumb_url": "https://ss3.4sqi.net/img/categories_v2/food/vegetarian_bg_120.png",
"fields": [
{
"title": "Address",
"value": "3001 Charlotte Ave Ste 200",
"short": true
},
{
"title": "Menu",
"value": "<http://www.eatavo.com/menu/|View Menu>",
"short": true
},
{
"title": "Website",
"value": "http://www.eatavo.com",
"short": true
},
{
"title": "Rating",
"value": "8 out of 10",
"short": true
},
{
"title": "Category",
"value": "Vegetarian / Vegan",
"short": true
},
{
"title": "Price",
"value": "Moderate",
"short": true
}
]
}
],
"unfurl_links": false
}
]
]
done()
catch err
done err
return
, 1000)
# hubot lunch
it 'responds with a lunch location (random test)', (done) ->
nock('https://api.foursquare.com')
.get('/v2/venues/explore')
.query(
price: '1,2,3',
openNow: true,
query: 'lunch',
radius: 1600,
ll: '36.1514179,-86.8262359',
client_id: 'foobar1',
client_secret: 'PI:KEY:<KEY>END_PI',
v: '20140806'
)
.replyWithFile(200, __dirname + '/fixtures/venues-explore-full.json')
selfRoom = @room
selfRoom.user.say('alice', '@hubot lunch')
setTimeout(() ->
try
expect(selfRoom.messages[1][1]).to.be.a('object')
expect(selfRoom.messages[1][1]).to.have.property('attachments')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('title')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('title_link')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('fallback')
expect(selfRoom.messages[1][1]['attachments'][0]).to.have.property('fields')
done()
catch err
done err
return
, 1000)
|
[
{
"context": " false\n name = Duggarify.name name\n if name is \"Josh Bob\" or name is \"Joshua Bob\"\n $('.num').text(\"You ",
"end": 399,
"score": 0.999880313873291,
"start": 391,
"tag": "NAME",
"value": "Josh Bob"
},
{
"context": "ify.name name\n if name is \"Josh Bob\" or n... | src/javascript/global.coffee | adampash/dug_name_generator | 0 | # Browserify entry point for the global.js bundle (yay CoffeeScript!)
$ = require 'jquery'
Duggarify = require './duggarify'
name = ""
post_url = "http://gawker.com/whats-your-duggar-name-1707464686"
$('.name_form').on 'submit', ->
name = $(@).find('input').val()
$('input').blur()
unless name != ''
$('input').focus()
return false
name = Duggarify.name name
if name is "Josh Bob" or name is "Joshua Bob"
$('.num').text("You should probably get")
$('.last').text("a different name.")
$('.name_container').addClass 'small'
else
$('.num').text name
$('.last').text("Duggar")
$('.name_container').removeClass 'small'
$('.big_j span').addClass 'spin'
setTimeout ->
$('#content').addClass 'done'
, 2000
setTimeout ->
$('.big_j').addClass 'done'
, 2900
$('.deets').show()
false
$('.start_over').on 'click', ->
$('.done').removeClass('done')
$('.spin').removeClass('spin')
$('.big_j span').removeClass 'spin'
$('input').val('').focus()
$('.facebook').on 'click', ->
description = encodeURIComponent "Get your Duggar name before it's too late!"
picture = encodeURIComponent "http://i.kinja-img.com/gawker-media/image/upload/s---kfRt6zT--/c_fit,fl_progressive,q_80,w_636/1270976878793119010.jpg"
title = encodeURIComponent ""
text = encodeURIComponent "My Duggar name is #{name} Duggar! What's yours?"
app_id = "1619332488296427"
url = "https://www.facebook.com/dialog/feed?description=#{description}&picture=#{picture}&name=#{text}&link=#{encodeURIComponent post_url}&app_id=#{app_id}&display=page&redirect_uri=#{encodeURIComponent post_url}"
window.open url
$('.twitter').on 'click', ->
text = "My Duggar name is #{name} Duggar! What's yours?!?!?!?!?!?!?!??!?!?!?!?!??!11 #MyDuggarName "
link = "https://twitter.com/home?status=#{encodeURIComponent text}#{encodeURIComponent post_url}"
window.open link
| 929 | # Browserify entry point for the global.js bundle (yay CoffeeScript!)
$ = require 'jquery'
Duggarify = require './duggarify'
name = ""
post_url = "http://gawker.com/whats-your-duggar-name-1707464686"
$('.name_form').on 'submit', ->
name = $(@).find('input').val()
$('input').blur()
unless name != ''
$('input').focus()
return false
name = Duggarify.name name
if name is "<NAME>" or name is "<NAME>"
$('.num').text("You should probably get")
$('.last').text("a different name.")
$('.name_container').addClass 'small'
else
$('.num').text name
$('.last').text("Duggar")
$('.name_container').removeClass 'small'
$('.big_j span').addClass 'spin'
setTimeout ->
$('#content').addClass 'done'
, 2000
setTimeout ->
$('.big_j').addClass 'done'
, 2900
$('.deets').show()
false
$('.start_over').on 'click', ->
$('.done').removeClass('done')
$('.spin').removeClass('spin')
$('.big_j span').removeClass 'spin'
$('input').val('').focus()
$('.facebook').on 'click', ->
description = encodeURIComponent "Get your Duggar name before it's too late!"
picture = encodeURIComponent "http://i.kinja-img.com/gawker-media/image/upload/s---kfRt6zT--/c_fit,fl_progressive,q_80,w_636/1270976878793119010.jpg"
title = encodeURIComponent ""
text = encodeURIComponent "My Duggar name is #{name} Duggar! What's yours?"
app_id = "1619332488296427"
url = "https://www.facebook.com/dialog/feed?description=#{description}&picture=#{picture}&name=#{text}&link=#{encodeURIComponent post_url}&app_id=#{app_id}&display=page&redirect_uri=#{encodeURIComponent post_url}"
window.open url
$('.twitter').on 'click', ->
text = "My Duggar name is #{name} <NAME>! What's yours?!?!?!?!?!?!?!??!?!?!?!?!??!11 #MyDuggarName "
link = "https://twitter.com/home?status=#{encodeURIComponent text}#{encodeURIComponent post_url}"
window.open link
| true | # Browserify entry point for the global.js bundle (yay CoffeeScript!)
$ = require 'jquery'
Duggarify = require './duggarify'
name = ""
post_url = "http://gawker.com/whats-your-duggar-name-1707464686"
$('.name_form').on 'submit', ->
name = $(@).find('input').val()
$('input').blur()
unless name != ''
$('input').focus()
return false
name = Duggarify.name name
if name is "PI:NAME:<NAME>END_PI" or name is "PI:NAME:<NAME>END_PI"
$('.num').text("You should probably get")
$('.last').text("a different name.")
$('.name_container').addClass 'small'
else
$('.num').text name
$('.last').text("Duggar")
$('.name_container').removeClass 'small'
$('.big_j span').addClass 'spin'
setTimeout ->
$('#content').addClass 'done'
, 2000
setTimeout ->
$('.big_j').addClass 'done'
, 2900
$('.deets').show()
false
$('.start_over').on 'click', ->
$('.done').removeClass('done')
$('.spin').removeClass('spin')
$('.big_j span').removeClass 'spin'
$('input').val('').focus()
$('.facebook').on 'click', ->
description = encodeURIComponent "Get your Duggar name before it's too late!"
picture = encodeURIComponent "http://i.kinja-img.com/gawker-media/image/upload/s---kfRt6zT--/c_fit,fl_progressive,q_80,w_636/1270976878793119010.jpg"
title = encodeURIComponent ""
text = encodeURIComponent "My Duggar name is #{name} Duggar! What's yours?"
app_id = "1619332488296427"
url = "https://www.facebook.com/dialog/feed?description=#{description}&picture=#{picture}&name=#{text}&link=#{encodeURIComponent post_url}&app_id=#{app_id}&display=page&redirect_uri=#{encodeURIComponent post_url}"
window.open url
$('.twitter').on 'click', ->
text = "My Duggar name is #{name} PI:NAME:<NAME>END_PI! What's yours?!?!?!?!?!?!?!??!?!?!?!?!??!11 #MyDuggarName "
link = "https://twitter.com/home?status=#{encodeURIComponent text}#{encodeURIComponent post_url}"
window.open link
|
[
{
"context": "nd shared variable exchange for WolfCage.\n\n@author Destin Moulton\n@git https://github.com/destinmoulton/wolfcage\n@l",
"end": 88,
"score": 0.9998769760131836,
"start": 74,
"tag": "NAME",
"value": "Destin Moulton"
},
{
"context": ".\n\n@author Destin Moulton\n@git htt... | src/Bus.coffee | destinmoulton/cagen | 0 | ###
A pub/sub system and shared variable exchange for WolfCage.
@author Destin Moulton
@git https://github.com/destinmoulton/wolfcage
@license MIT
Subscribe and publish to a channel.
Set and get shared variables.
###
class Bus
constructor:()->
@_channels = {}
@_vault = {}
subscribe: (channel, callback)=>
if not @_channels.hasOwnProperty(channel)
@_channels[channel] = []
@_channels[channel].push(callback)
broadcast: (channel, payload)->
if @_channels.hasOwnProperty(channel)
for subscriber in @_channels[channel]
subscriber(payload)
else console.log("Bus: Unable to find #{channel} channel.")
set: (name, variable)->
@_vault[name] = variable
get: (name)->
if not @_vault.hasOwnProperty(name)
console.log("Bus: Unable to find #{name} in variable vault.")
else return @_vault[name]
module.exports = Bus | 132774 | ###
A pub/sub system and shared variable exchange for WolfCage.
@author <NAME>
@git https://github.com/destinmoulton/wolfcage
@license MIT
Subscribe and publish to a channel.
Set and get shared variables.
###
class Bus
constructor:()->
@_channels = {}
@_vault = {}
subscribe: (channel, callback)=>
if not @_channels.hasOwnProperty(channel)
@_channels[channel] = []
@_channels[channel].push(callback)
broadcast: (channel, payload)->
if @_channels.hasOwnProperty(channel)
for subscriber in @_channels[channel]
subscriber(payload)
else console.log("Bus: Unable to find #{channel} channel.")
set: (name, variable)->
@_vault[name] = variable
get: (name)->
if not @_vault.hasOwnProperty(name)
console.log("Bus: Unable to find #{name} in variable vault.")
else return @_vault[name]
module.exports = Bus | true | ###
A pub/sub system and shared variable exchange for WolfCage.
@author PI:NAME:<NAME>END_PI
@git https://github.com/destinmoulton/wolfcage
@license MIT
Subscribe and publish to a channel.
Set and get shared variables.
###
class Bus
constructor:()->
@_channels = {}
@_vault = {}
subscribe: (channel, callback)=>
if not @_channels.hasOwnProperty(channel)
@_channels[channel] = []
@_channels[channel].push(callback)
broadcast: (channel, payload)->
if @_channels.hasOwnProperty(channel)
for subscriber in @_channels[channel]
subscriber(payload)
else console.log("Bus: Unable to find #{channel} channel.")
set: (name, variable)->
@_vault[name] = variable
get: (name)->
if not @_vault.hasOwnProperty(name)
console.log("Bus: Unable to find #{name} in variable vault.")
else return @_vault[name]
module.exports = Bus |
[
{
"context": " { method: 'POST', url: '/people', json: { name: 'jakob', age: 27 } }\n .then ({ json }) ->\n ",
"end": 3500,
"score": 0.9986087083816528,
"start": 3495,
"tag": "NAME",
"value": "jakob"
},
{
"context": " { method: 'POST', url: '/people', json: { name: 'juli... | spec/mocha.coffee | jakobmattsson/rester | 0 | jscov = require 'jscov'
rester = require jscov.cover('..', 'lib', 'rester')
should = require 'should'
express = require 'express'
runr = require 'runr'
manikin = require 'manikin-mongodb'
request = require 'request'
mongojs = require 'mongojs'
_ = require 'underscore'
Q = require 'q'
qRequest = (config = {}) ->
(params) ->
deferred = Q.defer()
if typeof params == 'string'
params = { url: params }
if config.baseUrl? || params.url?
params = _.extend({}, params, { url: (config.url ? '') + (params.url ? '') })
request params, (err, res, body) ->
if err
deferred.reject(err)
else
jsonBody = null
if typeof body == 'string'
try
jsonBody = JSON.parse(body)
catch ex
jsonBody = {}
else
jsonBody = body
deferred.resolve({ res: res, body: body, json: jsonBody })
deferred.promise
it "should have the right methods", ->
rester.should.have.keys [
'exec'
'acm'
]
it "should do things", (done) ->
runr.up 'mongodb', {}, ->
mongodb = 'mongodb://localhost/rester-test'
port = 1337
req = qRequest({ url: 'http://localhost:' + port })
# define the app
app = express()
app.use express.json()
app.use express.urlencoded()
app.use express.responseTime()
# define the auth
userFromDb = (req, callback) -> callback(null, {})
# define the db
db = manikin.create()
models =
people:
fields:
name: 'string'
age: 'number'
pets:
owners: person: 'people'
fields:
name: 'string'
race: 'string'
foods:
fields:
name: 'string'
eatenBy: { type: 'hasMany', model: 'pets', inverseName: 'eats' }
# run tests
mongojs.connect(mongodb).dropDatabase ->
db.connect mongodb, models, ->
rester.exec(app, db, models, userFromDb, { verbose: false })
app.listen(port)
s = {}
req
url: '/'
.then ({ json }) ->
json.should.eql
roots: ['people', 'foods']
verbs: []
# Inserting and updating some food
# ================================
.then ->
req '/foods'
.then ({ json }) ->
json.should.eql []
.then ->
req { method: 'POST', url: '/foods', json: { name: 'f0' } }
.then ({ json }) ->
s.food0 = json
.then ->
req { method: 'POST', url: '/foods', json: { name: 'testFood' } }
.then ({ json }) ->
s.food1 = json
json.should.have.keys ['id', 'name', 'eatenBy']
_(json).omit('id').should.eql { name: 'testFood', eatenBy: [] }
.then ->
req '/foods'
.then ({ json }) ->
json.map((x) -> _(x).omit('id')).should.eql [ { name: 'f0', eatenBy: [] }, { name: 'testFood', eatenBy: [] } ]
.then ->
req "/foods/#{s.food1.id}"
.then ({ json }) ->
json.should.eql { id: s.food1.id, name: 'testFood', eatenBy: [] }
.then ->
req { method: 'PUT', url: "/foods/#{s.food1.id}", json: { name: 'f1' } }
.then ({ json }) ->
json.should.eql { id: s.food1.id, name: 'f1', eatenBy: [] }
# Inserting and updating some people and pets
# ===========================================
.then ->
req { method: 'POST', url: '/people', json: { name: 'jakob', age: 27 } }
.then ({ json }) ->
s.jakob = json
.then ->
req { method: 'POST', url: '/people', json: { name: 'julia', age: 26 } }
.then ({ json }) ->
s.julia = json
json.should.have.keys ['id', 'name', 'age']
_(json).omit('id').should.eql { name: 'julia', age: 26 }
.then ->
req { method: 'POST', url: "/people/#{s.julia.id}/pets", json: { name: 'sixten', race: 'cat' } }
.then ({ json }) ->
s.sixten = json
json.should.have.keys ['id', 'name', 'race', 'person', 'eats']
_(json).omit('id').should.eql { name: 'sixten', race: 'cat', person: s.julia.id, eats: [] }
.then ->
req { method: 'POST', url: "/pets", json: { name: 'dog', race: 'dog', person: s.julia.id } }
.then ({ json }) ->
s.dog = json
.then ->
req "/people/#{s.julia.id}/pets"
.then ({ json }) ->
json.should.eql [
id: s.sixten.id
name: 'sixten'
race: 'cat'
person: s.julia.id
eats: []
,
id: s.dog.id
name: 'dog'
race: 'dog'
person: s.julia.id
eats: []
]
.then ->
req { url: "/pets/#{s.dog.id}", method: 'DELETE' }
.then ({ json }) ->
json.should.eql
id: s.dog.id
name: 'dog'
race: 'dog'
person: s.julia.id
eats: []
# Doing some many to many
# =======================
.then ->
req { url: "/foods/#{s.food1.id}/eatenBy/#{s.sixten.id}", method: 'POST' }
.then ({ json }) ->
json.should.eql { status: 'inserted' }
.then ->
req { url: "/foods/#{s.food0.id}/eatenBy/#{s.sixten.id}", method: 'POST' }
.then ({ json }) ->
json.should.eql { status: 'inserted' }
.then ->
req "/pets/#{s.sixten.id}"
.then ({ json }) ->
json.should.eql
id: s.sixten.id
name: 'sixten'
race: 'cat'
person: s.julia.id
eats: [s.food1.id, s.food0.id]
.then ->
req "/pets/#{s.sixten.id}/eats"
.then ({ json }) ->
json.should.eql [
name: 'f1'
eatenBy: [s.sixten.id]
id: s.food1.id
,
name: 'f0'
eatenBy: [s.sixten.id]
id: s.food0.id
]
.then ->
req "/foods/#{s.food0.id}/eatenBy"
.then ({ json }) ->
json.should.eql [
id: s.sixten.id
name: 'sixten'
race: 'cat'
person: s.julia.id
eats: [s.food1.id, s.food0.id]
]
.then ->
req { url: "/pets/#{s.sixten.id}/eats/#{s.food0.id}", method: 'DELETE' }
.then ->
req "/pets/#{s.sixten.id}"
.then ({ json }) ->
json.should.eql
id: s.sixten.id
name: 'sixten'
race: 'cat'
person: s.julia.id
eats: [s.food1.id]
.then ->
req "/foods/#{s.food0.id}/eatenBy"
.then ({ json }) ->
json.should.eql []
# Cascading
# =========
.then ->
req '/people'
.then ({ json }) ->
json.length.should.eql 2
.then ->
req '/foods'
.then ({ json }) ->
json.length.should.eql 2
.then ->
req '/pets'
.then ({ json }) ->
json.length.should.eql 1
.then ->
req { url: "/people/#{s.julia.id}", method: 'DELETE' }
.then ->
req '/people'
.then ({ json }) ->
json.length.should.eql 1
.then ->
req '/foods'
.then ({ json }) ->
json.should.eql [
name: 'f1'
eatenBy: []
id: s.food1.id
,
name: 'f0'
eatenBy: []
id: s.food0.id
]
.then ->
req '/pets'
.then ({ json }) ->
json.length.should.eql 0
# Finishing and catching failures
# ===============================
.then ->
done()
.fail(done)
| 146918 | jscov = require 'jscov'
rester = require jscov.cover('..', 'lib', 'rester')
should = require 'should'
express = require 'express'
runr = require 'runr'
manikin = require 'manikin-mongodb'
request = require 'request'
mongojs = require 'mongojs'
_ = require 'underscore'
Q = require 'q'
qRequest = (config = {}) ->
(params) ->
deferred = Q.defer()
if typeof params == 'string'
params = { url: params }
if config.baseUrl? || params.url?
params = _.extend({}, params, { url: (config.url ? '') + (params.url ? '') })
request params, (err, res, body) ->
if err
deferred.reject(err)
else
jsonBody = null
if typeof body == 'string'
try
jsonBody = JSON.parse(body)
catch ex
jsonBody = {}
else
jsonBody = body
deferred.resolve({ res: res, body: body, json: jsonBody })
deferred.promise
it "should have the right methods", ->
rester.should.have.keys [
'exec'
'acm'
]
it "should do things", (done) ->
runr.up 'mongodb', {}, ->
mongodb = 'mongodb://localhost/rester-test'
port = 1337
req = qRequest({ url: 'http://localhost:' + port })
# define the app
app = express()
app.use express.json()
app.use express.urlencoded()
app.use express.responseTime()
# define the auth
userFromDb = (req, callback) -> callback(null, {})
# define the db
db = manikin.create()
models =
people:
fields:
name: 'string'
age: 'number'
pets:
owners: person: 'people'
fields:
name: 'string'
race: 'string'
foods:
fields:
name: 'string'
eatenBy: { type: 'hasMany', model: 'pets', inverseName: 'eats' }
# run tests
mongojs.connect(mongodb).dropDatabase ->
db.connect mongodb, models, ->
rester.exec(app, db, models, userFromDb, { verbose: false })
app.listen(port)
s = {}
req
url: '/'
.then ({ json }) ->
json.should.eql
roots: ['people', 'foods']
verbs: []
# Inserting and updating some food
# ================================
.then ->
req '/foods'
.then ({ json }) ->
json.should.eql []
.then ->
req { method: 'POST', url: '/foods', json: { name: 'f0' } }
.then ({ json }) ->
s.food0 = json
.then ->
req { method: 'POST', url: '/foods', json: { name: 'testFood' } }
.then ({ json }) ->
s.food1 = json
json.should.have.keys ['id', 'name', 'eatenBy']
_(json).omit('id').should.eql { name: 'testFood', eatenBy: [] }
.then ->
req '/foods'
.then ({ json }) ->
json.map((x) -> _(x).omit('id')).should.eql [ { name: 'f0', eatenBy: [] }, { name: 'testFood', eatenBy: [] } ]
.then ->
req "/foods/#{s.food1.id}"
.then ({ json }) ->
json.should.eql { id: s.food1.id, name: 'testFood', eatenBy: [] }
.then ->
req { method: 'PUT', url: "/foods/#{s.food1.id}", json: { name: 'f1' } }
.then ({ json }) ->
json.should.eql { id: s.food1.id, name: 'f1', eatenBy: [] }
# Inserting and updating some people and pets
# ===========================================
.then ->
req { method: 'POST', url: '/people', json: { name: '<NAME>', age: 27 } }
.then ({ json }) ->
s.jakob = json
.then ->
req { method: 'POST', url: '/people', json: { name: '<NAME>', age: 26 } }
.then ({ json }) ->
s.julia = json
json.should.have.keys ['id', 'name', 'age']
_(json).omit('id').should.eql { name: '<NAME>', age: 26 }
.then ->
req { method: 'POST', url: "/people/#{s.julia.id}/pets", json: { name: '<NAME>', race: 'cat' } }
.then ({ json }) ->
s.sixten = json
json.should.have.keys ['id', 'name', 'race', 'person', 'eats']
_(json).omit('id').should.eql { name: '<NAME>', race: 'cat', person: s.julia.id, eats: [] }
.then ->
req { method: 'POST', url: "/pets", json: { name: '<NAME>', race: 'dog', person: s.julia.id } }
.then ({ json }) ->
s.dog = json
.then ->
req "/people/#{s.julia.id}/pets"
.then ({ json }) ->
json.should.eql [
id: s.sixten.id
name: '<NAME>'
race: 'cat'
person: s.julia.id
eats: []
,
id: s.dog.id
name: '<NAME>'
race: 'dog'
person: s.julia.id
eats: []
]
.then ->
req { url: "/pets/#{s.dog.id}", method: 'DELETE' }
.then ({ json }) ->
json.should.eql
id: s.dog.id
name: '<NAME>'
race: 'dog'
person: s.julia.id
eats: []
# Doing some many to many
# =======================
.then ->
req { url: "/foods/#{s.food1.id}/eatenBy/#{s.sixten.id}", method: 'POST' }
.then ({ json }) ->
json.should.eql { status: 'inserted' }
.then ->
req { url: "/foods/#{s.food0.id}/eatenBy/#{s.sixten.id}", method: 'POST' }
.then ({ json }) ->
json.should.eql { status: 'inserted' }
.then ->
req "/pets/#{s.sixten.id}"
.then ({ json }) ->
json.should.eql
id: s.sixten.id
name: '<NAME>'
race: 'cat'
person: s.julia.id
eats: [s.food1.id, s.food0.id]
.then ->
req "/pets/#{s.sixten.id}/eats"
.then ({ json }) ->
json.should.eql [
name: 'f1'
eatenBy: [s.sixten.id]
id: s.food1.id
,
name: 'f0'
eatenBy: [s.sixten.id]
id: s.food0.id
]
.then ->
req "/foods/#{s.food0.id}/eatenBy"
.then ({ json }) ->
json.should.eql [
id: s.sixten.id
name: '<NAME>'
race: 'cat'
person: s.julia.id
eats: [s.food1.id, s.food0.id]
]
.then ->
req { url: "/pets/#{s.sixten.id}/eats/#{s.food0.id}", method: 'DELETE' }
.then ->
req "/pets/#{s.sixten.id}"
.then ({ json }) ->
json.should.eql
id: s.sixten.id
name: '<NAME>'
race: 'cat'
person: s.<NAME>.id
eats: [s.food1.id]
.then ->
req "/foods/#{s.food0.id}/eatenBy"
.then ({ json }) ->
json.should.eql []
# Cascading
# =========
.then ->
req '/people'
.then ({ json }) ->
json.length.should.eql 2
.then ->
req '/foods'
.then ({ json }) ->
json.length.should.eql 2
.then ->
req '/pets'
.then ({ json }) ->
json.length.should.eql 1
.then ->
req { url: "/people/#{s.julia.id}", method: 'DELETE' }
.then ->
req '/people'
.then ({ json }) ->
json.length.should.eql 1
.then ->
req '/foods'
.then ({ json }) ->
json.should.eql [
name: 'f1'
eatenBy: []
id: s.food1.id
,
name: 'f0'
eatenBy: []
id: s.food0.id
]
.then ->
req '/pets'
.then ({ json }) ->
json.length.should.eql 0
# Finishing and catching failures
# ===============================
.then ->
done()
.fail(done)
| true | jscov = require 'jscov'
rester = require jscov.cover('..', 'lib', 'rester')
should = require 'should'
express = require 'express'
runr = require 'runr'
manikin = require 'manikin-mongodb'
request = require 'request'
mongojs = require 'mongojs'
_ = require 'underscore'
Q = require 'q'
qRequest = (config = {}) ->
(params) ->
deferred = Q.defer()
if typeof params == 'string'
params = { url: params }
if config.baseUrl? || params.url?
params = _.extend({}, params, { url: (config.url ? '') + (params.url ? '') })
request params, (err, res, body) ->
if err
deferred.reject(err)
else
jsonBody = null
if typeof body == 'string'
try
jsonBody = JSON.parse(body)
catch ex
jsonBody = {}
else
jsonBody = body
deferred.resolve({ res: res, body: body, json: jsonBody })
deferred.promise
it "should have the right methods", ->
rester.should.have.keys [
'exec'
'acm'
]
it "should do things", (done) ->
runr.up 'mongodb', {}, ->
mongodb = 'mongodb://localhost/rester-test'
port = 1337
req = qRequest({ url: 'http://localhost:' + port })
# define the app
app = express()
app.use express.json()
app.use express.urlencoded()
app.use express.responseTime()
# define the auth
userFromDb = (req, callback) -> callback(null, {})
# define the db
db = manikin.create()
models =
people:
fields:
name: 'string'
age: 'number'
pets:
owners: person: 'people'
fields:
name: 'string'
race: 'string'
foods:
fields:
name: 'string'
eatenBy: { type: 'hasMany', model: 'pets', inverseName: 'eats' }
# run tests
mongojs.connect(mongodb).dropDatabase ->
db.connect mongodb, models, ->
rester.exec(app, db, models, userFromDb, { verbose: false })
app.listen(port)
s = {}
req
url: '/'
.then ({ json }) ->
json.should.eql
roots: ['people', 'foods']
verbs: []
# Inserting and updating some food
# ================================
.then ->
req '/foods'
.then ({ json }) ->
json.should.eql []
.then ->
req { method: 'POST', url: '/foods', json: { name: 'f0' } }
.then ({ json }) ->
s.food0 = json
.then ->
req { method: 'POST', url: '/foods', json: { name: 'testFood' } }
.then ({ json }) ->
s.food1 = json
json.should.have.keys ['id', 'name', 'eatenBy']
_(json).omit('id').should.eql { name: 'testFood', eatenBy: [] }
.then ->
req '/foods'
.then ({ json }) ->
json.map((x) -> _(x).omit('id')).should.eql [ { name: 'f0', eatenBy: [] }, { name: 'testFood', eatenBy: [] } ]
.then ->
req "/foods/#{s.food1.id}"
.then ({ json }) ->
json.should.eql { id: s.food1.id, name: 'testFood', eatenBy: [] }
.then ->
req { method: 'PUT', url: "/foods/#{s.food1.id}", json: { name: 'f1' } }
.then ({ json }) ->
json.should.eql { id: s.food1.id, name: 'f1', eatenBy: [] }
# Inserting and updating some people and pets
# ===========================================
.then ->
req { method: 'POST', url: '/people', json: { name: 'PI:NAME:<NAME>END_PI', age: 27 } }
.then ({ json }) ->
s.jakob = json
.then ->
req { method: 'POST', url: '/people', json: { name: 'PI:NAME:<NAME>END_PI', age: 26 } }
.then ({ json }) ->
s.julia = json
json.should.have.keys ['id', 'name', 'age']
_(json).omit('id').should.eql { name: 'PI:NAME:<NAME>END_PI', age: 26 }
.then ->
req { method: 'POST', url: "/people/#{s.julia.id}/pets", json: { name: 'PI:NAME:<NAME>END_PI', race: 'cat' } }
.then ({ json }) ->
s.sixten = json
json.should.have.keys ['id', 'name', 'race', 'person', 'eats']
_(json).omit('id').should.eql { name: 'PI:NAME:<NAME>END_PI', race: 'cat', person: s.julia.id, eats: [] }
.then ->
req { method: 'POST', url: "/pets", json: { name: 'PI:NAME:<NAME>END_PI', race: 'dog', person: s.julia.id } }
.then ({ json }) ->
s.dog = json
.then ->
req "/people/#{s.julia.id}/pets"
.then ({ json }) ->
json.should.eql [
id: s.sixten.id
name: 'PI:NAME:<NAME>END_PI'
race: 'cat'
person: s.julia.id
eats: []
,
id: s.dog.id
name: 'PI:NAME:<NAME>END_PI'
race: 'dog'
person: s.julia.id
eats: []
]
.then ->
req { url: "/pets/#{s.dog.id}", method: 'DELETE' }
.then ({ json }) ->
json.should.eql
id: s.dog.id
name: 'PI:NAME:<NAME>END_PI'
race: 'dog'
person: s.julia.id
eats: []
# Doing some many to many
# =======================
.then ->
req { url: "/foods/#{s.food1.id}/eatenBy/#{s.sixten.id}", method: 'POST' }
.then ({ json }) ->
json.should.eql { status: 'inserted' }
.then ->
req { url: "/foods/#{s.food0.id}/eatenBy/#{s.sixten.id}", method: 'POST' }
.then ({ json }) ->
json.should.eql { status: 'inserted' }
.then ->
req "/pets/#{s.sixten.id}"
.then ({ json }) ->
json.should.eql
id: s.sixten.id
name: 'PI:NAME:<NAME>END_PI'
race: 'cat'
person: s.julia.id
eats: [s.food1.id, s.food0.id]
.then ->
req "/pets/#{s.sixten.id}/eats"
.then ({ json }) ->
json.should.eql [
name: 'f1'
eatenBy: [s.sixten.id]
id: s.food1.id
,
name: 'f0'
eatenBy: [s.sixten.id]
id: s.food0.id
]
.then ->
req "/foods/#{s.food0.id}/eatenBy"
.then ({ json }) ->
json.should.eql [
id: s.sixten.id
name: 'PI:NAME:<NAME>END_PI'
race: 'cat'
person: s.julia.id
eats: [s.food1.id, s.food0.id]
]
.then ->
req { url: "/pets/#{s.sixten.id}/eats/#{s.food0.id}", method: 'DELETE' }
.then ->
req "/pets/#{s.sixten.id}"
.then ({ json }) ->
json.should.eql
id: s.sixten.id
name: 'PI:NAME:<NAME>END_PI'
race: 'cat'
person: s.PI:NAME:<NAME>END_PI.id
eats: [s.food1.id]
.then ->
req "/foods/#{s.food0.id}/eatenBy"
.then ({ json }) ->
json.should.eql []
# Cascading
# =========
.then ->
req '/people'
.then ({ json }) ->
json.length.should.eql 2
.then ->
req '/foods'
.then ({ json }) ->
json.length.should.eql 2
.then ->
req '/pets'
.then ({ json }) ->
json.length.should.eql 1
.then ->
req { url: "/people/#{s.julia.id}", method: 'DELETE' }
.then ->
req '/people'
.then ({ json }) ->
json.length.should.eql 1
.then ->
req '/foods'
.then ({ json }) ->
json.should.eql [
name: 'f1'
eatenBy: []
id: s.food1.id
,
name: 'f0'
eatenBy: []
id: s.food0.id
]
.then ->
req '/pets'
.then ({ json }) ->
json.length.should.eql 0
# Finishing and catching failures
# ===============================
.then ->
done()
.fail(done)
|
[
{
"context": "ame of the wallet to create\n # new_passphrase `new_passphrase` - a passphrase for encrypting the wallet\n # b",
"end": 887,
"score": 0.6649025082588196,
"start": 873,
"tag": "PASSWORD",
"value": "new_passphrase"
},
{
"context": "he Bitcoin/PTS wallet file path\n ... | app/js/services/wallet_api.coffee | AlexChien/web_wallet | 0 | # Warning: this is a generated file, any changes made here will be overwritten by the build process
class WalletAPI
constructor: (@q, @log, @rpc, @interval) ->
#@log.info "---- WalletAPI Constructor ----"
# Extra information about the wallet.
# parameters:
# return_type: `json_object`
get_info: (error_handler = null) ->
@rpc.request('wallet_get_info', error_handler).then (response) ->
response.result
# Opens the wallet of the given name
# parameters:
# wallet_name `wallet_name` - the name of the wallet to open
# return_type: `void`
open: (wallet_name, error_handler = null) ->
@rpc.request('wallet_open', [wallet_name], error_handler).then (response) ->
response.result
# Creates a wallet with the given name
# parameters:
# wallet_name `wallet_name` - name of the wallet to create
# new_passphrase `new_passphrase` - a passphrase for encrypting the wallet
# brainkey `brain_key` - a strong passphrase that will be used to generate all private keys, defaults to a large random number
# return_type: `void`
create: (wallet_name, new_passphrase, brain_key, error_handler = null) ->
@rpc.request('wallet_create', [wallet_name, new_passphrase, brain_key], error_handler).then (response) ->
response.result
# Returns the wallet name passed to wallet_open
# parameters:
# return_type: `optional_wallet_name`
get_name: (error_handler = null) ->
  @rpc.request('wallet_get_name', error_handler).then (response) ->
    response.result
# Loads the private key into the specified account. Returns which account it was actually imported to.
# parameters:
# wif_private_key `wif_key` - A private key in bitcoin Wallet Import Format (WIF)
# account_name `account_name` - the name of the account the key should be imported into, if null then the key must belong to an active account
# bool `create_new_account` - If true, the wallet will attempt to create a new account for the name provided rather than import the key into an existing account
# bool `rescan` - If true, the wallet will rescan the blockchain looking for transactions that involve this private key
# return_type: `account_name`
import_private_key: (wif_key, account_name, create_new_account, rescan, error_handler = null) ->
  @rpc.request('wallet_import_private_key', [wif_key, account_name, create_new_account, rescan], error_handler).then (response) ->
    response.result
# Imports a Bitcoin Core or BitShares PTS wallet
# parameters:
# filename `wallet_filename` - the Bitcoin/PTS wallet file path
# passphrase `passphrase` - the imported wallet's password
# account_name `account_name` - the account to receive the contents of the wallet
# return_type: `uint32_t`
import_bitcoin: (wallet_filename, passphrase, account_name, error_handler = null) ->
  @rpc.request('wallet_import_bitcoin', [wallet_filename, passphrase, account_name], error_handler).then (response) ->
    response.result
# Imports an Electrum wallet
# parameters:
# filename `wallet_filename` - the Electrum wallet file path
# passphrase `passphrase` - the imported wallet's password
# account_name `account_name` - the account to receive the contents of the wallet
# return_type: `uint32_t`
import_electrum: (wallet_filename, passphrase, account_name, error_handler = null) ->
  @rpc.request('wallet_import_electrum', [wallet_filename, passphrase, account_name], error_handler).then (response) ->
    response.result
# Create the key from keyhotee config and import it to the wallet, creating a new account using this key
# parameters:
# name `firstname` - first name in keyhotee profile config, for salting the seed of private key
# name `middlename` - middle name in keyhotee profile config, for salting the seed of private key
# name `lastname` - last name in keyhotee profile config, for salting the seed of private key
# brainkey `brainkey` - brainkey in keyhotee profile config, for salting the seed of private key
# keyhoteeid `keyhoteeid` - using keyhotee id as account name
# return_type: `void`
import_keyhotee: (firstname, middlename, lastname, brainkey, keyhoteeid, error_handler = null) ->
  @rpc.request('wallet_import_keyhotee', [firstname, middlename, lastname, brainkey, keyhoteeid], error_handler).then (response) ->
    response.result
# Imports anything that looks like a private key from the given JSON file.
# parameters:
# filename `json_filename` - the full path and filename of JSON wallet to import, example: /path/to/exported_wallet.json
# passphrase `imported_wallet_passphrase` - passphrase for encrypted keys
# account_name `account` - Account into which to import keys.
# return_type: `void`
import_keys_from_json: (json_filename, imported_wallet_passphrase, account, error_handler = null) ->
  @rpc.request('wallet_import_keys_from_json', [json_filename, imported_wallet_passphrase, account], error_handler).then (response) ->
    response.result
# Closes the current wallet if one is open
# parameters:
# return_type: `void`
close: (error_handler = null) ->
  @rpc.request('wallet_close', error_handler).then (response) ->
    response.result
# Exports the current wallet to a JSON file
# parameters:
# filename `json_filename` - the full path and filename of JSON file to generate, example: /path/to/exported_wallet.json
# return_type: `void`
backup_create: (json_filename, error_handler = null) ->
  @rpc.request('wallet_backup_create', [json_filename], error_handler).then (response) ->
    response.result
# Creates a new wallet from an exported JSON file
# parameters:
# filename `json_filename` - the full path and filename of JSON wallet to import, example: /path/to/exported_wallet.json
# wallet_name `wallet_name` - name of the wallet to create
# passphrase `imported_wallet_passphrase` - passphrase of the imported wallet
# return_type: `void`
backup_restore: (json_filename, wallet_name, imported_wallet_passphrase, error_handler = null) ->
  @rpc.request('wallet_backup_restore', [json_filename, wallet_name, imported_wallet_passphrase], error_handler).then (response) ->
    response.result
# Enables or disables automatic wallet backups
# parameters:
# bool `enabled` - true to enable and false to disable
# return_type: `bool`
set_automatic_backups: (enabled, error_handler = null) ->
  @rpc.request('wallet_set_automatic_backups', [enabled], error_handler).then (response) ->
    response.result
# Set transaction expiration time
# parameters:
# uint32_t `seconds` - seconds before new transactions expire
# return_type: `uint32_t`
set_transaction_expiration_time: (seconds, error_handler = null) ->
  @rpc.request('wallet_set_transaction_expiration_time', [seconds], error_handler).then (response) ->
    response.result
# Creates a normal user object. If no owner info is specified, uses a new address from payer.
# parameters:
# account_name `account` - presumably the account that pays for and owns the new object — TODO confirm against server API
# variant `user_data` - presumably arbitrary data stored with the object — TODO confirm
# int32_t `m` - presumably the required number of owner signatures — TODO confirm
# address_list `owners` - presumably the owner addresses of the object — TODO confirm
# return_type: `transaction_record`
object_create: (account, user_data, m, owners, error_handler = null) ->
  @rpc.request('wallet_object_create', [account, user_data, m, owners], error_handler).then (response) ->
    response.result
# Update a normal user object.
# parameters:
# sending_account_name `paying_account_name` - the source account to draw the shares from
# object_id_type `object_id` - the object to update
# variant `user_data` - presumably the new user data for the object — TODO confirm
# bool `sign_and_broadcast` - presumably, if true the transaction is signed and broadcast; otherwise the unsigned builder is returned — TODO confirm
# return_type: `transaction_builder`
object_update: (paying_account_name, object_id, user_data, sign_and_broadcast, error_handler = null) ->
  @rpc.request('wallet_object_update', [paying_account_name, object_id, user_data, sign_and_broadcast], error_handler).then (response) ->
    response.result
# Update a normal user object's owner.
# parameters:
# sending_account_name `paying_account_name` - the source account to draw the shares from
# object_id_type `object_id` - the object to update
# uint32_t `m` - presumably the required number of owner signatures — TODO confirm
# address_list `owners` - presumably the new owner addresses — TODO confirm
# bool `sign_and_broadcast` - presumably, if true the transaction is signed and broadcast — TODO confirm
# return_type: `transaction_builder`
object_transfer: (paying_account_name, object_id, m, owners, sign_and_broadcast, error_handler = null) ->
  @rpc.request('wallet_object_transfer', [paying_account_name, object_id, m, owners, sign_and_broadcast], error_handler).then (response) ->
    response.result
# List objects that belong to an account.
# parameters:
# account_name `account` - Account to fetch objects for
# return_type: `object_array`
object_list: (account, error_handler = null) ->
  @rpc.request('wallet_object_list', [account], error_handler).then (response) ->
    response.result
# Create or update an edge object.
# parameters:
# account_name `paying_account` - Account that will pay for this transaction
# object_id_type `from` -
# object_id_type `to` - the edge's destination object (NOTE(review): generated doc duplicated `paying_account`'s description — verify)
# string `name` - The edge name (the 'key', used in index)
# variant `value` - The edge 'value', not part of the index
# return_type: `transaction_builder`
set_edge: (paying_account, from, to, name, value, error_handler = null) ->
  @rpc.request('wallet_set_edge', [paying_account, from, to, name, value], error_handler).then (response) ->
    response.result
# Lists transaction history for the specified account
# parameters:
# string `account_name` - the name of the account for which the transaction history will be returned, "" for all accounts, example: alice
# string `asset_symbol` - only include transactions involving the specified asset, or "" to include all
# int32_t `limit` - limit the number of returned transactions; negative for most recent and positive for least recent. 0 does not limit
# uint32_t `start_block_num` - the earliest block number to list transactions from; 0 to include all transactions starting from genesis
# uint32_t `end_block_num` - the latest block to list transaction from; -1 to include all transactions ending at the head block
# return_type: `pretty_transactions`
account_transaction_history: (account_name, asset_symbol, limit, start_block_num, end_block_num, error_handler = null) ->
  @rpc.request('wallet_account_transaction_history', [account_name, asset_symbol, limit, start_block_num, end_block_num], error_handler).then (response) ->
    response.result
# Lists transaction history for the specified account in the experimental record format
# parameters:
# string `account_name` - the name of the account for which the transaction history will be returned, "" for all accounts, example: alice
# return_type: `experimental_transactions`
transaction_history_experimental: (account_name, error_handler = null) ->
  @rpc.request('wallet_transaction_history_experimental', [account_name], error_handler).then (response) ->
    response.result
# Returns the wallet's sharedrop snapshot records — TODO confirm semantics (generated doc had no description)
# parameters:
# return_type: `snapshot_record_list`
check_sharedrop: (error_handler = null) ->
  @rpc.request('wallet_check_sharedrop', error_handler).then (response) ->
    response.result
# Removes the specified transaction record from your transaction history. USE WITH CAUTION! Rescan cannot reconstruct all transaction details
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction record
# return_type: `void`
remove_transaction: (transaction_id, error_handler = null) ->
  @rpc.request('wallet_remove_transaction', [transaction_id], error_handler).then (response) ->
    response.result
# Return any errors for your currently pending transactions
# parameters:
# string `filename` - filename to save pending transaction errors to
# return_type: `map<transaction_id_type, fc::exception>`
get_pending_transaction_errors: (filename, error_handler = null) ->
  @rpc.request('wallet_get_pending_transaction_errors', [filename], error_handler).then (response) ->
    response.result
# Locks the private keys in the wallet, disabling spending commands until unlocked
# parameters:
# return_type: `void`
lock: (error_handler = null) ->
  @rpc.request('wallet_lock', error_handler).then (response) ->
    response.result
# Unlock the private keys in the wallet to enable spending operations
# parameters:
# uint32_t `timeout` - the number of seconds to keep the wallet unlocked
# passphrase `passphrase` - the passphrase for encrypting the wallet
# return_type: `void`
unlock: (timeout, passphrase, error_handler = null) ->
  @rpc.request('wallet_unlock', [timeout, passphrase], error_handler).then (response) ->
    response.result
# Change the password of the current wallet
# parameters:
# passphrase `passphrase` - the passphrase for encrypting the wallet
# return_type: `void`
change_passphrase: (passphrase, error_handler = null) ->
  @rpc.request('wallet_change_passphrase', [passphrase], error_handler).then (response) ->
    response.result
# Return a list of wallets in the current data directory
# parameters:
# return_type: `wallet_name_array`
list: (error_handler = null) ->
  @rpc.request('wallet_list', error_handler).then (response) ->
    response.result
# Add new account for receiving payments
# parameters:
# account_name `account_name` - the name you will use to refer to this receive account
# json_variant `private_data` - Extra data to store with this account record
# return_type: `public_key`
account_create: (account_name, private_data, error_handler = null) ->
  @rpc.request('wallet_account_create', [account_name, private_data], error_handler).then (response) ->
    response.result
# Updates the favorited status of the specified account
# parameters:
# account_name `account_name` - the name of the account to set favorited status on
# bool `is_favorite` - true if account should be marked as a favorite; false otherwise
# return_type: `void`
account_set_favorite: (account_name, is_favorite, error_handler = null) ->
  @rpc.request('wallet_account_set_favorite', [account_name, is_favorite], error_handler).then (response) ->
    response.result
# Updates your approval of the specified account
# parameters:
# account_name `account_name` - the name of the account to set approval for
# int8_t `approval` - 1, 0, or -1 respectively for approve, neutral, or disapprove
# return_type: `int8_t`
account_set_approval: (account_name, approval, error_handler = null) ->
  @rpc.request('wallet_account_set_approval', [account_name, approval], error_handler).then (response) ->
    response.result
# Add new account for sending payments
# parameters:
# account_name `account_name` - the name you will use to refer to this sending account
# public_key `account_key` - the key associated with this sending account
# return_type: `void`
add_contact_account: (account_name, account_key, error_handler = null) ->
  @rpc.request('wallet_add_contact_account', [account_name, account_key], error_handler).then (response) ->
    response.result
# Authorizes a public key to control funds of a particular asset class. Requires authority of asset issuer
# parameters:
# account_name `paying_account` - the account that will pay the transaction fee
# asset_symbol `symbol` - the asset granting authorization
# string `address` - the address being granted permission, or the public key, or the account name
# object_id_type `meta` - -1 to remove authorization, otherwise a link to an object in the object graph
# return_type: `transaction_record`
asset_authorize_key: (paying_account, symbol, address, meta, error_handler = null) ->
  @rpc.request('wallet_asset_authorize_key', [paying_account, symbol, address, meta], error_handler).then (response) ->
    response.result
# Burns given amount to the given account. This will allow you to post message and +/- sentiment on someones account as a form of reputation.
# parameters:
# real_amount `amount_to_burn` - the amount of shares to burn
# asset_symbol `asset_symbol` - the asset to burn
# sending_account_name `from_account_name` - the source account to draw the shares from
# string `for_or_against` - the value 'for' or 'against'
# receive_account_name `to_account_name` - the account to which the burn should be credited (for or against) and on which the public message will appear
# string `public_message` - a public message to post
# bool `anonymous` - true if anonymous, else signed by from_account_name
# return_type: `transaction_record`
burn: (amount_to_burn, asset_symbol, from_account_name, for_or_against, to_account_name, public_message, anonymous, error_handler = null) ->
  @rpc.request('wallet_burn', [amount_to_burn, asset_symbol, from_account_name, for_or_against, to_account_name, public_message, anonymous], error_handler).then (response) ->
    response.result
# Creates an address which can be used for a simple (non-TITAN) transfer.
# parameters:
# string `account_name` - The account name that will own this address
# string `label` - presumably a human-readable label for the new address — TODO confirm (generated doc had no description)
# int32_t `legacy_network_byte` - If not -1, use this as the network byte for a BTC-style address.
# return_type: `string`
address_create: (account_name, label, legacy_network_byte, error_handler = null) ->
  @rpc.request('wallet_address_create', [account_name, label, legacy_network_byte], error_handler).then (response) ->
    response.result
# Do a simple (non-TITAN) transfer to a BTC-style address
# parameters:
# real_amount `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# account_name `from_account_name` - the source account to draw the shares from
# legacy_address `to_address` - the address to transfer to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_to_legacy_address: (amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method, error_handler = null) ->
  @rpc.request('wallet_transfer_to_legacy_address', [amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method], error_handler).then (response) ->
    response.result
# Do a simple (non-TITAN) transfer to an address
# parameters:
# real_amount `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# account_name `from_account_name` - the source account to draw the shares from
# address `to_address` - the address to transfer to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_to_address: (amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method, error_handler = null) ->
  @rpc.request('wallet_transfer_to_address', [amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method], error_handler).then (response) ->
    response.result
# Sends given amount to the given account, with the from field set to the payer. This transfer will occur in a single transaction and will be cheaper, but may reduce your privacy.
# parameters:
# string `amount_to_transfer` - the amount of shares to transfer (NOTE(review): sibling transfer methods document this as `real_amount` — verify which the server expects)
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `from_account_name` - the source account to draw the shares from
# receive_account_name `to_account_name` - the account to transfer the shares to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer: (amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method, error_handler = null) ->
  @rpc.request('wallet_transfer', [amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method], error_handler).then (response) ->
    response.result
# Sends given amount to the given account
# parameters:
# real_amount `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `from_account_name` - the source account to draw the shares from
# receive_account_name `to_account_name` - the account to transfer the shares to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_to_public_account: (amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method, error_handler = null) ->
  @rpc.request('wallet_transfer_to_public_account', [amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method], error_handler).then (response) ->
    response.result
# Returns the balance ID (deposit address) for an m-of-n multisig owner set — TODO confirm (generated doc had no description)
# parameters:
# string `symbol` - which asset
# uint32_t `m` - Required number of signatures
# address_list `addresses` - List of possible addresses for signatures
# return_type: `address`
multisig_get_balance_id: (symbol, m, addresses, error_handler = null) ->
  @rpc.request('wallet_multisig_get_balance_id', [symbol, m, addresses], error_handler).then (response) ->
    response.result
# Deposits funds from an account into an m-of-n multisig balance — TODO confirm (generated doc had no description)
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# string `from_name` - TITAN name to withdraw from
# uint32_t `m` - Required number of signatures
# address_list `addresses` - List of possible addresses for signatures
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
multisig_deposit: (amount, symbol, from_name, m, addresses, vote_method, error_handler = null) ->
  @rpc.request('wallet_multisig_deposit', [amount, symbol, from_name, m, addresses, vote_method], error_handler).then (response) ->
    response.result
# Builds a withdrawal of funds from a plain balance address — TODO confirm (generated doc had no description)
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# address `from_address` - the balance address to withdraw from
# string `to` - address or account to receive funds
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# bool `sign_and_broadcast` -
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
withdraw_from_address: (amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path, error_handler = null) ->
  @rpc.request('wallet_withdraw_from_address', [amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path], error_handler).then (response) ->
    response.result
# Builds a withdrawal of funds from a legacy (BTC-style) balance address — TODO confirm (generated doc had no description)
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# legacy_address `from_address` - the balance address to withdraw from
# string `to` - address or account to receive funds
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# bool `sign_and_broadcast` -
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
withdraw_from_legacy_address: (amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path, error_handler = null) ->
  @rpc.request('wallet_withdraw_from_legacy_address', [amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path], error_handler).then (response) ->
    response.result
# Begins building a withdrawal transaction from a multisig balance — TODO confirm (generated doc had no description)
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# address `from` - multisig balance ID to withdraw from
# address `to_address` - address to receive funds
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
multisig_withdraw_start: (amount, symbol, from, to_address, vote_method, builder_path, error_handler = null) ->
  @rpc.request('wallet_multisig_withdraw_start', [amount, symbol, from, to_address, vote_method, builder_path], error_handler).then (response) ->
    response.result
# Review a transaction and add a signature.
# parameters:
# transaction_builder `builder` - A transaction builder object created by a wallet. If null, tries to use builder in file.
# bool `broadcast` - Try to broadcast this transaction?
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
builder_add_signature: (builder, broadcast, builder_path, error_handler = null) ->
  @rpc.request('wallet_builder_add_signature', [builder, broadcast, builder_path], error_handler).then (response) ->
    response.result
# Review a transaction in a builder file and add a signature.
# parameters:
# bool `broadcast` - Try to broadcast this transaction?
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
builder_file_add_signature: (broadcast, builder_path, error_handler = null) ->
  @rpc.request('wallet_builder_file_add_signature', [broadcast, builder_path], error_handler).then (response) ->
    response.result
# Releases escrow balance to third parties
# parameters:
# account_name `pay_fee_with_account_name` - when releasing escrow a transaction fee must be paid by funds not in escrow, this account will pay the fee
# address `escrow_balance_id` - The balance id of the escrow to be released.
# account_name `released_by_account` - the account that is to perform the release.
# share_type `amount_to_sender` - Amount to release back to the sender.
# share_type `amount_to_receiver` - Amount to release to receiver.
# return_type: `transaction_record`
release_escrow: (pay_fee_with_account_name, escrow_balance_id, released_by_account, amount_to_sender, amount_to_receiver, error_handler = null) ->
  @rpc.request('wallet_release_escrow', [pay_fee_with_account_name, escrow_balance_id, released_by_account, amount_to_sender, amount_to_receiver], error_handler).then (response) ->
    response.result
# Sends given amount to the given name, with the from field set to a different account than the payer. This transfer will occur in a single transaction and will be cheaper, but may reduce your privacy.
# parameters:
# string `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `paying_account_name` - the source account to draw the shares from
# sending_account_name `from_account_name` - the account to show the recipient as being the sender (requires account's private key to be in wallet).
# receive_account_name `to_account_name` - the account to transfer the shares to
# account_name `escrow_account_name` - the account of the escrow agent which has the power to decide how to divide the funds among from/to accounts.
# digest `agreement` - the hash of an agreement between the sender/receiver; in the event a dispute arises it can be given to the escrow agent
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_from_with_escrow: (amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, escrow_account_name, agreement, memo_message, vote_method, error_handler = null) ->
  @rpc.request('wallet_transfer_from_with_escrow', [amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, escrow_account_name, agreement, memo_message, vote_method], error_handler).then (response) ->
    response.result
# Sends given amount to the given name, with the from field set to a different account than the payer. This transfer will occur in a single transaction and will be cheaper, but may reduce your privacy.
# parameters:
# string `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `paying_account_name` - the source account to draw the shares from
# sending_account_name `from_account_name` - the account to show the recipient as being the sender (requires account's private key to be in wallet).
# receive_account_name `to_account_name` - the account to transfer the shares to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_from: (amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, memo_message, vote_method, error_handler = null) ->
  @rpc.request('wallet_transfer_from', [amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, memo_message, vote_method], error_handler).then (response) ->
    response.result
# Scans the blockchain history for operations relevant to this wallet.
# parameters:
# uint32_t `first_block_number` - the first block to scan
# uint32_t `num_blocks` - the number of blocks to scan
# bool `fast_scan` - true to scan as fast as possible but freeze the rest of your computer, and false otherwise
# return_type: `void`
rescan_blockchain: (first_block_number, num_blocks, fast_scan, error_handler = null) ->
  @rpc.request('wallet_rescan_blockchain', [first_block_number, num_blocks, fast_scan], error_handler).then (response) ->
    response.result
# Queries your wallet for the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# return_type: `transaction_record`
get_transaction: (transaction_id, error_handler = null) ->
  @rpc.request('wallet_get_transaction', [transaction_id], error_handler).then (response) ->
    response.result
# Scans the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# bool `overwrite_existing` - true to overwrite existing wallet transaction record and false otherwise
# return_type: `transaction_record`
scan_transaction: (transaction_id, overwrite_existing, error_handler = null) ->
  @rpc.request('wallet_scan_transaction', [transaction_id, overwrite_existing], error_handler).then (response) ->
    response.result
# Scans the specified transaction (experimental scanner; unlike scan_transaction, resolves with no value)
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# bool `overwrite_existing` - true to overwrite existing wallet transaction record and false otherwise
# return_type: `void`
scan_transaction_experimental: (transaction_id, overwrite_existing, error_handler = null) ->
  @rpc.request('wallet_scan_transaction_experimental', [transaction_id, overwrite_existing], error_handler).then (response) ->
    response.result
# Adds a custom note to the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# string `note` - note to add
# return_type: `void`
add_transaction_note_experimental: (transaction_id, note, error_handler = null) ->
  @rpc.request('wallet_add_transaction_note_experimental', [transaction_id, note], error_handler).then (response) ->
    response.result
# Rebroadcasts the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# return_type: `void`
rebroadcast_transaction: (transaction_id, error_handler = null) ->
  @rpc.request('wallet_rebroadcast_transaction', [transaction_id], error_handler).then (response) ->
    response.result
# Updates the data published about a given account
# parameters:
# account_name `account_name` - the account that will be updated
# account_name `pay_from_account` - the account from which fees will be paid
# json_variant `public_data` - public data about the account
# uint8_t `delegate_pay_rate` - -1 for non-delegates; otherwise the percent of delegate pay to accept per produced block (NOTE(review): type is uint8_t, so -1 is presumably a sentinel such as 255 — verify)
# string `account_type` - titan_account | public_account - public accounts do not receive memos and all payments are made to the active key
# return_type: `transaction_record`
account_register: (account_name, pay_from_account, public_data, delegate_pay_rate, account_type, error_handler = null) ->
  @rpc.request('wallet_account_register', [account_name, pay_from_account, public_data, delegate_pay_rate, account_type], error_handler).then (response) ->
    response.result
# Updates the local private data for an account
# parameters:
# account_name `account_name` - the account that will be updated
# json_variant `private_data` - private data about the account
# return_type: `void`
account_update_private_data: (account_name, private_data, error_handler = null) ->
  @rpc.request('wallet_account_update_private_data', [account_name, private_data], error_handler).then (response) ->
    response.result
# Updates the data published about a given account
# parameters:
# account_name `account_name` - the account that will be updated
# account_name `pay_from_account` - the account from which fees will be paid
# json_variant `public_data` - public data about the account
# uint8_t `delegate_pay_rate` - -1 for non-delegates; otherwise the percent of delegate pay to accept per produced block
# return_type: `transaction_record`
account_update_registration: (account_name, pay_from_account, public_data, delegate_pay_rate, error_handler = null) ->
@rpc.request('wallet_account_update_registration', [account_name, pay_from_account, public_data, delegate_pay_rate], error_handler).then (response) ->
response.result
# Updates the specified account's active key and broadcasts the transaction.
# parameters:
# account_name `account_to_update` - The name of the account to update the active key of.
# account_name `pay_from_account` - The account from which fees will be paid.
# string `new_active_key` - WIF private key to update active key to. If empty, a new key will be generated.
# return_type: `transaction_record`
account_update_active_key: (account_to_update, pay_from_account, new_active_key, error_handler = null) ->
@rpc.request('wallet_account_update_active_key', [account_to_update, pay_from_account, new_active_key], error_handler).then (response) ->
response.result
# Lists all accounts associated with this wallet
# parameters:
# return_type: `wallet_account_record_array`
list_accounts: (error_handler = null) ->
@rpc.request('wallet_list_accounts', error_handler).then (response) ->
response.result
# Lists all accounts which have been marked as favorites.
# parameters:
# return_type: `wallet_account_record_array`
list_favorite_accounts: (error_handler = null) ->
@rpc.request('wallet_list_favorite_accounts', error_handler).then (response) ->
response.result
# Lists all unregistered accounts belonging to this wallet
# parameters:
# return_type: `wallet_account_record_array`
list_unregistered_accounts: (error_handler = null) ->
@rpc.request('wallet_list_unregistered_accounts', error_handler).then (response) ->
response.result
# Lists all accounts for which we have a private key in this wallet
# parameters:
# return_type: `wallet_account_record_array`
list_my_accounts: (error_handler = null) ->
@rpc.request('wallet_list_my_accounts', error_handler).then (response) ->
response.result
# Fetches the account record registered under the given name.
# parameters:
#   account_name `account_name` - the name of the account to retrieve
# return_type: `wallet_account_record`
get_account: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_get_account', params, error_handler).then ({result}) -> result
# Fetches the public address for the named account.
# parameters:
#   account_name `account_name` - the name of the account whose public address you want
# return_type: `address`
get_account_public_address: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_get_account_public_address', params, error_handler).then ({result}) -> result
# Removes a contact account from this wallet.
# parameters:
#   account_name `account_name` - the name of the contact
# return_type: `void`
remove_contact_account: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_remove_contact_account', params, error_handler).then ({result}) -> result
# Renames an account held in this wallet.
# parameters:
#   account_name `current_account_name` - the current name of the account
#   new_account_name `new_account_name` - the new name for the account
# return_type: `void`
account_rename: (current_account_name, new_account_name, error_handler = null) ->
  params = [current_account_name, new_account_name]
  @rpc.request('wallet_account_rename', params, error_handler).then ({result}) -> result
# Creates a new user-issued asset.
# parameters:
#   asset_symbol `symbol` - the ticker symbol for the new asset
#   string `asset_name` - the name of the asset
#   string `issuer_name` - the name of the issuer of the asset
#   string `description` - a description of the asset
#   real_amount `maximum_share_supply` - the maximum number of shares of the asset
#   uint64_t `precision` - defines where the decimal should be displayed, must be a power of 10
#   json_variant `public_data` - arbitrary data attached to the asset
#   bool `is_market_issued` - creation of a new BitAsset that is created by shorting
# return_type: `transaction_record`
asset_create: (symbol, asset_name, issuer_name, description, maximum_share_supply, precision, public_data, is_market_issued, error_handler = null) ->
  params = [symbol, asset_name, issuer_name, description, maximum_share_supply, precision, public_data, is_market_issued]
  @rpc.request('wallet_asset_create', params, error_handler).then ({result}) -> result
# Updates an existing user-issued asset; once any shares exist, only public_data may change.
# parameters:
#   asset_symbol `symbol` - the ticker symbol for the asset to update
#   optional_string `name` - the new name to give the asset; or null to keep the current name
#   optional_string `description` - the new description to give the asset; or null to keep the current description
#   optional_variant `public_data` - the new public_data to give the asset; or null to keep the current public_data
#   optional_double `maximum_share_supply` - the new maximum_share_supply to give the asset; or null to keep the current maximum_share_supply
#   optional_uint64_t `precision` - the new precision to give the asset; or null to keep the current precision
#   share_type `issuer_transaction_fee` - an additional fee (denominated in issued asset) charged by the issuer on every transaction that uses this asset type
#   asset_permission_array `flags` - a set of flags set by the issuer (if they have permission to set them)
#   asset_permission_array `issuer_permissions` - a set of permissions an issuer retains
#   account_name `issuer_account_name` - used to transfer the asset to a new user
#   uint32_t `required_sigs` - number of signatures from the authority required to control this asset record
#   address_list `authority` - owner keys that control this asset record
# return_type: `transaction_record`
asset_update: (symbol, name, description, public_data, maximum_share_supply, precision, issuer_transaction_fee, flags, issuer_permissions, issuer_account_name, required_sigs, authority, error_handler = null) ->
  params = [symbol, name, description, public_data, maximum_share_supply, precision, issuer_transaction_fee, flags, issuer_permissions, issuer_account_name, required_sigs, authority]
  @rpc.request('wallet_asset_update', params, error_handler).then ({result}) -> result
# Issues new shares of a given asset type.
# parameters:
#   real_amount `amount` - the amount of shares to issue
#   asset_symbol `symbol` - the ticker symbol for asset
#   account_name `to_account_name` - the name of the account to receive the shares
#   string `memo_message` - the memo to send to the receiver
# return_type: `transaction_record`
asset_issue: (amount, symbol, to_account_name, memo_message, error_handler = null) ->
  params = [amount, symbol, to_account_name, memo_message]
  @rpc.request('wallet_asset_issue', params, error_handler).then ({result}) -> result
# Lists the total asset balances held by all open escrows.
# parameters:
#   account_name `account_name` - the account to get an escrow summary for, or leave empty for all accounts
# return_type: `escrow_summary_array`
escrow_summary: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_escrow_summary', params, error_handler).then ({result}) -> result
# Lists the total asset balances held by the specified account.
# parameters:
#   account_name `account_name` - the account to get a balance for, or leave empty for all accounts
# return_type: `account_balance_summary_type`
account_balance: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_account_balance', params, error_handler).then ({result}) -> result
# Lists the total asset balances across all withdraw condition types for the specified account.
# parameters:
#   account_name `account_name` - the account to get a balance for, or leave empty for all accounts
# return_type: `account_extended_balance_type`
account_balance_extended: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_account_balance_extended', params, error_handler).then ({result}) -> result
# Lists the balance record ids belonging to the specified account.
# parameters:
#   account_name `account_name` - the account to list balance ids for, or leave empty for all accounts
# return_type: `account_balance_id_summary_type`
account_balance_ids: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_account_balance_ids', params, error_handler).then ({result}) -> result
# Lists the total accumulated yield on asset balances.
# parameters:
#   account_name `account_name` - the account to get yield for, or leave empty for all accounts
# return_type: `account_balance_summary_type`
account_yield: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_account_yield', params, error_handler).then ({result}) -> result
# Lists every public key belonging to the given account.
# parameters:
#   account_name `account_name` - the account for which public keys should be listed
# return_type: `public_key_summary_array`
account_list_public_keys: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_account_list_public_keys', params, error_handler).then ({result}) -> result
# Transfers a portion of a delegate's accumulated pay out of its balance.
# parameters:
#   account_name `delegate_name` - the delegate whose pay is being cashed out
#   account_name `to_account_name` - the account that should receive the funds
#   real_amount `amount_to_withdraw` - the amount to withdraw
# return_type: `transaction_record`
delegate_withdraw_pay: (delegate_name, to_account_name, amount_to_withdraw, error_handler = null) ->
  params = [delegate_name, to_account_name, amount_to_withdraw]
  @rpc.request('wallet_delegate_withdraw_pay', params, error_handler).then ({result}) -> result
# Sets the fee attached to newly created transactions.
# parameters:
#   real_amount `fee` - the wallet transaction fee to set
# return_type: `asset`
set_transaction_fee: (fee, error_handler = null) ->
  params = [fee]
  @rpc.request('wallet_set_transaction_fee', params, error_handler).then ({result}) -> result
# Returns the current wallet transaction fee.
# parameters:
#   asset_symbol `symbol` - the wallet transaction fee if paid in the given asset type
# return_type: `asset`
get_transaction_fee: (symbol, error_handler = null) ->
  params = [symbol]
  @rpc.request('wallet_get_transaction_fee', params, error_handler).then ({result}) -> result
# Places a request to buy a quantity of assets at a price specified in another asset.
# parameters:
#   account_name `from_account_name` - the account that will provide funds for the bid
#   string `quantity` - the quantity of items you would like to buy
#   asset_symbol `quantity_symbol` - the type of items you would like to buy
#   string `base_price` - the price you would like to pay
#   asset_symbol `base_symbol` - the type of asset you would like to pay with
#   bool `allow_stupid_bid` - allow the user to place a bid at more than 5% above the current sell price
# return_type: `transaction_record`
market_submit_bid: (from_account_name, quantity, quantity_symbol, base_price, base_symbol, allow_stupid_bid, error_handler = null) ->
  params = [from_account_name, quantity, quantity_symbol, base_price, base_symbol, allow_stupid_bid]
  @rpc.request('wallet_market_submit_bid', params, error_handler).then ({result}) -> result
# Places a request to buy a quantity of assets at a price relative to the feed.
# parameters:
#   account_name `from_account_name` - the account that will provide funds for the bid
#   string `quantity` - the quantity of items you would like to buy
#   asset_symbol `quantity_symbol` - the type of items you would like to buy
#   string `relative_price` - the price relative to the feed you would like to pay
#   asset_symbol `base_symbol` - the type of asset you would like to pay with
#   string `limit_price` - the limit on what you are willing to pay
# return_type: `transaction_record`
market_submit_relative_bid: (from_account_name, quantity, quantity_symbol, relative_price, base_symbol, limit_price, error_handler = null) ->
  params = [from_account_name, quantity, quantity_symbol, relative_price, base_symbol, limit_price]
  @rpc.request('wallet_market_submit_relative_bid', params, error_handler).then ({result}) -> result
# Places a request to sell a quantity of assets at a price specified in another asset.
# parameters:
#   account_name `from_account_name` - the account that will provide funds for the ask
#   string `sell_quantity` - the quantity of items you would like to sell
#   asset_symbol `sell_quantity_symbol` - the type of items you would like to sell
#   string `ask_price` - the price per unit sold
#   asset_symbol `ask_price_symbol` - the type of asset you would like to be paid
#   bool `allow_stupid_ask` - allow the user to place an ask at more than 5% below the current buy price
# return_type: `transaction_record`
market_submit_ask: (from_account_name, sell_quantity, sell_quantity_symbol, ask_price, ask_price_symbol, allow_stupid_ask, error_handler = null) ->
  params = [from_account_name, sell_quantity, sell_quantity_symbol, ask_price, ask_price_symbol, allow_stupid_ask]
  @rpc.request('wallet_market_submit_ask', params, error_handler).then ({result}) -> result
# Places a request to sell a quantity of assets at a price relative to the feed.
# parameters:
#   account_name `from_account_name` - the account that will provide funds for the ask
#   string `sell_quantity` - the quantity of items you would like to sell
#   asset_symbol `sell_quantity_symbol` - the type of items you would like to sell
#   string `relative_ask_price` - the relative price per unit sold
#   asset_symbol `ask_price_symbol` - the type of asset you would like to be paid
#   string `limit_ask_price` - the minimum price per unit sold
# return_type: `transaction_record`
market_submit_relative_ask: (from_account_name, sell_quantity, sell_quantity_symbol, relative_ask_price, ask_price_symbol, limit_ask_price, error_handler = null) ->
  params = [from_account_name, sell_quantity, sell_quantity_symbol, relative_ask_price, ask_price_symbol, limit_ask_price]
  @rpc.request('wallet_market_submit_relative_ask', params, error_handler).then ({result}) -> result
# Places a request to short sell a quantity of assets at a specified price.
# parameters:
#   account_name `from_account_name` - the account that will provide funds for the short
#   string `short_collateral` - the amount of collateral you wish to fund this short with
#   asset_symbol `collateral_symbol` - the type of asset collateralizing this short (i.e. XTS)
#   string `interest_rate` - the APR you wish to pay interest at (0.0% to 1000.0%)
#   asset_symbol `quote_symbol` - the asset to short sell (i.e. USD)
#   string `short_price_limit` - maximum price (USD per XTS) that the short will execute at; if 0 then no limit will be applied
# return_type: `transaction_record`
market_submit_short: (from_account_name, short_collateral, collateral_symbol, interest_rate, quote_symbol, short_price_limit, error_handler = null) ->
  params = [from_account_name, short_collateral, collateral_symbol, interest_rate, quote_symbol, short_price_limit]
  @rpc.request('wallet_market_submit_short', params, error_handler).then ({result}) -> result
# Places a request to cover an existing short position.
# parameters:
#   account_name `from_account_name` - the account that will provide funds for the cover
#   string `quantity` - the quantity of asset you would like to cover
#   asset_symbol `quantity_symbol` - the type of asset you are covering (ie: USD)
#   order_id `cover_id` - the order ID you would like to cover
# return_type: `transaction_record`
market_cover: (from_account_name, quantity, quantity_symbol, cover_id, error_handler = null) ->
  params = [from_account_name, quantity, quantity_symbol, cover_id]
  @rpc.request('wallet_market_cover', params, error_handler).then ({result}) -> result
# Cancels and/or creates many market orders in a single transaction.
# parameters:
#   order_ids `cancel_order_ids` - order IDs of all market orders to cancel in this transaction
#   order_descriptions `new_orders` - descriptions of all new orders to create in this transaction
#   bool `sign` - true if the transaction should be signed and broadcast (if possible), false otherwise
# return_type: `transaction_record`
market_batch_update: (cancel_order_ids, new_orders, sign, error_handler = null) ->
  params = [cancel_order_ids, new_orders, sign]
  @rpc.request('wallet_market_batch_update', params, error_handler).then ({result}) -> result
# Adds collateral to an existing short position.
# parameters:
#   account_name `from_account_name` - the account that will provide the collateral
#   order_id `cover_id` - the ID of the order to recollateralize
#   string `real_quantity_collateral_to_add` - the quantity of collateral of the base asset to add to the specified position
# return_type: `transaction_record`
market_add_collateral: (from_account_name, cover_id, real_quantity_collateral_to_add, error_handler = null) ->
  params = [from_account_name, cover_id, real_quantity_collateral_to_add]
  @rpc.request('wallet_market_add_collateral', params, error_handler).then ({result}) -> result
# Lists the orders of a specific market.
# parameters:
#   asset_symbol `base_symbol` - the base symbol of the market
#   asset_symbol `quote_symbol` - the quote symbol of the market
#   uint32_t `limit` - the maximum number of items to return
#   account_name `account_name` - the account for which to get the orders, or empty for all accounts
# return_type: `market_order_map`
market_order_list: (base_symbol, quote_symbol, limit, account_name, error_handler = null) ->
  params = [base_symbol, quote_symbol, limit, account_name]
  @rpc.request('wallet_market_order_list', params, error_handler).then ({result}) -> result
# Lists the orders of a specific account.
# parameters:
#   account_name `account_name` - the account for which to get the orders, or empty for all accounts
#   uint32_t `limit` - the maximum number of items to return
# return_type: `market_order_map`
account_order_list: (account_name, limit, error_handler = null) ->
  params = [account_name, limit]
  @rpc.request('wallet_account_order_list', params, error_handler).then ({result}) -> result
# Cancels a single order: deprecated - use wallet_market_cancel_orders
# parameters:
#   order_id `order_id` - the ID of the order to cancel
# return_type: `transaction_record`
market_cancel_order: (order_id, error_handler = null) ->
  params = [order_id]
  @rpc.request('wallet_market_cancel_order', params, error_handler).then ({result}) -> result
# Cancels more than one order at a time.
# parameters:
#   order_ids `order_ids` - the IDs of the orders to cancel
# return_type: `transaction_record`
market_cancel_orders: (order_ids, error_handler = null) ->
  params = [order_ids]
  @rpc.request('wallet_market_cancel_orders', params, error_handler).then ({result}) -> result
# Reveals the private key corresponding to an account, public key, or address.
# parameters:
#   string `input` - an account name, public key, or address (quoted hash of public key)
# return_type: `string`
dump_private_key: (input, error_handler = null) ->
  params = [input]
  @rpc.request('wallet_dump_private_key', params, error_handler).then ({result}) -> result
# Returns how this account's votes are allocated.
# parameters:
#   account_name `account_name` - the account to report votes on, or empty for all accounts
# return_type: `account_vote_summary`
account_vote_summary: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_account_vote_summary', params, error_handler).then ({result}) -> result
# Checks how much of its voting power this account is utilizing.
# parameters:
#   account_name `account` -
# return_type: `vote_summary`
check_vote_proportion: (account, error_handler = null) ->
  params = [account]
  @rpc.request('wallet_check_vote_proportion', params, error_handler).then ({result}) -> result
# Stores a property in the GUI settings DB.
# parameters:
#   string `name` - the name of the setting to set
#   variant `value` - the value to set the setting to
# return_type: `void`
set_setting: (name, value, error_handler = null) ->
  params = [name, value]
  @rpc.request('wallet_set_setting', params, error_handler).then ({result}) -> result
# Reads the value of the given setting.
# parameters:
#   string `name` - the name of the setting to fetch
# return_type: `optional_variant`
get_setting: (name, error_handler = null) ->
  params = [name]
  @rpc.request('wallet_get_setting', params, error_handler).then ({result}) -> result
# Enables or disables block production for a particular delegate account.
# parameters:
#   string `delegate_name` - the delegate to enable/disable block production for; ALL for all delegate accounts
#   bool `enabled` - true to enable block production, false otherwise
# return_type: `void`
delegate_set_block_production: (delegate_name, enabled, error_handler = null) ->
  params = [delegate_name, enabled]
  @rpc.request('wallet_delegate_set_block_production', params, error_handler).then ({result}) -> result
# Enables or disables wallet transaction scanning.
# parameters:
#   bool `enabled` - true to enable transaction scanning, false otherwise
# return_type: `bool`
set_transaction_scanning: (enabled, error_handler = null) ->
  params = [enabled]
  @rpc.request('wallet_set_transaction_scanning', params, error_handler).then ({result}) -> result
# Signs the provided message digest with the account key.
# parameters:
#   string `signer` - a public key, address, or account name whose key to sign with
#   sha256 `hash` - SHA256 digest of the message to sign
# return_type: `compact_signature`
sign_hash: (signer, hash, error_handler = null) ->
  params = [signer, hash]
  @rpc.request('wallet_sign_hash', params, error_handler).then ({result}) -> result
# Initiates the login procedure by providing a BitShares Login URL.
# parameters:
#   string `server_account` - name of the account of the server; the user will be shown this name as the site he is logging into
# return_type: `string`
login_start: (server_account, error_handler = null) ->
  params = [server_account]
  @rpc.request('wallet_login_start', params, error_handler).then ({result}) -> result
# Completes the login procedure by finding the user's public account key and shared secret.
# parameters:
#   public_key `server_key` - the one-time public key from wallet_login_start
#   public_key `client_key` - the client's one-time public key
#   compact_signature `client_signature` - the client's signature of the shared secret
# return_type: `variant`
login_finish: (server_key, client_key, client_signature, error_handler = null) ->
  params = [server_key, client_key, client_signature]
  @rpc.request('wallet_login_finish', params, error_handler).then ({result}) -> result
# Sets a balance's voting address and slate.
# parameters:
#   address `balance_id` - the id of the balance to update
#   string `voter_address` - the new voting address; if none is specified, tries to re-use the existing address
#   vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
#   bool `sign_and_broadcast` -
#   string `builder_path` - if specified, will write the builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
balance_set_vote_info: (balance_id, voter_address, vote_method, sign_and_broadcast, builder_path, error_handler = null) ->
  params = [balance_id, voter_address, vote_method, sign_and_broadcast, builder_path]
  @rpc.request('wallet_balance_set_vote_info', params, error_handler).then ({result}) -> result
# Publishes the current wallet delegate slate to the public data associated with the account.
# parameters:
#   account_name `publishing_account_name` - the account to publish the slate ID under
#   account_name `paying_account_name` - the account to pay transaction fees, or leave empty to pay with the publishing account
# return_type: `transaction_record`
publish_slate: (publishing_account_name, paying_account_name, error_handler = null) ->
  params = [publishing_account_name, paying_account_name]
  @rpc.request('wallet_publish_slate', params, error_handler).then ({result}) -> result
# Publishes the current client version to the specified account's public data record.
# parameters:
#   account_name `publishing_account_name` - the account to publish the client version under
#   account_name `paying_account_name` - the account to pay transaction fees with, or leave empty to pay with the publishing account
# return_type: `transaction_record`
publish_version: (publishing_account_name, paying_account_name, error_handler = null) ->
  params = [publishing_account_name, paying_account_name]
  @rpc.request('wallet_publish_version', params, error_handler).then ({result}) -> result
# Collects the specified account's genesis balances.
# parameters:
#   account_name `account_name` - account to collect genesis balances for
# return_type: `transaction_record`
collect_genesis_balances: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_collect_genesis_balances', params, error_handler).then ({result}) -> result
# Collects the specified account's vested balances.
# parameters:
#   account_name `account_name` - account to collect vested balances for
# return_type: `transaction_record`
collect_vested_balances: (account_name, error_handler = null) ->
  params = [account_name]
  @rpc.request('wallet_collect_vested_balances', params, error_handler).then ({result}) -> result
# Updates a delegate's block signing and feed publishing key.
# parameters:
#   account_name `authorizing_account_name` - the account that will authorize changing the block signing key
#   account_name `delegate_name` - the delegate account which will have its block signing key changed
#   public_key `signing_key` - the new key that will be used for block signing
# return_type: `transaction_record`
delegate_update_signing_key: (authorizing_account_name, delegate_name, signing_key, error_handler = null) ->
  params = [authorizing_account_name, delegate_name, signing_key]
  @rpc.request('wallet_delegate_update_signing_key', params, error_handler).then ({result}) -> result
# Attempts to recover accounts created after the last backup was taken and returns the number of
# successful recoveries. Use if you have restored from backup and are missing accounts.
# parameters:
#   int32_t `accounts_to_recover` - the number of accounts to attempt to recover
#   int32_t `maximum_number_of_attempts` - the maximum number of keys to generate while trying to recover accounts
# return_type: `int32_t`
recover_accounts: (accounts_to_recover, maximum_number_of_attempts, error_handler = null) ->
  params = [accounts_to_recover, maximum_number_of_attempts]
  @rpc.request('wallet_recover_accounts', params, error_handler).then ({result}) -> result
# Attempts to recover any missing recipient and memo information for the specified transaction.
# parameters:
#   string `transaction_id_prefix` - the id (or id prefix) of the transaction record
#   string `recipient_account` - the account name of the recipient (if known)
# return_type: `transaction_record`
recover_transaction: (transaction_id_prefix, recipient_account, error_handler = null) ->
  params = [transaction_id_prefix, recipient_account]
  @rpc.request('wallet_recover_transaction', params, error_handler).then ({result}) -> result
# Verifies whether the specified transaction made a TITAN deposit to the current wallet; returns null if not.
# parameters:
#   string `transaction_id_prefix` - the id (or id prefix) of the transaction record
# return_type: `optional_variant_object`
verify_titan_deposit: (transaction_id_prefix, error_handler = null) ->
  params = [transaction_id_prefix]
  @rpc.request('wallet_verify_titan_deposit', params, error_handler).then ({result}) -> result
# Publishes a price feed for BitAssets; only active delegates may do this.
# parameters:
#   account_name `delegate_account` - the delegate to publish the price under
#   real_amount `price` - the number of this asset per XTS
#   asset_symbol `asset_symbol` - the type of asset being priced
# return_type: `transaction_record`
publish_price_feed: (delegate_account, price, asset_symbol, error_handler = null) ->
  params = [delegate_account, price, asset_symbol]
  @rpc.request('wallet_publish_price_feed', params, error_handler).then ({result}) -> result
# Publishes a set of feeds for BitAssets; only active delegates may do this.
# parameters:
#   account_name `delegate_account` - the delegate to publish the price under
#   price_map `symbol_to_price_map` - maps the BitAsset symbol to its price per share
# return_type: `transaction_record`
publish_feeds: (delegate_account, symbol_to_price_map, error_handler = null) ->
  params = [delegate_account, symbol_to_price_map]
  @rpc.request('wallet_publish_feeds', params, error_handler).then ({result}) -> result
# Publishes a set of feeds for BitAssets for all active delegates; most useful for testnets.
# parameters:
#   price_map `symbol_to_price_map` - maps the BitAsset symbol to its price per share
# return_type: `vector<std::pair<string, wallet_transaction_record>>`
publish_feeds_multi_experimental: (symbol_to_price_map, error_handler = null) ->
  params = [symbol_to_price_map]
  @rpc.request('wallet_publish_feeds_multi_experimental', params, error_handler).then ({result}) -> result
# Tries to repair any inconsistent wallet account, key, and transaction records.
# parameters:
#   account_name `collecting_account_name` - collect any orphan balances into this account
# return_type: `void`
repair_records: (collecting_account_name, error_handler = null) ->
  params = [collecting_account_name]
  @rpc.request('wallet_repair_records', params, error_handler).then ({result}) -> result
# Regenerates private keys as part of wallet recovery.
# parameters:
#   account_name `account_name` - the account the generated keys should be a part of
#   uint32_t `max_key_number` - the last key number to regenerate
# return_type: `int32_t`
regenerate_keys: (account_name, max_key_number, error_handler = null) ->
  params = [account_name, max_key_number]
  @rpc.request('wallet_regenerate_keys', params, error_handler).then ({result}) -> result
# Creates a new mail message and returns the unencrypted message.
# parameters:
#   string `sender` - the name of the message's sender
#   string `subject` - the subject of the message
#   string `body` - the body of the message
#   message_id `reply_to` - the ID of the message this is in reply to
# return_type: `message`
mail_create: (sender, subject, body, reply_to, error_handler = null) ->
  params = [sender, subject, body, reply_to]
  @rpc.request('wallet_mail_create', params, error_handler).then ({result}) -> result
# Encrypts a mail message and returns the encrypted message.
# parameters:
#   string `recipient` - the name of the message's recipient
#   message `plaintext` - the plaintext message, such as from wallet_mail_create
# return_type: `message`
mail_encrypt: (recipient, plaintext, error_handler = null) ->
  params = [recipient, plaintext]
  @rpc.request('wallet_mail_encrypt', params, error_handler).then ({result}) -> result
# Opens an encrypted mail message.
# parameters:
#   address `recipient` - the address of the message's recipient
#   message `ciphertext` - the encrypted message
# return_type: `message`
mail_open: (recipient, ciphertext, error_handler = null) ->
  params = [recipient, ciphertext]
  @rpc.request('wallet_mail_open', params, error_handler).then ({result}) -> result
# Sets the list of mail servers an account checks for its mail.
# parameters:
#   string `account_name` - the name of the account whose mail servers should be updated
#   string_array `server_list` - a list of names of blockchain accounts who run mail servers
#   string `paying_account` - the name of the account to pay the transaction fee, if different from account_name
# return_type: `void`
set_preferred_mail_servers: (account_name, server_list, paying_account, error_handler = null) ->
  params = [account_name, server_list, paying_account]
  @rpc.request('wallet_set_preferred_mail_servers', params, error_handler).then ({result}) -> result
# Retracts (permanently disables) the specified account in case of master key compromise.
# parameters:
#   account_name `account_to_retract` - the name of the account to retract
#   account_name `pay_from_account` - the account from which fees will be paid
# return_type: `transaction_record`
account_retract: (account_to_retract, pay_from_account, error_handler = null) ->
  params = [account_to_retract, pay_from_account]
  @rpc.request('wallet_account_retract', params, error_handler).then ({result}) -> result
# Generates a human-friendly brain wallet key, ending with a public salt as the last word.
# parameters:
# return_type: `string`
generate_brain_seed: (error_handler = null) ->
  @rpc.request('wallet_generate_brain_seed', error_handler).then ({result}) -> result
# Register WalletAPI as an Angular service on the "app" module; the DI annotations
# supply $q, $log, RpcService and $interval to the constructor (bound as
# @q, @log, @rpc, @interval).
angular.module("app").service("WalletAPI", ["$q", "$log", "RpcService", "$interval", WalletAPI])
# Warning: this is a generated file, any changes made here will be overwritten by the build process
class WalletAPI
# Angular DI entry point. CoffeeScript's `@`-parameter shorthand assigns the
# injected $q, $log, RpcService and $interval straight onto the instance as
# @q, @log, @rpc and @interval; every API method below issues its call via @rpc.
constructor: (@q, @log, @rpc, @interval) ->
  #@log.info "---- WalletAPI Constructor ----"
# Returns extra information about the wallet.
# parameters:
# return_type: `json_object`
get_info: (error_handler = null) ->
  @rpc.request('wallet_get_info', error_handler).then ({result}) -> result
# Opens the wallet with the given name.
# parameters:
#   wallet_name `wallet_name` - the name of the wallet to open
# return_type: `void`
open: (wallet_name, error_handler = null) ->
  params = [wallet_name]
  @rpc.request('wallet_open', params, error_handler).then ({result}) -> result
# Creates a wallet with the given name.
# parameters:
#   wallet_name `wallet_name` - name of the wallet to create
#   new_passphrase `new_passphrase` - a passphrase for encrypting the wallet
#   brainkey `brain_key` - a strong passphrase that will be used to generate all private keys, defaults to a large random number
# return_type: `void`
create: (wallet_name, new_passphrase, brain_key, error_handler = null) ->
  params = [wallet_name, new_passphrase, brain_key]
  @rpc.request('wallet_create', params, error_handler).then ({result}) -> result
# Returns the wallet name that was passed to wallet_open.
# parameters:
# return_type: `optional_wallet_name`
get_name: (error_handler = null) ->
  @rpc.request('wallet_get_name', error_handler).then ({result}) -> result
# Loads the private key into the specified account. Returns which account it was actually imported to.
# parameters:
#   wif_private_key `wif_key` - a private key in bitcoin Wallet Import Format (WIF)
#   account_name `account_name` - the name of the account the key should be imported into; if null then the key must belong to an active account
#   bool `create_new_account` - if true, the wallet will attempt to create a new account for the name provided rather than import the key into an existing account
#   bool `rescan` - if true, the wallet will rescan the blockchain looking for transactions that involve this private key
# return_type: `account_name`
import_private_key: (wif_key, account_name, create_new_account, rescan, error_handler = null) ->
  params = [wif_key, account_name, create_new_account, rescan]
  @rpc.request('wallet_import_private_key', params, error_handler).then ({result}) -> result
# Imports a Bitcoin Core or BitShares PTS wallet
# parameters:
# filename `wallet_filename` - the Bitcoin/PTS wallet file path
  # passphrase `passphrase` - the imported wallet's password
# account_name `account_name` - the account to receive the contents of the wallet
# return_type: `uint32_t`
import_bitcoin: (wallet_filename, passphrase, account_name, error_handler = null) ->
@rpc.request('wallet_import_bitcoin', [wallet_filename, passphrase, account_name], error_handler).then (response) ->
response.result
# Imports an Electrum wallet
# parameters:
# filename `wallet_filename` - the Electrum wallet file path
  # passphrase `passphrase` - the imported wallet's password
# account_name `account_name` - the account to receive the contents of the wallet
# return_type: `uint32_t`
import_electrum: (wallet_filename, passphrase, account_name, error_handler = null) ->
@rpc.request('wallet_import_electrum', [wallet_filename, passphrase, account_name], error_handler).then (response) ->
response.result
# Create the key from keyhotee config and import it to the wallet, creating a new account using this key
# parameters:
# name `firstname` - first name in keyhotee profile config, for salting the seed of private key
# name `middlename` - middle name in keyhotee profile config, for salting the seed of private key
# name `lastname` - last name in keyhotee profile config, for salting the seed of private key
# brainkey `brainkey` - brainkey in keyhotee profile config, for salting the seed of private key
# keyhoteeid `keyhoteeid` - using keyhotee id as account name
# return_type: `void`
import_keyhotee: (firstname, middlename, lastname, brainkey, keyhoteeid, error_handler = null) ->
@rpc.request('wallet_import_keyhotee', [firstname, middlename, lastname, brainkey, keyhoteeid], error_handler).then (response) ->
response.result
# Imports anything that looks like a private key from the given JSON file.
# parameters:
# filename `json_filename` - the full path and filename of JSON wallet to import, example: /path/to/exported_wallet.json
  # passphrase `imported_wallet_passphrase` - passphrase for encrypted keys
# account_name `account` - Account into which to import keys.
# return_type: `void`
import_keys_from_json: (json_filename, imported_wallet_passphrase, account, error_handler = null) ->
@rpc.request('wallet_import_keys_from_json', [json_filename, imported_wallet_passphrase, account], error_handler).then (response) ->
response.result
  # Closes the current wallet if one is open
# parameters:
# return_type: `void`
close: (error_handler = null) ->
@rpc.request('wallet_close', error_handler).then (response) ->
response.result
# Exports the current wallet to a JSON file
# parameters:
# filename `json_filename` - the full path and filename of JSON file to generate, example: /path/to/exported_wallet.json
# return_type: `void`
backup_create: (json_filename, error_handler = null) ->
@rpc.request('wallet_backup_create', [json_filename], error_handler).then (response) ->
response.result
# Creates a new wallet from an exported JSON file
# parameters:
# filename `json_filename` - the full path and filename of JSON wallet to import, example: /path/to/exported_wallet.json
# wallet_name `wallet_name` - name of the wallet to create
  # passphrase `imported_wallet_passphrase` - passphrase of the imported wallet
# return_type: `void`
backup_restore: (json_filename, wallet_name, imported_wallet_passphrase, error_handler = null) ->
@rpc.request('wallet_backup_restore', [json_filename, wallet_name, imported_wallet_passphrase], error_handler).then (response) ->
response.result
# Enables or disables automatic wallet backups
# parameters:
# bool `enabled` - true to enable and false to disable
# return_type: `bool`
set_automatic_backups: (enabled, error_handler = null) ->
@rpc.request('wallet_set_automatic_backups', [enabled], error_handler).then (response) ->
response.result
# Set transaction expiration time
# parameters:
# uint32_t `seconds` - seconds before new transactions expire
# return_type: `uint32_t`
set_transaction_expiration_time: (seconds, error_handler = null) ->
@rpc.request('wallet_set_transaction_expiration_time', [seconds], error_handler).then (response) ->
response.result
# Creates a normal user object. If no owner info is specified, uses a new address from payer.
# parameters:
# account_name `account` -
# variant `user_data` -
# int32_t `m` -
# address_list `owners` -
# return_type: `transaction_record`
object_create: (account, user_data, m, owners, error_handler = null) ->
@rpc.request('wallet_object_create', [account, user_data, m, owners], error_handler).then (response) ->
response.result
# Update a normal user object.
# parameters:
# sending_account_name `paying_account_name` - the source account to draw the shares from
# object_id_type `object_id` - the object to update
# variant `user_data` -
# bool `sign_and_broadcast` -
# return_type: `transaction_builder`
object_update: (paying_account_name, object_id, user_data, sign_and_broadcast, error_handler = null) ->
@rpc.request('wallet_object_update', [paying_account_name, object_id, user_data, sign_and_broadcast], error_handler).then (response) ->
response.result
# Update a normal user object's owner.
# parameters:
# sending_account_name `paying_account_name` - the source account to draw the shares from
# object_id_type `object_id` - the object to update
# uint32_t `m` -
# address_list `owners` -
# bool `sign_and_broadcast` -
# return_type: `transaction_builder`
object_transfer: (paying_account_name, object_id, m, owners, sign_and_broadcast, error_handler = null) ->
@rpc.request('wallet_object_transfer', [paying_account_name, object_id, m, owners, sign_and_broadcast], error_handler).then (response) ->
response.result
# List objects that belong to an account.
# parameters:
# account_name `account` - Account to fetch objects for
# return_type: `object_array`
object_list: (account, error_handler = null) ->
@rpc.request('wallet_object_list', [account], error_handler).then (response) ->
response.result
# Create or update an edge object.
# parameters:
# account_name `paying_account` - Account that will pay for this transaction
# object_id_type `from` -
  # object_id_type `to` - The destination object of the edge (original text duplicated the paying-account description)
# string `name` - The edge name (the 'key', used in index)
# variant `value` - The edge 'value', not part of the index
# return_type: `transaction_builder`
set_edge: (paying_account, from, to, name, value, error_handler = null) ->
@rpc.request('wallet_set_edge', [paying_account, from, to, name, value], error_handler).then (response) ->
response.result
# Lists transaction history for the specified account
# parameters:
# string `account_name` - the name of the account for which the transaction history will be returned, "" for all accounts, example: alice
# string `asset_symbol` - only include transactions involving the specified asset, or "" to include all
# int32_t `limit` - limit the number of returned transactions; negative for most recent and positive for least recent. 0 does not limit
# uint32_t `start_block_num` - the earliest block number to list transactions from; 0 to include all transactions starting from genesis
# uint32_t `end_block_num` - the latest block to list transaction from; -1 to include all transactions ending at the head block
# return_type: `pretty_transactions`
account_transaction_history: (account_name, asset_symbol, limit, start_block_num, end_block_num, error_handler = null) ->
@rpc.request('wallet_account_transaction_history', [account_name, asset_symbol, limit, start_block_num, end_block_num], error_handler).then (response) ->
response.result
#
# parameters:
# string `account_name` - the name of the account for which the transaction history will be returned, "" for all accounts, example: alice
# return_type: `experimental_transactions`
transaction_history_experimental: (account_name, error_handler = null) ->
@rpc.request('wallet_transaction_history_experimental', [account_name], error_handler).then (response) ->
response.result
#
# parameters:
# return_type: `snapshot_record_list`
check_sharedrop: (error_handler = null) ->
@rpc.request('wallet_check_sharedrop', error_handler).then (response) ->
response.result
# Removes the specified transaction record from your transaction history. USE WITH CAUTION! Rescan cannot reconstruct all transaction details
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction record
# return_type: `void`
remove_transaction: (transaction_id, error_handler = null) ->
@rpc.request('wallet_remove_transaction', [transaction_id], error_handler).then (response) ->
response.result
# Return any errors for your currently pending transactions
# parameters:
# string `filename` - filename to save pending transaction errors to
# return_type: `map<transaction_id_type, fc::exception>`
get_pending_transaction_errors: (filename, error_handler = null) ->
@rpc.request('wallet_get_pending_transaction_errors', [filename], error_handler).then (response) ->
response.result
# Lock the private keys in wallet, disables spending commands until unlocked
# parameters:
# return_type: `void`
lock: (error_handler = null) ->
@rpc.request('wallet_lock', error_handler).then (response) ->
response.result
# Unlock the private keys in the wallet to enable spending operations
# parameters:
# uint32_t `timeout` - the number of seconds to keep the wallet unlocked
  # passphrase `passphrase` - the passphrase for encrypting the wallet
# return_type: `void`
unlock: (timeout, passphrase, error_handler = null) ->
@rpc.request('wallet_unlock', [timeout, passphrase], error_handler).then (response) ->
response.result
# Change the password of the current wallet
# parameters:
  # passphrase `passphrase` - the passphrase for encrypting the wallet
# return_type: `void`
change_passphrase: (passphrase, error_handler = null) ->
@rpc.request('wallet_change_passphrase', [passphrase], error_handler).then (response) ->
response.result
# Return a list of wallets in the current data directory
# parameters:
# return_type: `wallet_name_array`
list: (error_handler = null) ->
@rpc.request('wallet_list', error_handler).then (response) ->
response.result
# Add new account for receiving payments
# parameters:
# account_name `account_name` - the name you will use to refer to this receive account
# json_variant `private_data` - Extra data to store with this account record
# return_type: `public_key`
account_create: (account_name, private_data, error_handler = null) ->
@rpc.request('wallet_account_create', [account_name, private_data], error_handler).then (response) ->
response.result
# Updates the favorited status of the specified account
# parameters:
# account_name `account_name` - the name of the account to set favorited status on
# bool `is_favorite` - true if account should be marked as a favorite; false otherwise
# return_type: `void`
account_set_favorite: (account_name, is_favorite, error_handler = null) ->
@rpc.request('wallet_account_set_favorite', [account_name, is_favorite], error_handler).then (response) ->
response.result
# Updates your approval of the specified account
# parameters:
# account_name `account_name` - the name of the account to set approval for
# int8_t `approval` - 1, 0, or -1 respectively for approve, neutral, or disapprove
# return_type: `int8_t`
account_set_approval: (account_name, approval, error_handler = null) ->
@rpc.request('wallet_account_set_approval', [account_name, approval], error_handler).then (response) ->
response.result
# Add new account for sending payments
# parameters:
# account_name `account_name` - the name you will use to refer to this sending account
# public_key `account_key` - the key associated with this sending account
# return_type: `void`
add_contact_account: (account_name, account_key, error_handler = null) ->
@rpc.request('wallet_add_contact_account', [account_name, account_key], error_handler).then (response) ->
response.result
# Authorizes a public key to control funds of a particular asset class. Requires authority of asset issuer
# parameters:
# account_name `paying_account` - the account that will pay the transaction fee
# asset_symbol `symbol` - the asset granting authorization
# string `address` - the address being granted permission, or the public key, or the account name
# object_id_type `meta` - -1 to remove authorization, otherwise a link to an object in the object graph
# return_type: `transaction_record`
asset_authorize_key: (paying_account, symbol, address, meta, error_handler = null) ->
@rpc.request('wallet_asset_authorize_key', [paying_account, symbol, address, meta], error_handler).then (response) ->
response.result
# Burns given amount to the given account. This will allow you to post message and +/- sentiment on someones account as a form of reputation.
# parameters:
# real_amount `amount_to_burn` - the amount of shares to burn
# asset_symbol `asset_symbol` - the asset to burn
# sending_account_name `from_account_name` - the source account to draw the shares from
# string `for_or_against` - the value 'for' or 'against'
# receive_account_name `to_account_name` - the account to which the burn should be credited (for or against) and on which the public message will appear
# string `public_message` - a public message to post
# bool `anonymous` - true if anonymous, else signed by from_account_name
# return_type: `transaction_record`
burn: (amount_to_burn, asset_symbol, from_account_name, for_or_against, to_account_name, public_message, anonymous, error_handler = null) ->
@rpc.request('wallet_burn', [amount_to_burn, asset_symbol, from_account_name, for_or_against, to_account_name, public_message, anonymous], error_handler).then (response) ->
response.result
# Creates an address which can be used for a simple (non-TITAN) transfer.
# parameters:
# string `account_name` - The account name that will own this address
# string `label` -
# int32_t `legacy_network_byte` - If not -1, use this as the network byte for a BTC-style address.
# return_type: `string`
address_create: (account_name, label, legacy_network_byte, error_handler = null) ->
@rpc.request('wallet_address_create', [account_name, label, legacy_network_byte], error_handler).then (response) ->
response.result
# Do a simple (non-TITAN) transfer to a BTC-style address
# parameters:
# real_amount `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# account_name `from_account_name` - the source account to draw the shares from
# legacy_address `to_address` - the address to transfer to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_to_legacy_address: (amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer_to_legacy_address', [amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method], error_handler).then (response) ->
response.result
# Do a simple (non-TITAN) transfer to an address
# parameters:
# real_amount `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# account_name `from_account_name` - the source account to draw the shares from
# address `to_address` - the address to transfer to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_to_address: (amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer_to_address', [amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method], error_handler).then (response) ->
response.result
# Sends given amount to the given account, with the from field set to the payer. This transfer will occur in a single transaction and will be cheaper, but may reduce your privacy.
# parameters:
# string `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `from_account_name` - the source account to draw the shares from
# receive_account_name `to_account_name` - the account to transfer the shares to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer: (amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer', [amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method], error_handler).then (response) ->
response.result
# Sends given amount to the given account
# parameters:
# real_amount `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `from_account_name` - the source account to draw the shares from
# receive_account_name `to_account_name` - the account to transfer the shares to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_to_public_account: (amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer_to_public_account', [amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method], error_handler).then (response) ->
response.result
#
# parameters:
# string `symbol` - which asset
# uint32_t `m` - Required number of signatures
# address_list `addresses` - List of possible addresses for signatures
# return_type: `address`
multisig_get_balance_id: (symbol, m, addresses, error_handler = null) ->
@rpc.request('wallet_multisig_get_balance_id', [symbol, m, addresses], error_handler).then (response) ->
response.result
#
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# string `from_name` - TITAN name to withdraw from
# uint32_t `m` - Required number of signatures
# address_list `addresses` - List of possible addresses for signatures
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
multisig_deposit: (amount, symbol, from_name, m, addresses, vote_method, error_handler = null) ->
@rpc.request('wallet_multisig_deposit', [amount, symbol, from_name, m, addresses, vote_method], error_handler).then (response) ->
response.result
#
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# address `from_address` - the balance address to withdraw from
# string `to` - address or account to receive funds
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# bool `sign_and_broadcast` -
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
withdraw_from_address: (amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path, error_handler = null) ->
@rpc.request('wallet_withdraw_from_address', [amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path], error_handler).then (response) ->
response.result
#
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# legacy_address `from_address` - the balance address to withdraw from
# string `to` - address or account to receive funds
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# bool `sign_and_broadcast` -
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
withdraw_from_legacy_address: (amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path, error_handler = null) ->
@rpc.request('wallet_withdraw_from_legacy_address', [amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path], error_handler).then (response) ->
response.result
#
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# address `from` - multisig balance ID to withdraw from
# address `to_address` - address to receive funds
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
multisig_withdraw_start: (amount, symbol, from, to_address, vote_method, builder_path, error_handler = null) ->
@rpc.request('wallet_multisig_withdraw_start', [amount, symbol, from, to_address, vote_method, builder_path], error_handler).then (response) ->
response.result
# Review a transaction and add a signature.
# parameters:
# transaction_builder `builder` - A transaction builder object created by a wallet. If null, tries to use builder in file.
# bool `broadcast` - Try to broadcast this transaction?
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
builder_add_signature: (builder, broadcast, builder_path, error_handler = null) ->
@rpc.request('wallet_builder_add_signature', [builder, broadcast, builder_path], error_handler).then (response) ->
response.result
# Review a transaction in a builder file and add a signature.
# parameters:
# bool `broadcast` - Try to broadcast this transaction?
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
builder_file_add_signature: (broadcast, builder_path, error_handler = null) ->
@rpc.request('wallet_builder_file_add_signature', [broadcast, builder_path], error_handler).then (response) ->
response.result
# Releases escrow balance to third parties
# parameters:
# account_name `pay_fee_with_account_name` - when releasing escrow a transaction fee must be paid by funds not in escrow, this account will pay the fee
# address `escrow_balance_id` - The balance id of the escrow to be released.
# account_name `released_by_account` - the account that is to perform the release.
# share_type `amount_to_sender` - Amount to release back to the sender.
# share_type `amount_to_receiver` - Amount to release to receiver.
# return_type: `transaction_record`
release_escrow: (pay_fee_with_account_name, escrow_balance_id, released_by_account, amount_to_sender, amount_to_receiver, error_handler = null) ->
@rpc.request('wallet_release_escrow', [pay_fee_with_account_name, escrow_balance_id, released_by_account, amount_to_sender, amount_to_receiver], error_handler).then (response) ->
response.result
# Sends given amount to the given name, with the from field set to a different account than the payer. This transfer will occur in a single transaction and will be cheaper, but may reduce your privacy.
# parameters:
# string `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `paying_account_name` - the source account to draw the shares from
# sending_account_name `from_account_name` - the account to show the recipient as being the sender (requires account's private key to be in wallet).
# receive_account_name `to_account_name` - the account to transfer the shares to
# account_name `escrow_account_name` - the account of the escrow agent which has the power to decide how to divide the funds among from/to accounts.
# digest `agreement` - the hash of an agreement between the sender/receiver in the event a dispute arises can be given to escrow agent
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_from_with_escrow: (amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, escrow_account_name, agreement, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer_from_with_escrow', [amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, escrow_account_name, agreement, memo_message, vote_method], error_handler).then (response) ->
response.result
# Sends given amount to the given name, with the from field set to a different account than the payer. This transfer will occur in a single transaction and will be cheaper, but may reduce your privacy.
# parameters:
# string `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `paying_account_name` - the source account to draw the shares from
# sending_account_name `from_account_name` - the account to show the recipient as being the sender (requires account's private key to be in wallet).
# receive_account_name `to_account_name` - the account to transfer the shares to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_from: (amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer_from', [amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, memo_message, vote_method], error_handler).then (response) ->
response.result
# Scans the blockchain history for operations relevant to this wallet.
# parameters:
# uint32_t `first_block_number` - the first block to scan
# uint32_t `num_blocks` - the number of blocks to scan
# bool `fast_scan` - true to scan as fast as possible but freeze the rest of your computer, and false otherwise
# return_type: `void`
rescan_blockchain: (first_block_number, num_blocks, fast_scan, error_handler = null) ->
@rpc.request('wallet_rescan_blockchain', [first_block_number, num_blocks, fast_scan], error_handler).then (response) ->
response.result
# Queries your wallet for the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# return_type: `transaction_record`
get_transaction: (transaction_id, error_handler = null) ->
@rpc.request('wallet_get_transaction', [transaction_id], error_handler).then (response) ->
response.result
# Scans the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# bool `overwrite_existing` - true to overwrite existing wallet transaction record and false otherwise
# return_type: `transaction_record`
scan_transaction: (transaction_id, overwrite_existing, error_handler = null) ->
@rpc.request('wallet_scan_transaction', [transaction_id, overwrite_existing], error_handler).then (response) ->
response.result
# Scans the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# bool `overwrite_existing` - true to overwrite existing wallet transaction record and false otherwise
# return_type: `void`
scan_transaction_experimental: (transaction_id, overwrite_existing, error_handler = null) ->
@rpc.request('wallet_scan_transaction_experimental', [transaction_id, overwrite_existing], error_handler).then (response) ->
response.result
# Adds a custom note to the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# string `note` - note to add
# return_type: `void`
add_transaction_note_experimental: (transaction_id, note, error_handler = null) ->
@rpc.request('wallet_add_transaction_note_experimental', [transaction_id, note], error_handler).then (response) ->
response.result
# Rebroadcasts the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# return_type: `void`
rebroadcast_transaction: (transaction_id, error_handler = null) ->
@rpc.request('wallet_rebroadcast_transaction', [transaction_id], error_handler).then (response) ->
response.result
# Updates the data published about a given account
# parameters:
# account_name `account_name` - the account that will be updated
# account_name `pay_from_account` - the account from which fees will be paid
# json_variant `public_data` - public data about the account
# uint8_t `delegate_pay_rate` - -1 for non-delegates; otherwise the percent of delegate pay to accept per produced block
# string `account_type` - titan_account | public_account - public accounts do not receive memos and all payments are made to the active key
# return_type: `transaction_record`
account_register: (account_name, pay_from_account, public_data, delegate_pay_rate, account_type, error_handler = null) ->
@rpc.request('wallet_account_register', [account_name, pay_from_account, public_data, delegate_pay_rate, account_type], error_handler).then (response) ->
response.result
# Updates the local private data for an account
# parameters:
# account_name `account_name` - the account that will be updated
# json_variant `private_data` - private data about the account
# return_type: `void`
account_update_private_data: (account_name, private_data, error_handler = null) ->
@rpc.request('wallet_account_update_private_data', [account_name, private_data], error_handler).then (response) ->
response.result
# Updates the data published about a given account
# parameters:
# account_name `account_name` - the account that will be updated
# account_name `pay_from_account` - the account from which fees will be paid
# json_variant `public_data` - public data about the account
# uint8_t `delegate_pay_rate` - -1 for non-delegates; otherwise the percent of delegate pay to accept per produced block
# return_type: `transaction_record`
account_update_registration: (account_name, pay_from_account, public_data, delegate_pay_rate, error_handler = null) ->
@rpc.request('wallet_account_update_registration', [account_name, pay_from_account, public_data, delegate_pay_rate], error_handler).then (response) ->
response.result
# Updates the specified account's active key and broadcasts the transaction.
# parameters:
# account_name `account_to_update` - The name of the account to update the active key of.
# account_name `pay_from_account` - The account from which fees will be paid.
# string `new_active_key` - WIF private key to update active key to. If empty, a new key will be generated.
# return_type: `transaction_record`
account_update_active_key: (account_to_update, pay_from_account, new_active_key, error_handler = null) ->
@rpc.request('wallet_account_update_active_key', [account_to_update, pay_from_account, new_active_key], error_handler).then (response) ->
response.result
# Lists all accounts associated with this wallet
# parameters:
# return_type: `wallet_account_record_array`
list_accounts: (error_handler = null) ->
@rpc.request('wallet_list_accounts', error_handler).then (response) ->
response.result
# Lists all accounts which have been marked as favorites.
# parameters:
# return_type: `wallet_account_record_array`
list_favorite_accounts: (error_handler = null) ->
@rpc.request('wallet_list_favorite_accounts', error_handler).then (response) ->
response.result
# Lists all unregistered accounts belonging to this wallet
# parameters:
# return_type: `wallet_account_record_array`
list_unregistered_accounts: (error_handler = null) ->
@rpc.request('wallet_list_unregistered_accounts', error_handler).then (response) ->
response.result
# Lists all accounts for which we have a private key in this wallet
# parameters:
# return_type: `wallet_account_record_array`
list_my_accounts: (error_handler = null) ->
@rpc.request('wallet_list_my_accounts', error_handler).then (response) ->
response.result
# Get the account record for a given name
# parameters:
# account_name `account_name` - the name of the account to retrieve
# return_type: `wallet_account_record`
get_account: (account_name, error_handler = null) ->
@rpc.request('wallet_get_account', [account_name], error_handler).then (response) ->
response.result
# Get the account record for a given name
# parameters:
# account_name `account_name` - the name of the account whose public address you want
# return_type: `address`
get_account_public_address: (account_name, error_handler = null) ->
@rpc.request('wallet_get_account_public_address', [account_name], error_handler).then (response) ->
response.result
# Remove a contact account from your wallet
# parameters:
# account_name `account_name` - the name of the contact
# return_type: `void`
remove_contact_account: (account_name, error_handler = null) ->
@rpc.request('wallet_remove_contact_account', [account_name], error_handler).then (response) ->
response.result
# Rename an account in wallet
# parameters:
# account_name `current_account_name` - the current name of the account
# new_account_name `new_account_name` - the new name for the account
# return_type: `void`
account_rename: (current_account_name, new_account_name, error_handler = null) ->
@rpc.request('wallet_account_rename', [current_account_name, new_account_name], error_handler).then (response) ->
response.result
# Creates a new user issued asset
# parameters:
# asset_symbol `symbol` - the ticker symbol for the new asset
# string `asset_name` - the name of the asset
# string `issuer_name` - the name of the issuer of the asset
# string `description` - a description of the asset
# real_amount `maximum_share_supply` - the maximum number of shares of the asset
# uint64_t `precision` - defines where the decimal should be displayed, must be a power of 10
# json_variant `public_data` - arbitrary data attached to the asset
# bool `is_market_issued` - creation of a new BitAsset that is created by shorting
# return_type: `transaction_record`
asset_create: (symbol, asset_name, issuer_name, description, maximum_share_supply, precision, public_data, is_market_issued, error_handler = null) ->
@rpc.request('wallet_asset_create', [symbol, asset_name, issuer_name, description, maximum_share_supply, precision, public_data, is_market_issued], error_handler).then (response) ->
response.result
# Updates an existing user-issued asset; only the public_data can be updated if any shares of the asset exist
# parameters:
# asset_symbol `symbol` - the ticker symbol for the asset to update
# optional_string `name` - the new name to give the asset; or null to keep the current name
# optional_string `description` - the new description to give the asset; or null to keep the current description
# optional_variant `public_data` - the new public_data to give the asset; or null to keep the current public_data
# optional_double `maximum_share_supply` - the new maximum_share_supply to give the asset; or null to keep the current maximum_share_supply
# optional_uint64_t `precision` - the new precision to give the asset; or null to keep the current precision
# share_type `issuer_transaction_fee` - an additional fee (denominated in issued asset) charged by the issuer on every transaction that uses this asset type
# asset_permission_array `flags` - a set of flags set by the issuer (if they have permission to set them)
# asset_permission_array `issuer_permissions` - a set of permissions an issuer retains
# account_name `issuer_account_name` - used to transfer the asset to a new user
# uint32_t `required_sigs` - number of signatures from the authority required to control this asset record
# address_list `authority` - owner keys that control this asset record
# return_type: `transaction_record`
asset_update: (symbol, name, description, public_data, maximum_share_supply, precision, issuer_transaction_fee, flags, issuer_permissions, issuer_account_name, required_sigs, authority, error_handler = null) ->
@rpc.request('wallet_asset_update', [symbol, name, description, public_data, maximum_share_supply, precision, issuer_transaction_fee, flags, issuer_permissions, issuer_account_name, required_sigs, authority], error_handler).then (response) ->
response.result
# Issues new shares of a given asset type
# parameters:
# real_amount `amount` - the amount of shares to issue
# asset_symbol `symbol` - the ticker symbol for asset
# account_name `to_account_name` - the name of the account to receive the shares
# string `memo_message` - the memo to send to the receiver
# return_type: `transaction_record`
asset_issue: (amount, symbol, to_account_name, memo_message, error_handler = null) ->
@rpc.request('wallet_asset_issue', [amount, symbol, to_account_name, memo_message], error_handler).then (response) ->
response.result
# Lists the total asset balances for all open escrows
# parameters:
# account_name `account_name` - the account to get a escrow summary for, or leave empty for all accounts
# return_type: `escrow_summary_array`
escrow_summary: (account_name, error_handler = null) ->
@rpc.request('wallet_escrow_summary', [account_name], error_handler).then (response) ->
response.result
# Lists the total asset balances for the specified account
# parameters:
# account_name `account_name` - the account to get a balance for, or leave empty for all accounts
# return_type: `account_balance_summary_type`
account_balance: (account_name, error_handler = null) ->
@rpc.request('wallet_account_balance', [account_name], error_handler).then (response) ->
response.result
# Lists the total asset balances across all withdraw condition types for the specified account
# parameters:
# account_name `account_name` - the account to get a balance for, or leave empty for all accounts
# return_type: `account_extended_balance_type`
account_balance_extended: (account_name, error_handler = null) ->
@rpc.request('wallet_account_balance_extended', [account_name], error_handler).then (response) ->
response.result
# Lists the balance record ids for the specified account
# parameters:
# account_name `account_name` - the account to list balance ids for, or leave empty for all accounts
# return_type: `account_balance_id_summary_type`
account_balance_ids: (account_name, error_handler = null) ->
@rpc.request('wallet_account_balance_ids', [account_name], error_handler).then (response) ->
response.result
# Lists the total accumulated yield for asset balances
# parameters:
# account_name `account_name` - the account to get yield for, or leave empty for all accounts
# return_type: `account_balance_summary_type`
account_yield: (account_name, error_handler = null) ->
@rpc.request('wallet_account_yield', [account_name], error_handler).then (response) ->
response.result
# Lists all public keys in this account
# parameters:
# account_name `account_name` - the account for which public keys should be listed
# return_type: `public_key_summary_array`
account_list_public_keys: (account_name, error_handler = null) ->
@rpc.request('wallet_account_list_public_keys', [account_name], error_handler).then (response) ->
response.result
# Used to transfer some of the delegate's pay from their balance
# parameters:
# account_name `delegate_name` - the delegate whose pay is being cashed out
# account_name `to_account_name` - the account that should receive the funds
# real_amount `amount_to_withdraw` - the amount to withdraw
# return_type: `transaction_record`
delegate_withdraw_pay: (delegate_name, to_account_name, amount_to_withdraw, error_handler = null) ->
@rpc.request('wallet_delegate_withdraw_pay', [delegate_name, to_account_name, amount_to_withdraw], error_handler).then (response) ->
response.result
# Set the fee to add to new transactions
# parameters:
# real_amount `fee` - the wallet transaction fee to set
# return_type: `asset`
set_transaction_fee: (fee, error_handler = null) ->
@rpc.request('wallet_set_transaction_fee', [fee], error_handler).then (response) ->
response.result
# Returns
# parameters:
# asset_symbol `symbol` - the wallet transaction if paid in the given asset type
# return_type: `asset`
get_transaction_fee: (symbol, error_handler = null) ->
@rpc.request('wallet_get_transaction_fee', [symbol], error_handler).then (response) ->
response.result
# Used to place a request to buy a quantity of assets at a price specified in another asset
# parameters:
# account_name `from_account_name` - the account that will provide funds for the bid
# string `quantity` - the quantity of items you would like to buy
# asset_symbol `quantity_symbol` - the type of items you would like to buy
# string `base_price` - the price you would like to pay
# asset_symbol `base_symbol` - the type of asset you would like to pay with
# bool `allow_stupid_bid` - Allow user to place bid at more than 5% above the current sell price.
# return_type: `transaction_record`
market_submit_bid: (from_account_name, quantity, quantity_symbol, base_price, base_symbol, allow_stupid_bid, error_handler = null) ->
@rpc.request('wallet_market_submit_bid', [from_account_name, quantity, quantity_symbol, base_price, base_symbol, allow_stupid_bid], error_handler).then (response) ->
response.result
# Used to place a request to buy a quantity of assets at a price specified in another asset
# parameters:
# account_name `from_account_name` - the account that will provide funds for the bid
# string `quantity` - the quantity of items you would like to buy
# asset_symbol `quantity_symbol` - the type of items you would like to buy
# string `relative_price` - the price relative to the feed you would like to pay
# asset_symbol `base_symbol` - the type of asset you would like to pay with
# string `limit_price` - the limit on what you are willing to pay
# return_type: `transaction_record`
market_submit_relative_bid: (from_account_name, quantity, quantity_symbol, relative_price, base_symbol, limit_price, error_handler = null) ->
@rpc.request('wallet_market_submit_relative_bid', [from_account_name, quantity, quantity_symbol, relative_price, base_symbol, limit_price], error_handler).then (response) ->
response.result
# Used to place a request to sell a quantity of assets at a price specified in another asset
# parameters:
# account_name `from_account_name` - the account that will provide funds for the ask
# string `sell_quantity` - the quantity of items you would like to sell
# asset_symbol `sell_quantity_symbol` - the type of items you would like to sell
# string `ask_price` - the price per unit sold.
# asset_symbol `ask_price_symbol` - the type of asset you would like to be paid
# bool `allow_stupid_ask` - Allow user to place ask at more than 5% below the current buy price.
# return_type: `transaction_record`
market_submit_ask: (from_account_name, sell_quantity, sell_quantity_symbol, ask_price, ask_price_symbol, allow_stupid_ask, error_handler = null) ->
@rpc.request('wallet_market_submit_ask', [from_account_name, sell_quantity, sell_quantity_symbol, ask_price, ask_price_symbol, allow_stupid_ask], error_handler).then (response) ->
response.result
# Used to place a request to sell a quantity of assets at a price specified in another asset
# parameters:
# account_name `from_account_name` - the account that will provide funds for the ask
# string `sell_quantity` - the quantity of items you would like to sell
# asset_symbol `sell_quantity_symbol` - the type of items you would like to sell
# string `relative_ask_price` - the relative price per unit sold.
# asset_symbol `ask_price_symbol` - the type of asset you would like to be paid
# string `limit_ask_price` - the minimum price per unit sold.
# return_type: `transaction_record`
market_submit_relative_ask: (from_account_name, sell_quantity, sell_quantity_symbol, relative_ask_price, ask_price_symbol, limit_ask_price, error_handler = null) ->
@rpc.request('wallet_market_submit_relative_ask', [from_account_name, sell_quantity, sell_quantity_symbol, relative_ask_price, ask_price_symbol, limit_ask_price], error_handler).then (response) ->
response.result
# Used to place a request to short sell a quantity of assets at a price specified
# parameters:
# account_name `from_account_name` - the account that will provide funds for the ask
# string `short_collateral` - the amount of collateral you wish to fund this short with
# asset_symbol `collateral_symbol` - the type of asset collateralizing this short (i.e. XTS)
# string `interest_rate` - the APR you wish to pay interest at (0.0% to 1000.0%)
# asset_symbol `quote_symbol` - the asset to short sell (i.e. USD)
# string `short_price_limit` - maximim price (USD per XTS) that the short will execute at, if 0 then no limit will be applied
# return_type: `transaction_record`
market_submit_short: (from_account_name, short_collateral, collateral_symbol, interest_rate, quote_symbol, short_price_limit, error_handler = null) ->
@rpc.request('wallet_market_submit_short', [from_account_name, short_collateral, collateral_symbol, interest_rate, quote_symbol, short_price_limit], error_handler).then (response) ->
response.result
# Used to place a request to cover an existing short position
# parameters:
# account_name `from_account_name` - the account that will provide funds for the ask
# string `quantity` - the quantity of asset you would like to cover
# asset_symbol `quantity_symbol` - the type of asset you are covering (ie: USD)
# order_id `cover_id` - the order ID you would like to cover
# return_type: `transaction_record`
market_cover: (from_account_name, quantity, quantity_symbol, cover_id, error_handler = null) ->
@rpc.request('wallet_market_cover', [from_account_name, quantity, quantity_symbol, cover_id], error_handler).then (response) ->
response.result
# Cancel and/or create many market orders in a single transaction.
# parameters:
# order_ids `cancel_order_ids` - Order IDs of all market orders to cancel in this transaction.
# order_descriptions `new_orders` - Descriptions of all new orders to create in this transaction.
# bool `sign` - True if transaction should be signed and broadcast (if possible), false otherwse.
# return_type: `transaction_record`
market_batch_update: (cancel_order_ids, new_orders, sign, error_handler = null) ->
@rpc.request('wallet_market_batch_update', [cancel_order_ids, new_orders, sign], error_handler).then (response) ->
response.result
# Add collateral to a short position
# parameters:
# account_name `from_account_name` - the account that will provide funds for the ask
# order_id `cover_id` - the ID of the order to recollateralize
# string `real_quantity_collateral_to_add` - the quantity of collateral of the base asset to add to the specified position
# return_type: `transaction_record`
market_add_collateral: (from_account_name, cover_id, real_quantity_collateral_to_add, error_handler = null) ->
@rpc.request('wallet_market_add_collateral', [from_account_name, cover_id, real_quantity_collateral_to_add], error_handler).then (response) ->
response.result
# List an order list of a specific market
# parameters:
# asset_symbol `base_symbol` - the base symbol of the market
# asset_symbol `quote_symbol` - the quote symbol of the market
# uint32_t `limit` - the maximum number of items to return
# account_name `account_name` - the account for which to get the orders, or empty for all accounts
# return_type: `market_order_map`
market_order_list: (base_symbol, quote_symbol, limit, account_name, error_handler = null) ->
@rpc.request('wallet_market_order_list', [base_symbol, quote_symbol, limit, account_name], error_handler).then (response) ->
response.result
# List an order list of a specific account
# parameters:
# account_name `account_name` - the account for which to get the orders, or empty for all accounts
# uint32_t `limit` - the maximum number of items to return
# return_type: `market_order_map`
account_order_list: (account_name, limit, error_handler = null) ->
@rpc.request('wallet_account_order_list', [account_name, limit], error_handler).then (response) ->
response.result
# Cancel an order: deprecated - use wallet_market_cancel_orders
# parameters:
# order_id `order_id` - the ID of the order to cancel
# return_type: `transaction_record`
market_cancel_order: (order_id, error_handler = null) ->
@rpc.request('wallet_market_cancel_order', [order_id], error_handler).then (response) ->
response.result
# Cancel more than one order at a time
# parameters:
# order_ids `order_ids` - the IDs of the orders to cancel
# return_type: `transaction_record`
market_cancel_orders: (order_ids, error_handler = null) ->
@rpc.request('wallet_market_cancel_orders', [order_ids], error_handler).then (response) ->
response.result
# Reveals the private key corresponding to an account, public key, or address
# parameters:
# string `input` - an account name, public key, or address (quoted hash of public key)
# return_type: `string`
dump_private_key: (input, error_handler = null) ->
@rpc.request('wallet_dump_private_key', [input], error_handler).then (response) ->
response.result
# Returns the allocation of votes by this account
# parameters:
# account_name `account_name` - the account to report votes on, or empty for all accounts
# return_type: `account_vote_summary`
account_vote_summary: (account_name, error_handler = null) ->
@rpc.request('wallet_account_vote_summary', [account_name], error_handler).then (response) ->
response.result
# Check how much this account is utilizing its voting power
# parameters:
# account_name `account` -
# return_type: `vote_summary`
check_vote_proportion: (account, error_handler = null) ->
@rpc.request('wallet_check_vote_proportion', [account], error_handler).then (response) ->
response.result
# Set a property in the GUI settings DB
# parameters:
# string `name` - the name of the setting to set
# variant `value` - the value to set the setting to
# return_type: `void`
set_setting: (name, value, error_handler = null) ->
@rpc.request('wallet_set_setting', [name, value], error_handler).then (response) ->
response.result
# Get the value of the given setting
# parameters:
# string `name` - The name of the setting to fetch
# return_type: `optional_variant`
get_setting: (name, error_handler = null) ->
@rpc.request('wallet_get_setting', [name], error_handler).then (response) ->
response.result
# Enable or disable block production for a particular delegate account
# parameters:
# string `delegate_name` - The delegate to enable/disable block production for; ALL for all delegate accounts
# bool `enabled` - true to enable block production, false otherwise
# return_type: `void`
delegate_set_block_production: (delegate_name, enabled, error_handler = null) ->
@rpc.request('wallet_delegate_set_block_production', [delegate_name, enabled], error_handler).then (response) ->
response.result
# Enable or disable wallet transaction scanning
# parameters:
# bool `enabled` - true to enable transaction scanning, false otherwise
# return_type: `bool`
set_transaction_scanning: (enabled, error_handler = null) ->
@rpc.request('wallet_set_transaction_scanning', [enabled], error_handler).then (response) ->
response.result
# Signs the provided message digest with the account key
# parameters:
# string `signer` - A public key, address, or account name whose key to sign with
# sha256 `hash` - SHA256 digest of the message to sign
# return_type: `compact_signature`
sign_hash: (signer, hash, error_handler = null) ->
@rpc.request('wallet_sign_hash', [signer, hash], error_handler).then (response) ->
response.result
# Initiates the login procedure by providing a BitShares Login URL
# parameters:
# string `server_account` - Name of the account of the server. The user will be shown this name as the site he is logging into.
# return_type: `string`
login_start: (server_account, error_handler = null) ->
@rpc.request('wallet_login_start', [server_account], error_handler).then (response) ->
response.result
# Completes the login procedure by finding the user's public account key and shared secret
# parameters:
# public_key `server_key` - The one-time public key from wallet_login_start.
# public_key `client_key` - The client's one-time public key.
# compact_signature `client_signature` - The client's signature of the shared secret.
# return_type: `variant`
login_finish: (server_key, client_key, client_signature, error_handler = null) ->
@rpc.request('wallet_login_finish', [server_key, client_key, client_signature], error_handler).then (response) ->
response.result
# Set this balance's voting address and slate
# parameters:
# address `balance_id` - the current name of the account
# string `voter_address` - The new voting address. If none is specified, tries to re-use existing address.
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# bool `sign_and_broadcast` -
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
balance_set_vote_info: (balance_id, voter_address, vote_method, sign_and_broadcast, builder_path, error_handler = null) ->
@rpc.request('wallet_balance_set_vote_info', [balance_id, voter_address, vote_method, sign_and_broadcast, builder_path], error_handler).then (response) ->
response.result
# Publishes the current wallet delegate slate to the public data associated with the account
# parameters:
# account_name `publishing_account_name` - The account to publish the slate ID under
# account_name `paying_account_name` - The account to pay transaction fees or leave empty to pay with publishing account
# return_type: `transaction_record`
publish_slate: (publishing_account_name, paying_account_name, error_handler = null) ->
@rpc.request('wallet_publish_slate', [publishing_account_name, paying_account_name], error_handler).then (response) ->
response.result
# Publish your current client version to the specified account's public data record
# parameters:
# account_name `publishing_account_name` - The account to publish the client version under
# account_name `paying_account_name` - The account to pay transaction fees with or leave empty to pay with publishing account
# return_type: `transaction_record`
publish_version: (publishing_account_name, paying_account_name, error_handler = null) ->
@rpc.request('wallet_publish_version', [publishing_account_name, paying_account_name], error_handler).then (response) ->
response.result
# Collect specified account's genesis balances
# parameters:
# account_name `account_name` - account to collect genesis balances for
# return_type: `transaction_record`
collect_genesis_balances: (account_name, error_handler = null) ->
@rpc.request('wallet_collect_genesis_balances', [account_name], error_handler).then (response) ->
response.result
# Collect specified account's vested balances
# parameters:
# account_name `account_name` - account to collect vested balances for
# return_type: `transaction_record`
collect_vested_balances: (account_name, error_handler = null) ->
@rpc.request('wallet_collect_vested_balances', [account_name], error_handler).then (response) ->
response.result
# Update a delegate's block signing and feed publishing key
# parameters:
# account_name `authorizing_account_name` - The account that will authorize changing the block signing key
# account_name `delegate_name` - The delegate account which will have its block signing key changed
# public_key `signing_key` - The new key that will be used for block signing
# return_type: `transaction_record`
delegate_update_signing_key: (authorizing_account_name, delegate_name, signing_key, error_handler = null) ->
@rpc.request('wallet_delegate_update_signing_key', [authorizing_account_name, delegate_name, signing_key], error_handler).then (response) ->
response.result
# Attempts to recover accounts created after last backup was taken and returns number of successful recoveries. Use if you have restored from backup and are missing accounts.
# parameters:
# int32_t `accounts_to_recover` - The number of accounts to attept to recover
# int32_t `maximum_number_of_attempts` - The maximum number of keys to generate trying to recover accounts
# return_type: `int32_t`
recover_accounts: (accounts_to_recover, maximum_number_of_attempts, error_handler = null) ->
@rpc.request('wallet_recover_accounts', [accounts_to_recover, maximum_number_of_attempts], error_handler).then (response) ->
response.result
# Attempts to recover any missing recipient and memo information for the specified transaction
# parameters:
# string `transaction_id_prefix` - the id (or id prefix) of the transaction record
# string `recipient_account` - the account name of the recipient (if known)
# return_type: `transaction_record`
recover_transaction: (transaction_id_prefix, recipient_account, error_handler = null) ->
@rpc.request('wallet_recover_transaction', [transaction_id_prefix, recipient_account], error_handler).then (response) ->
response.result
# Verify whether the specified transaction made a TITAN deposit to the current wallet; returns null if not
# parameters:
# string `transaction_id_prefix` - the id (or id prefix) of the transaction record
# return_type: `optional_variant_object`
verify_titan_deposit: (transaction_id_prefix, error_handler = null) ->
@rpc.request('wallet_verify_titan_deposit', [transaction_id_prefix], error_handler).then (response) ->
response.result
# publishes a price feed for BitAssets, only active delegates may do this
# parameters:
# account_name `delegate_account` - the delegate to publish the price under
# real_amount `price` - the number of this asset per XTS
# asset_symbol `asset_symbol` - the type of asset being priced
# return_type: `transaction_record`
publish_price_feed: (delegate_account, price, asset_symbol, error_handler = null) ->
@rpc.request('wallet_publish_price_feed', [delegate_account, price, asset_symbol], error_handler).then (response) ->
response.result
# publishes a set of feeds for BitAssets, only active delegates may do this
# parameters:
# account_name `delegate_account` - the delegate to publish the price under
# price_map `symbol_to_price_map` - maps the BitAsset symbol to its price per share
# return_type: `transaction_record`
publish_feeds: (delegate_account, symbol_to_price_map, error_handler = null) ->
@rpc.request('wallet_publish_feeds', [delegate_account, symbol_to_price_map], error_handler).then (response) ->
response.result
# publishes a set of feeds for BitAssets for all active delegates, most useful for testnets
# parameters:
# price_map `symbol_to_price_map` - maps the BitAsset symbol to its price per share
# return_type: `vector<std::pair<string, wallet_transaction_record>>`
publish_feeds_multi_experimental: (symbol_to_price_map, error_handler = null) ->
@rpc.request('wallet_publish_feeds_multi_experimental', [symbol_to_price_map], error_handler).then (response) ->
response.result
# tries to repair any inconsistent wallet account, key, and transaction records
# parameters:
# account_name `collecting_account_name` - collect any orphan balances into this account
# return_type: `void`
repair_records: (collecting_account_name, error_handler = null) ->
@rpc.request('wallet_repair_records', [collecting_account_name], error_handler).then (response) ->
response.result
# regenerates private keys as part of wallet recovery
# parameters:
# account_name `account_name` - the account the generated keys should be a part of
# uint32_t `max_key_number` - the last key number to regenerate
# return_type: `int32_t`
regenerate_keys: (account_name, max_key_number, error_handler = null) ->
@rpc.request('wallet_regenerate_keys', [account_name, max_key_number], error_handler).then (response) ->
response.result
# Creates a new mail message and returns the unencrypted message.
# parameters:
# string `sender` - The name of the message's sender.
# string `subject` - The subject of the message.
# string `body` - The body of the message.
# message_id `reply_to` - The ID of the message this is in reply to.
# return_type: `message`
mail_create: (sender, subject, body, reply_to, error_handler = null) ->
@rpc.request('wallet_mail_create', [sender, subject, body, reply_to], error_handler).then (response) ->
response.result
# Encrypts a mail message and returns the encrypted message.
# parameters:
# string `recipient` - The name of the message's recipient.
# message `plaintext` - The plaintext message, such as from wallet_mail_create.
# return_type: `message`
mail_encrypt: (recipient, plaintext, error_handler = null) ->
@rpc.request('wallet_mail_encrypt', [recipient, plaintext], error_handler).then (response) ->
response.result
# Opens an encrypted mail message.
# parameters:
# address `recipient` - The address of the message's recipient.
# message `ciphertext` - The encrypted message.
# return_type: `message`
mail_open: (recipient, ciphertext, error_handler = null) ->
@rpc.request('wallet_mail_open', [recipient, ciphertext], error_handler).then (response) ->
response.result
# Sets the list of mail servers an account checks for his mail.
# parameters:
# string `account_name` - The name of the account whose mail servers should be updated.
# string_array `server_list` - A list of names of blockchain accounts who run mail servers.
# string `paying_account` - The name of the account to pay the transaction fee, if different from account_name.
# return_type: `void`
set_preferred_mail_servers: (account_name, server_list, paying_account, error_handler = null) ->
@rpc.request('wallet_set_preferred_mail_servers', [account_name, server_list, paying_account], error_handler).then (response) ->
response.result
# Retract (permanently disable) the specified account in case of master key compromise.
# parameters:
# account_name `account_to_retract` - The name of the account to retract.
# account_name `pay_from_account` - The account from which fees will be paid.
# return_type: `transaction_record`
account_retract: (account_to_retract, pay_from_account, error_handler = null) ->
@rpc.request('wallet_account_retract', [account_to_retract, pay_from_account], error_handler).then (response) ->
response.result
# Generates a human friendly brain wallet key starting with a public salt as the last word
# parameters:
# return_type: `string`
generate_brain_seed: (error_handler = null) ->
@rpc.request('wallet_generate_brain_seed', error_handler).then (response) ->
response.result
# Register this wrapper as the Angular "WalletAPI" service; the array-style
# DI annotation injects $q, $log, RpcService and $interval into the constructor.
angular.module("app").service("WalletAPI", ["$q", "$log", "RpcService", "$interval", WalletAPI])
# Warning: this is a generated file, any changes made here will be overwritten by the build process
# RPC wrapper service for the wallet API; each method forwards to RpcService
# and resolves with the JSON-RPC `result` field.
class WalletAPI
  # Angular-injected dependencies are bound to instance fields:
  # @q = $q, @log = $log, @rpc = RpcService, @interval = $interval
  # (see the "WalletAPI" service registration's annotation order).
  constructor: (@q, @log, @rpc, @interval) ->
    #@log.info "---- WalletAPI Constructor ----"
# Extra information about the wallet.
# parameters:
# return_type: `json_object`
get_info: (error_handler = null) ->
@rpc.request('wallet_get_info', error_handler).then (response) ->
response.result
# Opens the wallet of the given name
# parameters:
# wallet_name `wallet_name` - the name of the wallet to open
# return_type: `void`
open: (wallet_name, error_handler = null) ->
@rpc.request('wallet_open', [wallet_name], error_handler).then (response) ->
response.result
# Creates a wallet with the given name
# parameters:
# wallet_name `wallet_name` - name of the wallet to create
# new_passphrase `PI:PASSWORD:<PASSWORD>END_PI` - a passphrase for encrypting the wallet
# brainkey `brain_key` - a strong passphrase that will be used to generate all private keys, defaults to a large random number
# return_type: `void`
create: (wallet_name, new_passphrase, brain_key, error_handler = null) ->
@rpc.request('wallet_create', [wallet_name, new_passphrase, brain_key], error_handler).then (response) ->
response.result
# Returns the wallet name passed to wallet_open
# parameters:
# return_type: `optional_wallet_name`
get_name: (error_handler = null) ->
@rpc.request('wallet_get_name', error_handler).then (response) ->
response.result
# Loads the private key into the specified account. Returns which account it was actually imported to.
# parameters:
# wif_private_key `wif_key` - A private key in bitcoin Wallet Import Format (WIF)
# account_name `account_name` - the name of the account the key should be imported into, if null then the key must belong to an active account
# bool `create_new_account` - If true, the wallet will attempt to create a new account for the name provided rather than import the key into an existing account
# bool `rescan` - If true, the wallet will rescan the blockchain looking for transactions that involve this private key
# return_type: `account_name`
import_private_key: (wif_key, account_name, create_new_account, rescan, error_handler = null) ->
@rpc.request('wallet_import_private_key', [wif_key, account_name, create_new_account, rescan], error_handler).then (response) ->
response.result
# Imports a Bitcoin Core or BitShares PTS wallet
# parameters:
# filename `wallet_filename` - the Bitcoin/PTS wallet file path
# passphrase `PI:PASSWORD:<PASSWORD>END_PI` - the imported wallet's password
# account_name `account_name` - the account to receive the contents of the wallet
# return_type: `uint32_t`
import_bitcoin: (wallet_filename, passphrase, account_name, error_handler = null) ->
@rpc.request('wallet_import_bitcoin', [wallet_filename, passphrase, account_name], error_handler).then (response) ->
response.result
# Imports an Electrum wallet
# parameters:
# filename `wallet_filename` - the Electrum wallet file path
# passphrase `PI:PASSWORD:<PASSWORD>END_PI` - the imported wallet's password
# account_name `account_name` - the account to receive the contents of the wallet
# return_type: `uint32_t`
import_electrum: (wallet_filename, passphrase, account_name, error_handler = null) ->
@rpc.request('wallet_import_electrum', [wallet_filename, passphrase, account_name], error_handler).then (response) ->
response.result
# Create the key from keyhotee config and import it to the wallet, creating a new account using this key
# parameters:
# name `firstname` - first name in keyhotee profile config, for salting the seed of private key
# name `middlename` - middle name in keyhotee profile config, for salting the seed of private key
# name `lastname` - last name in keyhotee profile config, for salting the seed of private key
# brainkey `brainkey` - brainkey in keyhotee profile config, for salting the seed of private key
# keyhoteeid `keyhoteeid` - using keyhotee id as account name
# return_type: `void`
import_keyhotee: (firstname, middlename, lastname, brainkey, keyhoteeid, error_handler = null) ->
@rpc.request('wallet_import_keyhotee', [firstname, middlename, lastname, brainkey, keyhoteeid], error_handler).then (response) ->
response.result
# Imports anything that looks like a private key from the given JSON file.
# parameters:
# filename `json_filename` - the full path and filename of JSON wallet to import, example: /path/to/exported_wallet.json
# passphrase `PI:PASSWORD:<PASSWORD>END_PI` - passphrase for encrypted keys
# account_name `account` - Account into which to import keys.
# return_type: `void`
import_keys_from_json: (json_filename, imported_wallet_passphrase, account, error_handler = null) ->
@rpc.request('wallet_import_keys_from_json', [json_filename, imported_wallet_passphrase, account], error_handler).then (response) ->
response.result
# Closes the curent wallet if one is open
# parameters:
# return_type: `void`
close: (error_handler = null) ->
@rpc.request('wallet_close', error_handler).then (response) ->
response.result
# Exports the current wallet to a JSON file
# parameters:
# filename `json_filename` - the full path and filename of JSON file to generate, example: /path/to/exported_wallet.json
# return_type: `void`
backup_create: (json_filename, error_handler = null) ->
@rpc.request('wallet_backup_create', [json_filename], error_handler).then (response) ->
response.result
# Creates a new wallet from an exported JSON file
# parameters:
# filename `json_filename` - the full path and filename of JSON wallet to import, example: /path/to/exported_wallet.json
# wallet_name `wallet_name` - name of the wallet to create
# passphrase `importedPI:PASSWORD:<PASSWORD>END_PI_walletPI:PASSWORD:<PASSWORD>END_PI_passphrase` - passphrase of the imported wallet
# return_type: `void`
backup_restore: (json_filename, wallet_name, imported_wallet_passphrase, error_handler = null) ->
@rpc.request('wallet_backup_restore', [json_filename, wallet_name, imported_wallet_passphrase], error_handler).then (response) ->
response.result
# Enables or disables automatic wallet backups
# parameters:
# bool `enabled` - true to enable and false to disable
# return_type: `bool`
set_automatic_backups: (enabled, error_handler = null) ->
@rpc.request('wallet_set_automatic_backups', [enabled], error_handler).then (response) ->
response.result
# Set transaction expiration time
# parameters:
# uint32_t `seconds` - seconds before new transactions expire
# return_type: `uint32_t`
set_transaction_expiration_time: (seconds, error_handler = null) ->
@rpc.request('wallet_set_transaction_expiration_time', [seconds], error_handler).then (response) ->
response.result
# Creates a normal user object. If no owner info is specified, uses a new address from payer.
# parameters:
# account_name `account` -
# variant `user_data` -
# int32_t `m` -
# address_list `owners` -
# return_type: `transaction_record`
object_create: (account, user_data, m, owners, error_handler = null) ->
@rpc.request('wallet_object_create', [account, user_data, m, owners], error_handler).then (response) ->
response.result
# Update a normal user object.
# parameters:
# sending_account_name `paying_account_name` - the source account to draw the shares from
# object_id_type `object_id` - the object to update
# variant `user_data` -
# bool `sign_and_broadcast` -
# return_type: `transaction_builder`
object_update: (paying_account_name, object_id, user_data, sign_and_broadcast, error_handler = null) ->
@rpc.request('wallet_object_update', [paying_account_name, object_id, user_data, sign_and_broadcast], error_handler).then (response) ->
response.result
# Update a normal user object's owner.
# parameters:
# sending_account_name `paying_account_name` - the source account to draw the shares from
# object_id_type `object_id` - the object to update
# uint32_t `m` -
# address_list `owners` -
# bool `sign_and_broadcast` -
# return_type: `transaction_builder`
object_transfer: (paying_account_name, object_id, m, owners, sign_and_broadcast, error_handler = null) ->
@rpc.request('wallet_object_transfer', [paying_account_name, object_id, m, owners, sign_and_broadcast], error_handler).then (response) ->
response.result
# List objects that belong to an account.
# parameters:
# account_name `account` - Account to fetch objects for
# return_type: `object_array`
object_list: (account, error_handler = null) ->
@rpc.request('wallet_object_list', [account], error_handler).then (response) ->
response.result
# Create or update an edge object.
# parameters:
# account_name `paying_account` - Account that will pay for this transaction
# object_id_type `from` -
# object_id_type `to` - Account that will pay for this transaction
# string `name` - The edge name (the 'key', used in index)
# variant `value` - The edge 'value', not part of the index
# return_type: `transaction_builder`
set_edge: (paying_account, from, to, name, value, error_handler = null) ->
@rpc.request('wallet_set_edge', [paying_account, from, to, name, value], error_handler).then (response) ->
response.result
# Lists transaction history for the specified account
# parameters:
# string `account_name` - the name of the account for which the transaction history will be returned, "" for all accounts, example: alice
# string `asset_symbol` - only include transactions involving the specified asset, or "" to include all
# int32_t `limit` - limit the number of returned transactions; negative for most recent and positive for least recent. 0 does not limit
# uint32_t `start_block_num` - the earliest block number to list transactions from; 0 to include all transactions starting from genesis
# uint32_t `end_block_num` - the latest block to list transaction from; -1 to include all transactions ending at the head block
# return_type: `pretty_transactions`
account_transaction_history: (account_name, asset_symbol, limit, start_block_num, end_block_num, error_handler = null) ->
@rpc.request('wallet_account_transaction_history', [account_name, asset_symbol, limit, start_block_num, end_block_num], error_handler).then (response) ->
response.result
#
# parameters:
# string `account_name` - the name of the account for which the transaction history will be returned, "" for all accounts, example: alice
# return_type: `experimental_transactions`
transaction_history_experimental: (account_name, error_handler = null) ->
@rpc.request('wallet_transaction_history_experimental', [account_name], error_handler).then (response) ->
response.result
#
# parameters:
# return_type: `snapshot_record_list`
check_sharedrop: (error_handler = null) ->
@rpc.request('wallet_check_sharedrop', error_handler).then (response) ->
response.result
# Removes the specified transaction record from your transaction history. USE WITH CAUTION! Rescan cannot reconstruct all transaction details
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction record
# return_type: `void`
remove_transaction: (transaction_id, error_handler = null) ->
@rpc.request('wallet_remove_transaction', [transaction_id], error_handler).then (response) ->
response.result
# Return any errors for your currently pending transactions
# parameters:
# string `filename` - filename to save pending transaction errors to
# return_type: `map<transaction_id_type, fc::exception>`
get_pending_transaction_errors: (filename, error_handler = null) ->
@rpc.request('wallet_get_pending_transaction_errors', [filename], error_handler).then (response) ->
response.result
# Lock the private keys in wallet, disables spending commands until unlocked
# parameters:
# return_type: `void`
lock: (error_handler = null) ->
@rpc.request('wallet_lock', error_handler).then (response) ->
response.result
# Unlock the private keys in the wallet to enable spending operations
# parameters:
# uint32_t `timeout` - the number of seconds to keep the wallet unlocked
# passphrase `PI:PASSWORD:<PASSWORD>END_PI` - the passphrase for encrypting the wallet
# return_type: `void`
unlock: (timeout, passphrase, error_handler = null) ->
@rpc.request('wallet_unlock', [timeout, passphrase], error_handler).then (response) ->
response.result
# Change the password of the current wallet
# parameters:
# passphrase `PI:PASSWORD:<PASSWORD>END_PI` - the passphrase for encrypting the wallet
# return_type: `void`
change_passphrase: (passphrase, error_handler = null) ->
@rpc.request('wallet_change_passphrase', [passphrase], error_handler).then (response) ->
response.result
# Return a list of wallets in the current data directory
# parameters:
# return_type: `wallet_name_array`
list: (error_handler = null) ->
@rpc.request('wallet_list', error_handler).then (response) ->
response.result
# Add new account for receiving payments
# parameters:
# account_name `account_name` - the name you will use to refer to this receive account
# json_variant `private_data` - Extra data to store with this account record
# return_type: `public_key`
account_create: (account_name, private_data, error_handler = null) ->
@rpc.request('wallet_account_create', [account_name, private_data], error_handler).then (response) ->
response.result
# Updates the favorited status of the specified account
# parameters:
# account_name `account_name` - the name of the account to set favorited status on
# bool `is_favorite` - true if account should be marked as a favorite; false otherwise
# return_type: `void`
account_set_favorite: (account_name, is_favorite, error_handler = null) ->
@rpc.request('wallet_account_set_favorite', [account_name, is_favorite], error_handler).then (response) ->
response.result
# Updates your approval of the specified account
# parameters:
# account_name `account_name` - the name of the account to set approval for
# int8_t `approval` - 1, 0, or -1 respectively for approve, neutral, or disapprove
# return_type: `int8_t`
account_set_approval: (account_name, approval, error_handler = null) ->
@rpc.request('wallet_account_set_approval', [account_name, approval], error_handler).then (response) ->
response.result
# Add new account for sending payments
# parameters:
# account_name `account_name` - the name you will use to refer to this sending account
# public_key `account_key` - the key associated with this sending account
# return_type: `void`
add_contact_account: (account_name, account_key, error_handler = null) ->
@rpc.request('wallet_add_contact_account', [account_name, account_key], error_handler).then (response) ->
response.result
# Authorizes a public key to control funds of a particular asset class. Requires authority of asset issuer
# parameters:
# account_name `paying_account` - the account that will pay the transaction fee
# asset_symbol `symbol` - the asset granting authorization
# string `address` - the address being granted permission, or the public key, or the account name
# object_id_type `meta` - -1 to remove authorization, otherwise a link to an object in the object graph
# return_type: `transaction_record`
asset_authorize_key: (paying_account, symbol, address, meta, error_handler = null) ->
@rpc.request('wallet_asset_authorize_key', [paying_account, symbol, address, meta], error_handler).then (response) ->
response.result
# Burns given amount to the given account. This will allow you to post message and +/- sentiment on someones account as a form of reputation.
# parameters:
# real_amount `amount_to_burn` - the amount of shares to burn
# asset_symbol `asset_symbol` - the asset to burn
# sending_account_name `from_account_name` - the source account to draw the shares from
# string `for_or_against` - the value 'for' or 'against'
# receive_account_name `to_account_name` - the account to which the burn should be credited (for or against) and on which the public message will appear
# string `public_message` - a public message to post
# bool `anonymous` - true if anonymous, else signed by from_account_name
# return_type: `transaction_record`
burn: (amount_to_burn, asset_symbol, from_account_name, for_or_against, to_account_name, public_message, anonymous, error_handler = null) ->
@rpc.request('wallet_burn', [amount_to_burn, asset_symbol, from_account_name, for_or_against, to_account_name, public_message, anonymous], error_handler).then (response) ->
response.result
# Creates an address which can be used for a simple (non-TITAN) transfer.
# parameters:
# string `account_name` - The account name that will own this address
# string `label` -
# int32_t `legacy_network_byte` - If not -1, use this as the network byte for a BTC-style address.
# return_type: `string`
address_create: (account_name, label, legacy_network_byte, error_handler = null) ->
@rpc.request('wallet_address_create', [account_name, label, legacy_network_byte], error_handler).then (response) ->
response.result
# Do a simple (non-TITAN) transfer to a BTC-style address
# parameters:
# real_amount `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# account_name `from_account_name` - the source account to draw the shares from
# legacy_address `to_address` - the address to transfer to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_to_legacy_address: (amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer_to_legacy_address', [amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method], error_handler).then (response) ->
response.result
# Do a simple (non-TITAN) transfer to an address
# parameters:
# real_amount `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# account_name `from_account_name` - the source account to draw the shares from
# address `to_address` - the address to transfer to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_to_address: (amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer_to_address', [amount_to_transfer, asset_symbol, from_account_name, to_address, memo_message, vote_method], error_handler).then (response) ->
response.result
# Sends given amount to the given account, with the from field set to the payer. This transfer will occur in a single transaction and will be cheaper, but may reduce your privacy.
# parameters:
# string `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `from_account_name` - the source account to draw the shares from
# receive_account_name `to_account_name` - the account to transfer the shares to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer: (amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer', [amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method], error_handler).then (response) ->
response.result
# Sends given amount to the given account
# parameters:
# real_amount `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `from_account_name` - the source account to draw the shares from
# receive_account_name `to_account_name` - the account to transfer the shares to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_to_public_account: (amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer_to_public_account', [amount_to_transfer, asset_symbol, from_account_name, to_account_name, memo_message, vote_method], error_handler).then (response) ->
response.result
#
# parameters:
# string `symbol` - which asset
# uint32_t `m` - Required number of signatures
# address_list `addresses` - List of possible addresses for signatures
# return_type: `address`
multisig_get_balance_id: (symbol, m, addresses, error_handler = null) ->
@rpc.request('wallet_multisig_get_balance_id', [symbol, m, addresses], error_handler).then (response) ->
response.result
#
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# string `from_name` - TITAN name to withdraw from
# uint32_t `m` - Required number of signatures
# address_list `addresses` - List of possible addresses for signatures
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
multisig_deposit: (amount, symbol, from_name, m, addresses, vote_method, error_handler = null) ->
@rpc.request('wallet_multisig_deposit', [amount, symbol, from_name, m, addresses, vote_method], error_handler).then (response) ->
response.result
#
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# address `from_address` - the balance address to withdraw from
# string `to` - address or account to receive funds
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# bool `sign_and_broadcast` -
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
withdraw_from_address: (amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path, error_handler = null) ->
@rpc.request('wallet_withdraw_from_address', [amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path], error_handler).then (response) ->
response.result
#
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# legacy_address `from_address` - the balance address to withdraw from
# string `to` - address or account to receive funds
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# bool `sign_and_broadcast` -
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
withdraw_from_legacy_address: (amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path, error_handler = null) ->
@rpc.request('wallet_withdraw_from_legacy_address', [amount, symbol, from_address, to, vote_method, sign_and_broadcast, builder_path], error_handler).then (response) ->
response.result
#
# parameters:
# string `amount` - how much to transfer
# string `symbol` - which asset
# address `from` - multisig balance ID to withdraw from
# address `to_address` - address to receive funds
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
multisig_withdraw_start: (amount, symbol, from, to_address, vote_method, builder_path, error_handler = null) ->
@rpc.request('wallet_multisig_withdraw_start', [amount, symbol, from, to_address, vote_method, builder_path], error_handler).then (response) ->
response.result
# Review a transaction and add a signature.
# parameters:
# transaction_builder `builder` - A transaction builder object created by a wallet. If null, tries to use builder in file.
# bool `broadcast` - Try to broadcast this transaction?
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
builder_add_signature: (builder, broadcast, builder_path, error_handler = null) ->
@rpc.request('wallet_builder_add_signature', [builder, broadcast, builder_path], error_handler).then (response) ->
response.result
# Review a transaction in a builder file and add a signature.
# parameters:
# bool `broadcast` - Try to broadcast this transaction?
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
builder_file_add_signature: (broadcast, builder_path, error_handler = null) ->
@rpc.request('wallet_builder_file_add_signature', [broadcast, builder_path], error_handler).then (response) ->
response.result
# Releases escrow balance to third parties
# parameters:
# account_name `pay_fee_with_account_name` - when releasing escrow a transaction fee must be paid by funds not in escrow, this account will pay the fee
# address `escrow_balance_id` - The balance id of the escrow to be released.
# account_name `released_by_account` - the account that is to perform the release.
# share_type `amount_to_sender` - Amount to release back to the sender.
# share_type `amount_to_receiver` - Amount to release to receiver.
# return_type: `transaction_record`
release_escrow: (pay_fee_with_account_name, escrow_balance_id, released_by_account, amount_to_sender, amount_to_receiver, error_handler = null) ->
@rpc.request('wallet_release_escrow', [pay_fee_with_account_name, escrow_balance_id, released_by_account, amount_to_sender, amount_to_receiver], error_handler).then (response) ->
response.result
# Sends given amount to the given name, with the from field set to a different account than the payer. This transfer will occur in a single transaction and will be cheaper, but may reduce your privacy.
# parameters:
# string `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `paying_account_name` - the source account to draw the shares from
# sending_account_name `from_account_name` - the account to show the recipient as being the sender (requires account's private key to be in wallet).
# receive_account_name `to_account_name` - the account to transfer the shares to
# account_name `escrow_account_name` - the account of the escrow agent which has the power to decide how to divide the funds among from/to accounts.
# digest `agreement` - the hash of an agreement between the sender/receiver in the event a dispute arises can be given to escrow agent
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_from_with_escrow: (amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, escrow_account_name, agreement, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer_from_with_escrow', [amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, escrow_account_name, agreement, memo_message, vote_method], error_handler).then (response) ->
response.result
# Sends given amount to the given name, with the from field set to a different account than the payer. This transfer will occur in a single transaction and will be cheaper, but may reduce your privacy.
# parameters:
# string `amount_to_transfer` - the amount of shares to transfer
# asset_symbol `asset_symbol` - the asset to transfer
# sending_account_name `paying_account_name` - the source account to draw the shares from
# sending_account_name `from_account_name` - the account to show the recipient as being the sender (requires account's private key to be in wallet).
# receive_account_name `to_account_name` - the account to transfer the shares to
# string `memo_message` - a memo to store with the transaction
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# return_type: `transaction_record`
transfer_from: (amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, memo_message, vote_method, error_handler = null) ->
@rpc.request('wallet_transfer_from', [amount_to_transfer, asset_symbol, paying_account_name, from_account_name, to_account_name, memo_message, vote_method], error_handler).then (response) ->
response.result
# Scans the blockchain history for operations relevant to this wallet.
# parameters:
# uint32_t `first_block_number` - the first block to scan
# uint32_t `num_blocks` - the number of blocks to scan
# bool `fast_scan` - true to scan as fast as possible but freeze the rest of your computer, and false otherwise
# return_type: `void`
rescan_blockchain: (first_block_number, num_blocks, fast_scan, error_handler = null) ->
@rpc.request('wallet_rescan_blockchain', [first_block_number, num_blocks, fast_scan], error_handler).then (response) ->
response.result
# Queries your wallet for the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# return_type: `transaction_record`
get_transaction: (transaction_id, error_handler = null) ->
@rpc.request('wallet_get_transaction', [transaction_id], error_handler).then (response) ->
response.result
# Scans the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# bool `overwrite_existing` - true to overwrite existing wallet transaction record and false otherwise
# return_type: `transaction_record`
scan_transaction: (transaction_id, overwrite_existing, error_handler = null) ->
@rpc.request('wallet_scan_transaction', [transaction_id, overwrite_existing], error_handler).then (response) ->
response.result
# Scans the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# bool `overwrite_existing` - true to overwrite existing wallet transaction record and false otherwise
# return_type: `void`
scan_transaction_experimental: (transaction_id, overwrite_existing, error_handler = null) ->
@rpc.request('wallet_scan_transaction_experimental', [transaction_id, overwrite_existing], error_handler).then (response) ->
response.result
# Adds a custom note to the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# string `note` - note to add
# return_type: `void`
add_transaction_note_experimental: (transaction_id, note, error_handler = null) ->
@rpc.request('wallet_add_transaction_note_experimental', [transaction_id, note], error_handler).then (response) ->
response.result
# Rebroadcasts the specified transaction
# parameters:
# string `transaction_id` - the id (or id prefix) of the transaction
# return_type: `void`
rebroadcast_transaction: (transaction_id, error_handler = null) ->
@rpc.request('wallet_rebroadcast_transaction', [transaction_id], error_handler).then (response) ->
response.result
# Updates the data published about a given account
# parameters:
# account_name `account_name` - the account that will be updated
# account_name `pay_from_account` - the account from which fees will be paid
# json_variant `public_data` - public data about the account
# uint8_t `delegate_pay_rate` - -1 for non-delegates; otherwise the percent of delegate pay to accept per produced block
# string `account_type` - titan_account | public_account - public accounts do not receive memos and all payments are made to the active key
# return_type: `transaction_record`
account_register: (account_name, pay_from_account, public_data, delegate_pay_rate, account_type, error_handler = null) ->
@rpc.request('wallet_account_register', [account_name, pay_from_account, public_data, delegate_pay_rate, account_type], error_handler).then (response) ->
response.result
# Updates the local private data for an account
# parameters:
# account_name `account_name` - the account that will be updated
# json_variant `private_data` - private data about the account
# return_type: `void`
account_update_private_data: (account_name, private_data, error_handler = null) ->
@rpc.request('wallet_account_update_private_data', [account_name, private_data], error_handler).then (response) ->
response.result
# Updates the data published about a given account
# parameters:
# account_name `account_name` - the account that will be updated
# account_name `pay_from_account` - the account from which fees will be paid
# json_variant `public_data` - public data about the account
# uint8_t `delegate_pay_rate` - -1 for non-delegates; otherwise the percent of delegate pay to accept per produced block
# return_type: `transaction_record`
account_update_registration: (account_name, pay_from_account, public_data, delegate_pay_rate, error_handler = null) ->
  req = @rpc.request 'wallet_account_update_registration', [account_name, pay_from_account, public_data, delegate_pay_rate], error_handler
  req.then (res) -> res.result
# Updates the specified account's active key and broadcasts the transaction.
# parameters:
# account_name `account_to_update` - The name of the account to update the active key of.
# account_name `pay_from_account` - The account from which fees will be paid.
# string `new_active_key` - WIF private key to update active key to. If empty, a new key will be generated.
# return_type: `transaction_record`
account_update_active_key: (account_to_update, pay_from_account, new_active_key, error_handler = null) ->
  req = @rpc.request 'wallet_account_update_active_key', [account_to_update, pay_from_account, new_active_key], error_handler
  req.then (res) -> res.result
# Lists all accounts associated with this wallet
# parameters:
# return_type: `wallet_account_record_array`
list_accounts: (error_handler = null) ->
  # NOTE(review): zero-argument RPCs pass no params array — confirm @rpc.request
  # accepts the (method, error_handler) form.
  req = @rpc.request 'wallet_list_accounts', error_handler
  req.then (res) -> res.result
# Lists all accounts which have been marked as favorites.
# parameters:
# return_type: `wallet_account_record_array`
list_favorite_accounts: (error_handler = null) ->
  # NOTE(review): no params array here (zero-argument RPC form) — verify against RpcService.
  req = @rpc.request 'wallet_list_favorite_accounts', error_handler
  req.then (res) -> res.result
# Lists all unregistered accounts belonging to this wallet
# parameters:
# return_type: `wallet_account_record_array`
list_unregistered_accounts: (error_handler = null) ->
  # NOTE(review): no params array here (zero-argument RPC form) — verify against RpcService.
  req = @rpc.request 'wallet_list_unregistered_accounts', error_handler
  req.then (res) -> res.result
# Lists all accounts for which we have a private key in this wallet
# parameters:
# return_type: `wallet_account_record_array`
list_my_accounts: (error_handler = null) ->
  # NOTE(review): no params array here (zero-argument RPC form) — verify against RpcService.
  req = @rpc.request 'wallet_list_my_accounts', error_handler
  req.then (res) -> res.result
# Get the account record for a given name
# parameters:
# account_name `account_name` - the name of the account to retrieve
# return_type: `wallet_account_record`
get_account: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_get_account', [account_name], error_handler
  req.then (res) -> res.result
# Get the account record for a given name
# parameters:
# account_name `account_name` - the name of the account whose public address you want
# return_type: `address`
get_account_public_address: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_get_account_public_address', [account_name], error_handler
  req.then (res) -> res.result
# Remove a contact account from your wallet
# parameters:
# account_name `account_name` - the name of the contact
# return_type: `void`
remove_contact_account: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_remove_contact_account', [account_name], error_handler
  req.then (res) -> res.result
# Rename an account in wallet
# parameters:
# account_name `current_account_name` - the current name of the account
# new_account_name `new_account_name` - the new name for the account
# return_type: `void`
account_rename: (current_account_name, new_account_name, error_handler = null) ->
  req = @rpc.request 'wallet_account_rename', [current_account_name, new_account_name], error_handler
  req.then (res) -> res.result
# Creates a new user issued asset
# parameters:
# asset_symbol `symbol` - the ticker symbol for the new asset
# string `asset_name` - the name of the asset
# string `issuer_name` - the name of the issuer of the asset
# string `description` - a description of the asset
# real_amount `maximum_share_supply` - the maximum number of shares of the asset
# uint64_t `precision` - defines where the decimal should be displayed, must be a power of 10
# json_variant `public_data` - arbitrary data attached to the asset
# bool `is_market_issued` - creation of a new BitAsset that is created by shorting
# return_type: `transaction_record`
asset_create: (symbol, asset_name, issuer_name, description, maximum_share_supply, precision, public_data, is_market_issued, error_handler = null) ->
  req = @rpc.request 'wallet_asset_create', [symbol, asset_name, issuer_name, description, maximum_share_supply, precision, public_data, is_market_issued], error_handler
  req.then (res) -> res.result
# Updates an existing user-issued asset; only the public_data can be updated if any shares of the asset exist
# parameters:
# asset_symbol `symbol` - the ticker symbol for the asset to update
# optional_string `name` - the new name to give the asset; or null to keep the current name
# optional_string `description` - the new description to give the asset; or null to keep the current description
# optional_variant `public_data` - the new public_data to give the asset; or null to keep the current public_data
# optional_double `maximum_share_supply` - the new maximum_share_supply to give the asset; or null to keep the current maximum_share_supply
# optional_uint64_t `precision` - the new precision to give the asset; or null to keep the current precision
# share_type `issuer_transaction_fee` - an additional fee (denominated in issued asset) charged by the issuer on every transaction that uses this asset type
# asset_permission_array `flags` - a set of flags set by the issuer (if they have permission to set them)
# asset_permission_array `issuer_permissions` - a set of permissions an issuer retains
# account_name `issuer_account_name` - used to transfer the asset to a new user
# uint32_t `required_sigs` - number of signatures from the authority required to control this asset record
# address_list `authority` - owner keys that control this asset record
# return_type: `transaction_record`
asset_update: (symbol, name, description, public_data, maximum_share_supply, precision, issuer_transaction_fee, flags, issuer_permissions, issuer_account_name, required_sigs, authority, error_handler = null) ->
  req = @rpc.request 'wallet_asset_update', [symbol, name, description, public_data, maximum_share_supply, precision, issuer_transaction_fee, flags, issuer_permissions, issuer_account_name, required_sigs, authority], error_handler
  req.then (res) -> res.result
# Issues new shares of a given asset type
# parameters:
# real_amount `amount` - the amount of shares to issue
# asset_symbol `symbol` - the ticker symbol for asset
# account_name `to_account_name` - the name of the account to receive the shares
# string `memo_message` - the memo to send to the receiver
# return_type: `transaction_record`
asset_issue: (amount, symbol, to_account_name, memo_message, error_handler = null) ->
  req = @rpc.request 'wallet_asset_issue', [amount, symbol, to_account_name, memo_message], error_handler
  req.then (res) -> res.result
# Lists the total asset balances for all open escrows
# parameters:
# account_name `account_name` - the account to get a escrow summary for, or leave empty for all accounts
# return_type: `escrow_summary_array`
escrow_summary: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_escrow_summary', [account_name], error_handler
  req.then (res) -> res.result
# Lists the total asset balances for the specified account
# parameters:
# account_name `account_name` - the account to get a balance for, or leave empty for all accounts
# return_type: `account_balance_summary_type`
account_balance: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_account_balance', [account_name], error_handler
  req.then (res) -> res.result
# Lists the total asset balances across all withdraw condition types for the specified account
# parameters:
# account_name `account_name` - the account to get a balance for, or leave empty for all accounts
# return_type: `account_extended_balance_type`
account_balance_extended: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_account_balance_extended', [account_name], error_handler
  req.then (res) -> res.result
# Lists the balance record ids for the specified account
# parameters:
# account_name `account_name` - the account to list balance ids for, or leave empty for all accounts
# return_type: `account_balance_id_summary_type`
account_balance_ids: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_account_balance_ids', [account_name], error_handler
  req.then (res) -> res.result
# Lists the total accumulated yield for asset balances
# parameters:
# account_name `account_name` - the account to get yield for, or leave empty for all accounts
# return_type: `account_balance_summary_type`
account_yield: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_account_yield', [account_name], error_handler
  req.then (res) -> res.result
# Lists all public keys in this account
# parameters:
# account_name `account_name` - the account for which public keys should be listed
# return_type: `public_key_summary_array`
account_list_public_keys: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_account_list_public_keys', [account_name], error_handler
  req.then (res) -> res.result
# Used to transfer some of the delegate's pay from their balance
# parameters:
# account_name `delegate_name` - the delegate whose pay is being cashed out
# account_name `to_account_name` - the account that should receive the funds
# real_amount `amount_to_withdraw` - the amount to withdraw
# return_type: `transaction_record`
delegate_withdraw_pay: (delegate_name, to_account_name, amount_to_withdraw, error_handler = null) ->
  req = @rpc.request 'wallet_delegate_withdraw_pay', [delegate_name, to_account_name, amount_to_withdraw], error_handler
  req.then (res) -> res.result
# Set the fee to add to new transactions
# parameters:
# real_amount `fee` - the wallet transaction fee to set
# return_type: `asset`
set_transaction_fee: (fee, error_handler = null) ->
  req = @rpc.request 'wallet_set_transaction_fee', [fee], error_handler
  req.then (res) -> res.result
# Returns
# parameters:
# asset_symbol `symbol` - the wallet transaction if paid in the given asset type
# return_type: `asset`
get_transaction_fee: (symbol, error_handler = null) ->
  req = @rpc.request 'wallet_get_transaction_fee', [symbol], error_handler
  req.then (res) -> res.result
# Used to place a request to buy a quantity of assets at a price specified in another asset
# parameters:
# account_name `from_account_name` - the account that will provide funds for the bid
# string `quantity` - the quantity of items you would like to buy
# asset_symbol `quantity_symbol` - the type of items you would like to buy
# string `base_price` - the price you would like to pay
# asset_symbol `base_symbol` - the type of asset you would like to pay with
# bool `allow_stupid_bid` - Allow user to place bid at more than 5% above the current sell price.
# return_type: `transaction_record`
market_submit_bid: (from_account_name, quantity, quantity_symbol, base_price, base_symbol, allow_stupid_bid, error_handler = null) ->
  req = @rpc.request 'wallet_market_submit_bid', [from_account_name, quantity, quantity_symbol, base_price, base_symbol, allow_stupid_bid], error_handler
  req.then (res) -> res.result
# Used to place a request to buy a quantity of assets at a price specified in another asset
# parameters:
# account_name `from_account_name` - the account that will provide funds for the bid
# string `quantity` - the quantity of items you would like to buy
# asset_symbol `quantity_symbol` - the type of items you would like to buy
# string `relative_price` - the price relative to the feed you would like to pay
# asset_symbol `base_symbol` - the type of asset you would like to pay with
# string `limit_price` - the limit on what you are willing to pay
# return_type: `transaction_record`
market_submit_relative_bid: (from_account_name, quantity, quantity_symbol, relative_price, base_symbol, limit_price, error_handler = null) ->
  req = @rpc.request 'wallet_market_submit_relative_bid', [from_account_name, quantity, quantity_symbol, relative_price, base_symbol, limit_price], error_handler
  req.then (res) -> res.result
# Used to place a request to sell a quantity of assets at a price specified in another asset
# parameters:
# account_name `from_account_name` - the account that will provide funds for the ask
# string `sell_quantity` - the quantity of items you would like to sell
# asset_symbol `sell_quantity_symbol` - the type of items you would like to sell
# string `ask_price` - the price per unit sold.
# asset_symbol `ask_price_symbol` - the type of asset you would like to be paid
# bool `allow_stupid_ask` - Allow user to place ask at more than 5% below the current buy price.
# return_type: `transaction_record`
market_submit_ask: (from_account_name, sell_quantity, sell_quantity_symbol, ask_price, ask_price_symbol, allow_stupid_ask, error_handler = null) ->
  req = @rpc.request 'wallet_market_submit_ask', [from_account_name, sell_quantity, sell_quantity_symbol, ask_price, ask_price_symbol, allow_stupid_ask], error_handler
  req.then (res) -> res.result
# Used to place a request to sell a quantity of assets at a price specified in another asset
# parameters:
# account_name `from_account_name` - the account that will provide funds for the ask
# string `sell_quantity` - the quantity of items you would like to sell
# asset_symbol `sell_quantity_symbol` - the type of items you would like to sell
# string `relative_ask_price` - the relative price per unit sold.
# asset_symbol `ask_price_symbol` - the type of asset you would like to be paid
# string `limit_ask_price` - the minimum price per unit sold.
# return_type: `transaction_record`
market_submit_relative_ask: (from_account_name, sell_quantity, sell_quantity_symbol, relative_ask_price, ask_price_symbol, limit_ask_price, error_handler = null) ->
  req = @rpc.request 'wallet_market_submit_relative_ask', [from_account_name, sell_quantity, sell_quantity_symbol, relative_ask_price, ask_price_symbol, limit_ask_price], error_handler
  req.then (res) -> res.result
# Used to place a request to short sell a quantity of assets at a price specified
# parameters:
# account_name `from_account_name` - the account that will provide funds for the ask
# string `short_collateral` - the amount of collateral you wish to fund this short with
# asset_symbol `collateral_symbol` - the type of asset collateralizing this short (i.e. XTS)
# string `interest_rate` - the APR you wish to pay interest at (0.0% to 1000.0%)
# asset_symbol `quote_symbol` - the asset to short sell (i.e. USD)
# string `short_price_limit` - maximim price (USD per XTS) that the short will execute at, if 0 then no limit will be applied
# return_type: `transaction_record`
market_submit_short: (from_account_name, short_collateral, collateral_symbol, interest_rate, quote_symbol, short_price_limit, error_handler = null) ->
  req = @rpc.request 'wallet_market_submit_short', [from_account_name, short_collateral, collateral_symbol, interest_rate, quote_symbol, short_price_limit], error_handler
  req.then (res) -> res.result
# Used to place a request to cover an existing short position
# parameters:
# account_name `from_account_name` - the account that will provide funds for the ask
# string `quantity` - the quantity of asset you would like to cover
# asset_symbol `quantity_symbol` - the type of asset you are covering (ie: USD)
# order_id `cover_id` - the order ID you would like to cover
# return_type: `transaction_record`
market_cover: (from_account_name, quantity, quantity_symbol, cover_id, error_handler = null) ->
  req = @rpc.request 'wallet_market_cover', [from_account_name, quantity, quantity_symbol, cover_id], error_handler
  req.then (res) -> res.result
# Cancel and/or create many market orders in a single transaction.
# parameters:
# order_ids `cancel_order_ids` - Order IDs of all market orders to cancel in this transaction.
# order_descriptions `new_orders` - Descriptions of all new orders to create in this transaction.
# bool `sign` - True if transaction should be signed and broadcast (if possible), false otherwse.
# return_type: `transaction_record`
market_batch_update: (cancel_order_ids, new_orders, sign, error_handler = null) ->
  req = @rpc.request 'wallet_market_batch_update', [cancel_order_ids, new_orders, sign], error_handler
  req.then (res) -> res.result
# Add collateral to a short position
# parameters:
# account_name `from_account_name` - the account that will provide funds for the ask
# order_id `cover_id` - the ID of the order to recollateralize
# string `real_quantity_collateral_to_add` - the quantity of collateral of the base asset to add to the specified position
# return_type: `transaction_record`
market_add_collateral: (from_account_name, cover_id, real_quantity_collateral_to_add, error_handler = null) ->
  req = @rpc.request 'wallet_market_add_collateral', [from_account_name, cover_id, real_quantity_collateral_to_add], error_handler
  req.then (res) -> res.result
# List an order list of a specific market
# parameters:
# asset_symbol `base_symbol` - the base symbol of the market
# asset_symbol `quote_symbol` - the quote symbol of the market
# uint32_t `limit` - the maximum number of items to return
# account_name `account_name` - the account for which to get the orders, or empty for all accounts
# return_type: `market_order_map`
market_order_list: (base_symbol, quote_symbol, limit, account_name, error_handler = null) ->
  req = @rpc.request 'wallet_market_order_list', [base_symbol, quote_symbol, limit, account_name], error_handler
  req.then (res) -> res.result
# List an order list of a specific account
# parameters:
# account_name `account_name` - the account for which to get the orders, or empty for all accounts
# uint32_t `limit` - the maximum number of items to return
# return_type: `market_order_map`
account_order_list: (account_name, limit, error_handler = null) ->
  req = @rpc.request 'wallet_account_order_list', [account_name, limit], error_handler
  req.then (res) -> res.result
# Cancel an order: deprecated - use wallet_market_cancel_orders
# parameters:
# order_id `order_id` - the ID of the order to cancel
# return_type: `transaction_record`
market_cancel_order: (order_id, error_handler = null) ->
  req = @rpc.request 'wallet_market_cancel_order', [order_id], error_handler
  req.then (res) -> res.result
# Cancel more than one order at a time
# parameters:
# order_ids `order_ids` - the IDs of the orders to cancel
# return_type: `transaction_record`
market_cancel_orders: (order_ids, error_handler = null) ->
  req = @rpc.request 'wallet_market_cancel_orders', [order_ids], error_handler
  req.then (res) -> res.result
# Reveals the private key corresponding to an account, public key, or address
# parameters:
# string `input` - an account name, public key, or address (quoted hash of public key)
# return_type: `string`
dump_private_key: (input, error_handler = null) ->
  req = @rpc.request 'wallet_dump_private_key', [input], error_handler
  req.then (res) -> res.result
# Returns the allocation of votes by this account
# parameters:
# account_name `account_name` - the account to report votes on, or empty for all accounts
# return_type: `account_vote_summary`
account_vote_summary: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_account_vote_summary', [account_name], error_handler
  req.then (res) -> res.result
# Check how much this account is utilizing its voting power
# parameters:
# account_name `account` -
# return_type: `vote_summary`
check_vote_proportion: (account, error_handler = null) ->
  req = @rpc.request 'wallet_check_vote_proportion', [account], error_handler
  req.then (res) -> res.result
# Set a property in the GUI settings DB
# parameters:
# string `name` - the name of the setting to set
# variant `value` - the value to set the setting to
# return_type: `void`
set_setting: (name, value, error_handler = null) ->
  req = @rpc.request 'wallet_set_setting', [name, value], error_handler
  req.then (res) -> res.result
# Get the value of the given setting
# parameters:
# string `name` - The name of the setting to fetch
# return_type: `optional_variant`
get_setting: (name, error_handler = null) ->
  req = @rpc.request 'wallet_get_setting', [name], error_handler
  req.then (res) -> res.result
# Enable or disable block production for a particular delegate account
# parameters:
# string `delegate_name` - The delegate to enable/disable block production for; ALL for all delegate accounts
# bool `enabled` - true to enable block production, false otherwise
# return_type: `void`
delegate_set_block_production: (delegate_name, enabled, error_handler = null) ->
  req = @rpc.request 'wallet_delegate_set_block_production', [delegate_name, enabled], error_handler
  req.then (res) -> res.result
# Enable or disable wallet transaction scanning
# parameters:
# bool `enabled` - true to enable transaction scanning, false otherwise
# return_type: `bool`
set_transaction_scanning: (enabled, error_handler = null) ->
  req = @rpc.request 'wallet_set_transaction_scanning', [enabled], error_handler
  req.then (res) -> res.result
# Signs the provided message digest with the account key
# parameters:
# string `signer` - A public key, address, or account name whose key to sign with
# sha256 `hash` - SHA256 digest of the message to sign
# return_type: `compact_signature`
sign_hash: (signer, hash, error_handler = null) ->
  req = @rpc.request 'wallet_sign_hash', [signer, hash], error_handler
  req.then (res) -> res.result
# Initiates the login procedure by providing a BitShares Login URL
# parameters:
# string `server_account` - Name of the account of the server. The user will be shown this name as the site he is logging into.
# return_type: `string`
login_start: (server_account, error_handler = null) ->
  req = @rpc.request 'wallet_login_start', [server_account], error_handler
  req.then (res) -> res.result
# Completes the login procedure by finding the user's public account key and shared secret
# parameters:
# public_key `server_key` - The one-time public key from wallet_login_start.
# public_key `client_key` - The client's one-time public key.
# compact_signature `client_signature` - The client's signature of the shared secret.
# return_type: `variant`
login_finish: (server_key, client_key, client_signature, error_handler = null) ->
  req = @rpc.request 'wallet_login_finish', [server_key, client_key, client_signature], error_handler
  req.then (res) -> res.result
# Set this balance's voting address and slate
# parameters:
# address `balance_id` - the current name of the account
# string `voter_address` - The new voting address. If none is specified, tries to re-use existing address.
# vote_selection_method `vote_method` - enumeration [vote_none | vote_all | vote_random | vote_recommended]
# bool `sign_and_broadcast` -
# string `builder_path` - If specified, will write builder here instead of to DATA_DIR/transactions/latest.trx
# return_type: `transaction_builder`
balance_set_vote_info: (balance_id, voter_address, vote_method, sign_and_broadcast, builder_path, error_handler = null) ->
  req = @rpc.request 'wallet_balance_set_vote_info', [balance_id, voter_address, vote_method, sign_and_broadcast, builder_path], error_handler
  req.then (res) -> res.result
# Publishes the current wallet delegate slate to the public data associated with the account
# parameters:
# account_name `publishing_account_name` - The account to publish the slate ID under
# account_name `paying_account_name` - The account to pay transaction fees or leave empty to pay with publishing account
# return_type: `transaction_record`
publish_slate: (publishing_account_name, paying_account_name, error_handler = null) ->
  req = @rpc.request 'wallet_publish_slate', [publishing_account_name, paying_account_name], error_handler
  req.then (res) -> res.result
# Publish your current client version to the specified account's public data record
# parameters:
# account_name `publishing_account_name` - The account to publish the client version under
# account_name `paying_account_name` - The account to pay transaction fees with or leave empty to pay with publishing account
# return_type: `transaction_record`
publish_version: (publishing_account_name, paying_account_name, error_handler = null) ->
  req = @rpc.request 'wallet_publish_version', [publishing_account_name, paying_account_name], error_handler
  req.then (res) -> res.result
# Collect specified account's genesis balances
# parameters:
# account_name `account_name` - account to collect genesis balances for
# return_type: `transaction_record`
collect_genesis_balances: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_collect_genesis_balances', [account_name], error_handler
  req.then (res) -> res.result
# Collect specified account's vested balances
# parameters:
# account_name `account_name` - account to collect vested balances for
# return_type: `transaction_record`
collect_vested_balances: (account_name, error_handler = null) ->
  req = @rpc.request 'wallet_collect_vested_balances', [account_name], error_handler
  req.then (res) -> res.result
# Update a delegate's block signing and feed publishing key
# parameters:
# account_name `authorizing_account_name` - The account that will authorize changing the block signing key
# account_name `delegate_name` - The delegate account which will have its block signing key changed
# public_key `signing_key` - The new key that will be used for block signing
# return_type: `transaction_record`
delegate_update_signing_key: (authorizing_account_name, delegate_name, signing_key, error_handler = null) ->
  req = @rpc.request 'wallet_delegate_update_signing_key', [authorizing_account_name, delegate_name, signing_key], error_handler
  req.then (res) -> res.result
# Attempts to recover accounts created after last backup was taken and returns number of successful recoveries. Use if you have restored from backup and are missing accounts.
# parameters:
# int32_t `accounts_to_recover` - The number of accounts to attept to recover
# int32_t `maximum_number_of_attempts` - The maximum number of keys to generate trying to recover accounts
# return_type: `int32_t`
recover_accounts: (accounts_to_recover, maximum_number_of_attempts, error_handler = null) ->
  req = @rpc.request 'wallet_recover_accounts', [accounts_to_recover, maximum_number_of_attempts], error_handler
  req.then (res) -> res.result
# Attempts to recover any missing recipient and memo information for the specified transaction
# parameters:
# string `transaction_id_prefix` - the id (or id prefix) of the transaction record
# string `recipient_account` - the account name of the recipient (if known)
# return_type: `transaction_record`
recover_transaction: (transaction_id_prefix, recipient_account, error_handler = null) ->
  req = @rpc.request 'wallet_recover_transaction', [transaction_id_prefix, recipient_account], error_handler
  req.then (res) -> res.result
# Verify whether the specified transaction made a TITAN deposit to the current wallet; returns null if not
# parameters:
# string `transaction_id_prefix` - the id (or id prefix) of the transaction record
# return_type: `optional_variant_object`
verify_titan_deposit: (transaction_id_prefix, error_handler = null) ->
  req = @rpc.request 'wallet_verify_titan_deposit', [transaction_id_prefix], error_handler
  req.then (res) -> res.result
# publishes a price feed for BitAssets, only active delegates may do this
# parameters:
# account_name `delegate_account` - the delegate to publish the price under
# real_amount `price` - the number of this asset per XTS
# asset_symbol `asset_symbol` - the type of asset being priced
# return_type: `transaction_record`
publish_price_feed: (delegate_account, price, asset_symbol, error_handler = null) ->
  req = @rpc.request 'wallet_publish_price_feed', [delegate_account, price, asset_symbol], error_handler
  req.then (res) -> res.result
# publishes a set of feeds for BitAssets, only active delegates may do this
# parameters:
# account_name `delegate_account` - the delegate to publish the price under
# price_map `symbol_to_price_map` - maps the BitAsset symbol to its price per share
# return_type: `transaction_record`
publish_feeds: (delegate_account, symbol_to_price_map, error_handler = null) ->
  req = @rpc.request 'wallet_publish_feeds', [delegate_account, symbol_to_price_map], error_handler
  req.then (res) -> res.result
# publishes a set of feeds for BitAssets for all active delegates, most useful for testnets
# parameters:
# price_map `symbol_to_price_map` - maps the BitAsset symbol to its price per share
# return_type: `vector<std::pair<string, wallet_transaction_record>>`
publish_feeds_multi_experimental: (symbol_to_price_map, error_handler = null) ->
  req = @rpc.request 'wallet_publish_feeds_multi_experimental', [symbol_to_price_map], error_handler
  req.then (res) -> res.result
# tries to repair any inconsistent wallet account, key, and transaction records
# parameters:
# account_name `collecting_account_name` - collect any orphan balances into this account
# return_type: `void`
repair_records: (collecting_account_name, error_handler = null) ->
  req = @rpc.request 'wallet_repair_records', [collecting_account_name], error_handler
  req.then (res) -> res.result
# regenerates private keys as part of wallet recovery
# parameters:
# account_name `account_name` - the account the generated keys should be a part of
# uint32_t `max_key_number` - the last key number to regenerate
# return_type: `int32_t`
regenerate_keys: (account_name, max_key_number, error_handler = null) ->
  req = @rpc.request 'wallet_regenerate_keys', [account_name, max_key_number], error_handler
  req.then (res) -> res.result
# Creates a new mail message and returns the unencrypted message.
# parameters:
# string `sender` - The name of the message's sender.
# string `subject` - The subject of the message.
# string `body` - The body of the message.
# message_id `reply_to` - The ID of the message this is in reply to.
# return_type: `message`
mail_create: (sender, subject, body, reply_to, error_handler = null) ->
  req = @rpc.request 'wallet_mail_create', [sender, subject, body, reply_to], error_handler
  req.then (res) -> res.result
# Encrypts a mail message and returns the encrypted message.
# parameters:
# string `recipient` - The name of the message's recipient.
# message `plaintext` - The plaintext message, such as from wallet_mail_create.
# return_type: `message`
mail_encrypt: (recipient, plaintext, error_handler = null) ->
  req = @rpc.request 'wallet_mail_encrypt', [recipient, plaintext], error_handler
  req.then (res) -> res.result
# Opens an encrypted mail message.
# parameters:
# address `recipient` - The address of the message's recipient.
# message `ciphertext` - The encrypted message.
# return_type: `message`
mail_open: (recipient, ciphertext, error_handler = null) ->
  req = @rpc.request 'wallet_mail_open', [recipient, ciphertext], error_handler
  req.then (res) -> res.result
# Sets the list of mail servers an account checks for his mail.
# parameters:
# string `account_name` - The name of the account whose mail servers should be updated.
# string_array `server_list` - A list of names of blockchain accounts who run mail servers.
# string `paying_account` - The name of the account to pay the transaction fee, if different from account_name.
# return_type: `void`
set_preferred_mail_servers: (account_name, server_list, paying_account, error_handler = null) ->
  req = @rpc.request 'wallet_set_preferred_mail_servers', [account_name, server_list, paying_account], error_handler
  req.then (res) -> res.result
# Retract (permanently disable) the specified account in case of master key compromise.
# parameters:
# account_name `account_to_retract` - The name of the account to retract.
# account_name `pay_from_account` - The account from which fees will be paid.
# return_type: `transaction_record`
account_retract: (account_to_retract, pay_from_account, error_handler = null) ->
  req = @rpc.request 'wallet_account_retract', [account_to_retract, pay_from_account], error_handler
  req.then (res) -> res.result
# Generates a human friendly brain wallet key starting with a public salt as the last word
# parameters:
# return_type: `string`
generate_brain_seed: (error_handler = null) ->
  # NOTE(review): no params array here (zero-argument RPC form) — verify against RpcService.
  req = @rpc.request 'wallet_generate_brain_seed', error_handler
  req.then (res) -> res.result
angular.module("app").service("WalletAPI", ["$q", "$log", "RpcService", "$interval", WalletAPI])
|
[
{
"context": "s.push new AllpassFilter a+stereospread,.5\n\n #@reflections = []\n #@reflections.push new Reflection 7830,.",
"end": 26405,
"score": 0.7719463109970093,
"start": 26394,
"tag": "USERNAME",
"value": "reflections"
},
{
"context": "r a+stereospread,.5\n\n #@reflec... | static/js/sound/synth/audioengine.coffee | HomineLudens/microstudio | 511 | `
// Embedded JavaScript: shared lookup tables used by the oscillators and noise
// sources below (filled in by the `do ->` initializers that follow).
const TWOPI = 2*Math.PI
const SIN_TABLE = new Float64Array(10001)
const WHITE_NOISE = new Float64Array(100000)
const COLORED_NOISE = new Float64Array(100000)
`
# Precompute one full sine cycle over 10001 points so oscillators can use
# interpolated table lookups instead of calling Math.sin per sample.
do ->
  for i in [0..10000] by 1
    SIN_TABLE[i] = Math.sin(i/10000*Math.PI*2)
# Embedded JavaScript: band-limited step table (summed odd harmonics) used to
# smooth waveform discontinuities in the Square/Saw oscillators.
`
const BLIP_SIZE = 512
const BLIP = new Float64Array(BLIP_SIZE+1)
`
# Build the band-limited step: sum of odd sine harmonics 1..31, then normalize
# so the final table entry equals 1.
do ->
  for p in [1..31] by 2
    for i in [0..BLIP_SIZE] by 1
      x = (i/BLIP_SIZE-.5)*.5
      BLIP[i] += Math.sin(x*2*Math.PI*p)/p
  norm = BLIP[BLIP_SIZE]
  for i in [0..BLIP_SIZE] by 1
    BLIP[i] /= norm
# Fill the shared noise tables: WHITE_NOISE holds uniform white noise in
# [-1, 1]; COLORED_NOISE holds a low-passed ("pink"-ish) variant derived from
# the same samples via a one-pole accumulator, scaled back up by 6.
do ->
  # (removed unused locals b0..b6 that were declared here)
  n = 0
  for i in [0..99999] by 1
    white = Math.random()*2-1
    n = .99*n+.01*white
    pink = n*6
    WHITE_NOISE[i] = white
    COLORED_NOISE[i] = pink
DBSCALE = (value,range)-> (Math.exp(value*range)/Math.exp(range)-1/Math.exp(range))/(1-1/Math.exp(range))
# Band-limited square/pulse oscillator. Each edge of the pulse schedules a
# BLIP correction spread over a 32-sample ring buffer, suppressing aliasing.
class SquareOscillator
  constructor:(@voice,osc)->
    @invSampleRate = 1/@voice.engine.sampleRate
    @tune = 1
    # Ring buffer of pending band-limiting corrections (one slot per sample).
    @buffer = new Float64Array(32)
    @init(osc) if osc?
  # Reset state for a new note; `sync` starts deterministically at phase 0.
  init:(osc,@sync)->
    @phase = if @sync then 0 else Math.random()
    @sig = -1
    @index = 0
    @update(osc)
    for i in [0..@buffer.length-1]
      @buffer[i] = 0
    return
  # Recompute tuning: coarse is snapped to 1/48 steps spanning +/-4 octaves,
  # fine detune is shaped by a cubic for finer control near center.
  update:(osc,@sync)->
    c = Math.round(osc.coarse*48)/48
    fine = osc.tune*2-1
    fine = fine*(1+fine*fine)*.5
    @tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
    @analog_tune = @tune
  # Produce one sample. `mod` (0..1) narrows the pulse width; the duty-cycle
  # average `avg` is subtracted so the output stays DC-centered.
  process:(freq,mod)->
    dp = @analog_tune*freq*@invSampleRate
    @phase += dp
    m = .5-mod*.49
    avg = 1-2*m
    if @sig<0
      if @phase>=m
        # Rising edge: queue a -1 -> +1 band-limited step at the fractional
        # position of the crossing (dp scaled into BLIP-table steps of 16).
        @sig = 1
        dp = Math.max(0,Math.min(1,(@phase-m)/dp))
        dp *= 16
        dpi = Math.floor(dp)
        a = dp-dpi
        index = @index
        for i in [0..31] by 1
          break if dpi>=512
          @buffer[index] += -1+BLIP[dpi]*(1-a)+BLIP[dpi+1]*a
          dpi += 16
          index = (index+1)%@buffer.length
    else
      if @phase>=1
        # Falling edge at cycle wrap: queue the mirrored correction.
        dp = Math.max(0,Math.min(1,(@phase-1)/dp))
        dp *= 16
        dpi = Math.floor(dp)
        a = dp-dpi
        index = @index
        @sig = -1
        @phase -= 1
        # Unless hard-synced, drift tuning slightly each cycle ("analog" feel).
        @analog_tune = if @sync then @tune else @tune*(1+(Math.random()-.5)*.002)
        for i in [0..31] by 1
          break if dpi>=512
          @buffer[index] += 1-BLIP[dpi]*(1-a)-BLIP[dpi+1]*a
          dpi += 16
          index = (index+1)%@buffer.length
    # Emit raw level plus queued correction, then recycle the buffer slot.
    sig = @sig+@buffer[@index]
    @buffer[@index] = 0
    @index = (@index+1)%@buffer.length
    sig-avg
# Band-limited saw oscillator with a variable mid-cycle jump: `mod` raises the
# slope and inserts a second discontinuity at phase .5, morphing saw toward a
# square-ish shape. Discontinuities are smoothed via the shared BLIP table.
class SawOscillator
  constructor:(@voice,osc)->
    @invSampleRate = 1/@voice.engine.sampleRate
    @tune = 1
    # Ring buffer of pending band-limiting corrections.
    @buffer = new Float64Array(32)
    @init(osc) if osc?
  init:(osc,@sync)->
    @phase = if @sync then 0 else Math.random()
    @sig = -1
    @index = 0
    # True while in the second half-cycle (after the mid-cycle jump).
    @jumped = false
    @update(osc)
    for i in [0..@buffer.length-1]
      @buffer[i] = 0
    return
  # Same tuning mapping as the other oscillators (snapped coarse, cubic fine).
  update:(osc,@sync)->
    c = Math.round(osc.coarse*48)/48
    fine = osc.tune*2-1
    fine = fine*(1+fine*fine)*.5
    @tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
    @analog_tune = @tune
  process:(freq,mod)->
    dphase = @analog_tune*freq*@invSampleRate
    @phase += dphase
    slope = 1+mod
    if not @jumped
      sig = 1-2*@phase*slope
      if @phase>=.5
        # Mid-cycle jump (only audible when mod>0): queue a scaled BLIP step.
        @jumped = true
        sig = mod-2*(@phase-.5)*slope
        dp = Math.max(0,Math.min(1,(@phase-.5)/dphase))
        dp *= 16
        dpi = Math.floor(dp)
        a = dp-dpi
        index = @index
        if mod>0
          for i in [0..31] by 1
            break if dpi>=512
            @buffer[index] += (-1+BLIP[dpi]*(1-a)+BLIP[dpi+1]*a)*mod
            dpi += 16
            index = (index+1)%@buffer.length
    else
      sig = mod-2*(@phase-.5)*slope
      if @phase>=1
        # Cycle wrap: queue the main saw reset discontinuity.
        @jumped = false
        dp = Math.max(0,Math.min(1,(@phase-1)/dphase))
        dp *= 16
        dpi = Math.floor(dp)
        a = dp-dpi
        index = @index
        @phase -= 1
        sig = 1-2*@phase*slope
        # Slight per-cycle tuning drift unless hard-synced.
        @analog_tune = if @sync then @tune else @tune*(1+(Math.random()-.5)*.002)
        for i in [0..31] by 1
          break if dpi>=512
          @buffer[index] += -1+BLIP[dpi]*(1-a)+BLIP[dpi+1]*a
          dpi += 16
          index = (index+1)%@buffer.length
    sig += @buffer[@index]
    @buffer[@index] = 0
    @index = (@index+1)%@buffer.length
    # DC compensation proportional to slope and rate.
    offset = 16*2*dphase*slope
    sig+offset
# Sine oscillator with a 3-deep phase-modulation stack (FM-style): `mod`
# drives two nested modulator amounts m1/m2 normalized against pitch.
class SineOscillator
  constructor:(@voice,osc)->
    @sampleRate = @voice.engine.sampleRate
    @invSampleRate = 1/@voice.engine.sampleRate
    @maxRatio = @sampleRate/Math.PI/5/(2*Math.PI)
    @tune = 1
    @init(osc) if osc?
  init:(osc,@sync)->
    # Synced notes start at .25 (peak of the smooth wave); free notes randomize.
    @phase = if @sync then .25 else Math.random()
    @update(osc)
  update:(osc,@sync)->
    c = Math.round(osc.coarse*48)/48
    fine = osc.tune*2-1
    fine = fine*(1+fine*fine)*.5
    @tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
    # Modulation depth normalization: deeper for lower pitches.
    @modnorm = 25/Math.sqrt(@tune*@voice.freq)
    @dphase = @tune*@invSampleRate
  # Interpolated lookup into the precomputed SIN_TABLE.
  sinWave:(x)->
    x = (x-Math.floor(x))*10000
    ix = Math.floor(x)
    ax = x-ix
    SIN_TABLE[ix]*(1-ax)+SIN_TABLE[ix+1]*ax
  # Smoothstep-based sine approximation (cheap, continuous first derivative).
  sinWave2:(x)->
    x = 2*(x-Math.floor(x))
    if x>1
      x = 2-x
    x*x*(3-2*x)*2-1
  process:(freq,mod)->
    @phase = (@phase+freq*@dphase)
    if @phase>=1
      @phase -= 1
      # Per-cycle tuning drift unless hard-synced.
      @analog_tune = if @sync then @tune else @tune*(1+(Math.random()-.5)*.002)
      @dphase = @analog_tune*@invSampleRate
    m1 = mod*@modnorm
    m2 = mod*m1
    p = @phase
    # carrier <- modulator <- modulator (phase modulation chain).
    return @sinWave2(p+m1*@sinWave2(p+m2*@sinWave2(p)))
# Vowel/formant oscillator: two overlapping grains, each carrying the first
# and second formant frequencies, are retriggered every pitch cycle (grains
# alternate over a 2-cycle phase). `mod` sweeps through the formant table.
class VoiceOscillator
  constructor:(@voice,osc)->
    @sampleRate = @voice.engine.sampleRate
    @invSampleRate = 1/@voice.engine.sampleRate
    @tune = 1
    @init(osc) if osc?
    # First/second formant frequency tables, interpolated by `mod`.
    @f1 = [320,500,700,1000,500,320,700,500,320,320]
    @f2 = [800,1000,1150,1400,1500,1650,1800,2300,3200,3200]
  init:(osc)->
    @phase = 0
    # Grain oscillator phases start at .25 (waveform peak of sinWave2).
    @grain1_p1 = .25
    @grain1_p2 = .25
    @grain2_p1 = .25
    @grain2_p2 = .25
    @update(osc)
  update:(osc)->
    c = Math.round(osc.coarse*48)/48
    fine = osc.tune*2-1
    fine = fine*(1+fine*fine)*.5
    @tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
    @modnorm = 25/Math.sqrt(@tune*@voice.freq)
    @dphase = @tune*@invSampleRate
  # Smoothstep-based sine approximation (same shape as SineOscillator's).
  sinWave2:(x)->
    x = 2*(x-Math.floor(x))
    if x>1
      x = 2-x
    x*x*(3-2*x)*2-1
  process:(freq,mod)->
    p1 = @phase<1
    @phase = (@phase+freq*@dphase)
    # Interpolate the formant pair selected by mod.
    m = mod*(@f1.length-2)
    im = Math.floor(m)
    am = m-im
    f1 = @f1[im]*(1-am)+@f1[im+1]*am
    f2 = @f2[im]*(1-am)+@f2[im+1]*am
    # Retrigger grain 2 when crossing phase 1, grain 1 when wrapping at 2.
    if p1 and @phase>=1
      @grain2_p1 = .25
      @grain2_p2 = .25
    if @phase>=2
      @phase -= 2
      @grain1_p1 = .25
      @grain1_p2 = .25
    x = @phase-1
    x *= x*x
    # Triangular window centered on phase 1 for grain 1.
    vol = 1-Math.abs(1-@phase)
    sig = vol*(@sinWave2(@grain1_p1)*.25+.125*@sinWave2(@grain1_p2))
    @grain1_p1 += f1*@invSampleRate
    @grain1_p2 += f2*@invSampleRate
    x = ((@phase+1)%2)-1
    x *= x*x
    # Complementary window for grain 2.
    vol = if @phase<1 then 1-@phase else @phase-1
    sig += vol*(@sinWave2(@grain2_p1)*.25+.125*@sinWave2(@grain2_p2))
    # Fundamental underneath the formant grains.
    sig += @sinWave2(@phase+.25)
    @grain2_p1 += f1*@invSampleRate
    @grain2_p2 += f2*@invSampleRate
    sig
# Karplus-Strong style string: a delay line one pitch-period long is fed noise
# plus its own reflection, with automatic gain normalization from a tracked
# power estimate so the string neither blows up nor dies out.
class StringOscillator
  constructor:(@voice,osc)->
    @sampleRate = @voice.engine.sampleRate
    @invSampleRate = 1/@voice.engine.sampleRate
    @maxRatio = @sampleRate/Math.PI/5/(2*Math.PI)
    @tune = 1
    @init(osc) if osc?
  init:(osc)->
    @index = 0
    # Delay line sized for the lowest supported pitch (1/10 second).
    @buffer = new Float64Array(@sampleRate/10)
    @update(osc)
    @prev = 0
    @power = 1
  update:(osc)->
    c = Math.round(osc.coarse*48)/48
    fine = osc.tune*2-1
    fine = fine*(1+fine*fine)*.5
    @tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
  process:(freq,mod)->
    # Read the delay line one period back, with linear interpolation.
    period = @sampleRate/(freq*@tune)
    x = (@index-period+@buffer.length)%@buffer.length
    ix = Math.floor(x)
    a = x-ix
    reflection = @buffer[ix]*(1-a)+@buffer[(ix+1)%@buffer.length]*a
    m = Math.exp(Math.log(0.99)/freq)
    m = 1  # NOTE(review): the frequency-dependent damping above is overridden here
    r = reflection*m+@prev*(1-m)
    @prev = r
    # Continuous noise excitation, depth controlled by mod.
    n = Math.random()*2-1
    m = mod*.5
    m = m*m*.999+.001
    sig = n*m+r
    # Track power (fast attack, slow decay) and normalize the loop gain.
    @power = Math.max(Math.abs(sig)*.1+@power*.9,@power*.999)
    sig /= (@power+.0001)
    @buffer[@index] = sig
    @index += 1
    @index = 0 if @index>=@buffer.length
    sig
  # Earlier variant; not referenced anywhere in the visible code.
  processOld:(freq,mod)->
    period = @sampleRate/(freq*@tune)
    x = (@index-period+@buffer.length)%@buffer.length
    ix = Math.floor(x)
    a = x-ix
    reflection = @buffer[ix]*(1-a)+@buffer[(ix+1)%@buffer.length]*a
    m = mod
    r = reflection*m+@prev*(1-m)
    @prev = r
    sig = (Math.random()*2-1)*.01+r*.99
    @buffer[@index] = sig/@power
    @power = Math.abs(sig)*.001+@power*.999
    @index += 1
    @index = 0 if @index>=@buffer.length
    sig
# Deterministic per-voice noise source. A linear-congruential generator yields
# white noise; a one-pole lowpass accumulator derives a colored ("pink"-ish)
# variant; `mod` (0..1) crossfades between the two.
class Noise
  constructor:(@voice)->
  # Reset to the fixed seed so every note starts with identical noise.
  init:()->
    @phase = 0
    @seed = 1382
    @n = 0
  # Return one sample: mod=0 -> pure white, mod=1 -> fully colored.
  process:(mod)->
    @seed = (@seed*13907+12345)&0x7FFFFFFF
    sample = (@seed/0x80000000)*2-1
    @n = @n*.99+sample*.01
    colored = @n*6
    sample*(1-mod)+colored*mod
# Modulation ADSR envelope. `@phase` encodes the stage: 0..1 attack, 1..2
# decay, 2..3 sustain, >=3 release. Stage deltas are per-sample increments
# derived from the cubic-shaped 0..1 ADSR knob values.
class ModEnvelope
  constructor:(@voice)->
    @sampleRate = @voice.engine.sampleRate
  init:(@params)->
    @phase = 0
    @update()
  # `a` may be overridden (e.g. velocity-modulated attack) without mutating @params.
  update:(a = @params.a)->
    @a = 1/(@sampleRate*20*Math.pow(a,3)+1)
    @d = 1/(@sampleRate*20*Math.pow(@params.d,3)+1)
    @s = @params.s
    @r = 1/(@sampleRate*20*Math.pow(@params.r,3)+1)
  # Advance one sample; `noteon` false jumps straight to the release stage.
  process:(noteon)->
    if @phase<1
      sig = @sig = @phase = Math.min(1,@phase+@a)
    else if @phase<2
      @phase = Math.min(2,@phase+@d)
      sig = @sig = 1-(@phase-1)*(1-@s)
    else if @phase<3
      sig = @sig = @s
    else
      # Linear ramp down from the level held at release time (@sig).
      @phase = @phase+@r
      sig = Math.max(0,@sig*(1-(@phase-3)))
    if @phase<3 and not noteon
      @phase = 3
    sig
# Amplitude ADSR envelope. Attack is polynomial; decay and release are
# exponential (per-sample multipliers with a knob-defined half-life);
# sustain level is mapped through the DBSCALE curve.
class AmpEnvelope
  constructor:(@voice)->
    @sampleRate = @voice.engine.sampleRate
    @sig = 0
  init:(@params)->
    @phase = 0
    @sig = 0
    @update()
  # `a` may be overridden (velocity-modulated attack) without mutating @params.
  update:(a = @params.a)->
    @a2 = 1/(@sampleRate*(20*Math.pow(a,3)+.00025))
    # Multiplier giving a half-life set by the decay knob (cubic-shaped).
    @d = Math.exp(Math.log(0.5)/(@sampleRate*(10*Math.pow(@params.d,3)+0.001)))
    @s = DBSCALE(@params.s,2)
    # NOTE(review): debug logging fires on every envelope update — likely a
    # leftover; removing it would change observable behavior (console output).
    console.info("sustain #{@s}")
    @r = Math.exp(Math.log(0.5)/(@sampleRate*(10*Math.pow(@params.r,3)+0.001)))
  # Advance one sample; stages: <1 attack, ==1 decay, <3 sustain, else release.
  process:(noteon)->
    if @phase<1
      @phase += @a2
      # Slightly curved attack: (0.25 + 0.75*phase) * phase.
      sig = @sig = (@phase*.75+.25)*@phase
      if @phase>=1
        sig = @sig = 1
        @phase = 1
    else if @phase==1
      sig = @sig = @sig*@d
      if sig <= @s
        sig = @sig = @s
        @phase = 2
    else if @phase<3
      sig = @sig = @s
    else
      sig = @sig = @sig*@r
    if @phase<3 and not noteon
      @phase = 3
    sig
# Low-frequency oscillator. `@process` is rebound to the routine matching
# `params.type`; rate mapping differs per waveform, and the sine variant can
# also run at audio rate (keyboard-tracked) when `params.audio` is set.
class LFO
  constructor:(@voice)->
    @invSampleRate = 1/@voice.engine.sampleRate
  init:(@params,sync)->
    if sync
      # Phase-lock to engine time so synced voices share the same LFO phase.
      rate = @params.rate
      rate = .1+rate*rate*rate*(100-.1)
      t = @voice.engine.getTime()*rate
      @phase = t%1
    else
      @phase = Math.random()
    @process = @processSine
    @update()
    # State for the random waveforms and the smoothed output.
    @r1 = Math.random()*2-1
    @r2 = Math.random()*2-1
    @out = 0
  update:()->
    switch @params.type
      when 0 then @process = @processSaw
      when 1 then @process = @processSquare
      when 2 then @process = @processSine
      when 3 then @process = @processTriangle
      when 4 then @process = @processRandom
      when 5 then @process = @processRandomStep
    # Keyboard-tracked base frequency for audio-rate mode (key 57 -> 440 Hz).
    @audio_freq = 440*Math.pow(Math.pow(2,1/12),@voice.key-57)
  # Smoothstep pseudo-sine; audio mode maps `rate` to a ratio of note pitch.
  processSine:(rate)->
    if @params.audio
      r = if rate<.5 then .25+rate*rate/.25*.75 else rate*rate*4
      rate = @audio_freq*r
    else
      rate = .01+rate*rate*20
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    p = @phase*2
    if p<1
      p*p*(3-2*p)*2-1
    else
      p -= 1
      1-p*p*(3-2*p)*2
  processTriangle:(rate)->
    # rate^8 mapping gives very wide range (.05 .. ~10000 Hz-ish units).
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    return (1-4*Math.abs(@phase-.5))
  processSaw:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    out = (1-@phase*2)
    # One-pole smoothing softens the jump at wrap-around.
    @out = @out*.97+out*.03
  processSquare:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    out = if @phase<.5 then 1 else -1
    @out = @out*.97+out*.03
  # New random target each cycle, linearly interpolated from the previous one.
  processRandom:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    if @phase>=1
      @phase -= 1
      @r1 = @r2
      @r2 = Math.random()*2-1
    @r1*(1-@phase)+@r2*@phase
  # New random level each cycle, held but smoothed toward the target.
  processRandomStep:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    if @phase>=1
      @phase -= 1
      @r1 = Math.random()*2-1
    @out = @out*.97+@r1*.03
  # Unused placeholders.
  processDiscreteRandom:()->
  processSmoothRandom:()->
# Per-voice resonant filter: low/band/high-pass as one or two cascaded biquad
# sections (the second enabled by `filter.slope`). Coefficients are recomputed
# every sample from `cutoff`/`q`, with sin/cos taken from SIN_TABLE.
class Filter
  constructor:(@voice)->
    @sampleRate = @voice.engine.sampleRate
    @invSampleRate = 1/@voice.engine.sampleRate
    @halfSampleRate = @sampleRate*.5
  init:(@layer)->
    # Direct-form II state for the two biquad sections.
    @fm00 = 0
    @fm01 = 0
    @fm10 = 0
    @fm11 = 0
    @update()
  # Select the processing routine from the layer's filter type.
  update:()->
    switch @layer.inputs.filter.type
      when 0
        @process = @processLowPass
      when 1
        @process = @processBandPass
      when 2
        @process = @processHighPass
  processHighPass:(sig,cutoff,q)->
    # Normalized frequency, then table lookup of sin/cos; the +2500 offset
    # into the 10000-entry sine table yields cos (quarter-cycle shift).
    w0 = Math.max(0,Math.min(@halfSampleRate,cutoff))*@invSampleRate
    w0 *= 10000
    iw0 = Math.floor(w0)
    aw0 = w0-iw0
    cosw0 = (1-aw0)*SIN_TABLE[iw0+2500]+aw0*SIN_TABLE[iw0+2501]
    sinw0 = (1-aw0)*SIN_TABLE[iw0]+aw0*SIN_TABLE[iw0+1]
    alpha = sinw0 / (2 * q)
    invOnePlusAlpha = 1/(1 + alpha)
    a0 = (-2 * cosw0) * invOnePlusAlpha
    a1 = (1 - alpha) * invOnePlusAlpha
    onePlusCosw0 = 1 + cosw0
    b0 = onePlusCosw0*.5* invOnePlusAlpha
    b1 = -onePlusCosw0* invOnePlusAlpha
    b2 = b0
    w = sig - a0*@fm00 - a1*@fm01
    sig = b0*w + b1*@fm00 + b2*@fm01
    @fm01 = @fm00
    @fm00 = w
    if @layer.inputs.filter.slope
      # Second cascaded section for the steeper slope.
      w = sig - a0*@fm10 - a1*@fm11
      sig = b0*w + b1*@fm10 + b2*@fm11
      @fm11 = @fm10
      @fm10 = w
    sig
  processBandPass:(sig,cutoff,q)->
    w0 = Math.max(0,Math.min(@halfSampleRate,cutoff))*@invSampleRate
    w0 *= 10000
    iw0 = Math.floor(w0)
    aw0 = w0-iw0
    cosw0 = (1-aw0)*SIN_TABLE[iw0+2500]+aw0*SIN_TABLE[iw0+2501]
    sinw0 = (1-aw0)*SIN_TABLE[iw0]+aw0*SIN_TABLE[iw0+1]
    alpha = sinw0 / (2 * q)
    invOnePlusAlpha = 1/(1 + alpha)
    oneLessCosw0 = 1 - cosw0
    a0 = (-2 * cosw0) * invOnePlusAlpha
    a1 = (1 - alpha) * invOnePlusAlpha
    b0 = q * alpha * invOnePlusAlpha
    b1 = 0
    b2 = -b0
    w = sig - a0*@fm00 - a1*@fm01
    sig = b0*w + b1*@fm00 + b2*@fm01
    @fm01 = @fm00
    @fm00 = w
    if @layer.inputs.filter.slope
      w = sig - a0*@fm10 - a1*@fm11
      sig = b0*w + b1*@fm10 + b2*@fm11
      @fm11 = @fm10
      @fm10 = w
    sig
  processLowPass:(sig,cutoff,q)->
    w0 = Math.max(0,Math.min(@halfSampleRate,cutoff))*@invSampleRate
    w0 *= 10000
    iw0 = Math.floor(w0)
    aw0 = w0-iw0
    cosw0 = (1-aw0)*SIN_TABLE[iw0+2500]+aw0*SIN_TABLE[iw0+2501]
    sinw0 = (1-aw0)*SIN_TABLE[iw0]+aw0*SIN_TABLE[iw0+1]
    alpha = sinw0 / (2 * q)
    invOnePlusAlpha = 1/(1 + alpha)
    oneLessCosw0 = 1 - cosw0
    b1 = oneLessCosw0 * invOnePlusAlpha
    b0 = b1*.5
    b2 = b0
    a0 = (-2 * cosw0) * invOnePlusAlpha
    a1 = (1 - alpha) * invOnePlusAlpha
    w = sig - a0*@fm00 - a1*@fm01
    sig = b0*w + b1*@fm00 + b2*@fm01
    @fm01 = @fm00
    @fm00 = w
    if @layer.inputs.filter.slope
      w = sig - a0*@fm10 - a1*@fm11
      sig = b0*w + b1*@fm10 + b2*@fm11
      @fm11 = @fm10
      @fm10 = w
    sig
# Waveshaping distortion: drives each sample through a soft-clip exponential
# curve and crossfades dry/wet by `amount`.
class Distortion
  constructor:()->
    @amount = .5
    @rate = .5
  # `amount` = dry/wet mix (0..1), `rate` = drive (0..1).
  update:(data)->
    @amount = data.amount
    @rate = data.rate
  # Shape `length` frames of the stereo `buffer` in place.
  process:(buffer,length)->
    drive = 1+@rate*@rate*99
    dry = 1-@amount
    for ch in [0..1] by 1
      channel = buffer[ch]
      for i in [0..length-1] by 1
        sig = channel[i]
        s = sig*drive
        shaped = if s<0 then -1+Math.exp(s) else 1-Math.exp(-s)
        channel[i] = dry*sig+@amount*shaped
    return
# Bit crusher: combines sample-rate reduction (hold each output for @amount
# input samples) with bit-depth reduction (quantize to 1/@rate steps, rounding
# away from zero).
class BitCrusher
  constructor:()->
    @phase = 0
    @left = 0
    @right = 0
  update:(data)->
    # Samples held per output step (1 .. 256).
    @amount = Math.pow(2,data.amount*8)
    # Quantization steps per unit; higher data.rate -> fewer steps (harsher).
    @rate = Math.pow(2,(1-data.rate)*16)*2
  # Crush `length` frames of the stereo `buffer` in place.
  process:(buffer,length)->
    # (removed unused locals `r`/`crush` computed here previously)
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      @phase += 1
      if @phase>@amount
        @phase -= @amount
        @left = if left>0 then Math.ceil(left*@rate)/@rate else Math.floor(left*@rate)/@rate
        @right = if right>0 then Math.ceil(right*@rate)/@rate else Math.floor(right*@rate)/@rate
      buffer[0][i] = @left
      buffer[1][i] = @right
    return
# Stereo chorus: three delay taps modulated by mutually-detuned sine LFOs are
# mixed back into each channel, with a little feedback into the delay lines.
class Chorus
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate)
    @right_buffer = new Float64Array(@sampleRate)
    @phase1 = Math.random()
    @phase2 = Math.random()
    @phase3 = Math.random()
    # Three slightly-detuned modulation rates (cycles per sample).
    @f1 = 1.031/@sampleRate
    @f2 = 1.2713/@sampleRate
    @f3 = 0.9317/@sampleRate
    @index = 0
  update:(data)->
    @amount = Math.pow(data.amount,.5)
    @rate = data.rate
  # Linear-interpolated read from a circular buffer at a fractional position.
  read:(buffer,pos)->
    pos += buffer.length if pos<0
    i = Math.floor(pos)
    a = pos-i
    buffer[i]*(1-a)+buffer[(i+1)%buffer.length]*a
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = @left_buffer[@index] = buffer[0][i]
      right = @right_buffer[@index] = buffer[1][i]
      @phase1 += @f1*(.5+.5*@rate)
      @phase2 += @f2*(.5+.5*@rate)
      @phase3 += @f3*(.5+.5*@rate)
      # Tap offsets sweep over 0..0.4% of the buffer length.
      p1 = (1+Math.sin(@phase1*Math.PI*2))*@left_buffer.length*.002
      p2 = (1+Math.sin(@phase2*Math.PI*2))*@left_buffer.length*.002
      p3 = (1+Math.sin(@phase3*Math.PI*2))*@left_buffer.length*.002
      @phase1 -= 1 if @phase1>=1
      @phase2 -= 1 if @phase2>=1
      @phase3 -= 1 if @phase3>=1
      s1 = @read(@left_buffer,@index-p1)
      s2 = @read(@right_buffer,@index-p2)
      s3 = @read(@right_buffer,@index-p3)
      # Asymmetric tap mixes give the stereo width.
      pleft = @amount*(s1*.2+s2*.7+s3*.1)
      pright = @amount*(s1*.6+s2*.2+s3*.2)
      left += pleft
      right += pright
      @left_buffer[@index] += pleft*.5*@amount
      # NOTE(review): feeds `pleft` (not `pright`) into the right buffer —
      # looks like a copy-paste slip; confirm before changing (alters the sound).
      @right_buffer[@index] += pleft*.5*@amount
      @index += 1
      @index = 0 if @index>=@left_buffer.length
      buffer[0][i] = left
      buffer[1][i] = right
    return
# Phaser: two slow LFOs sweep modulated delay taps; the delayed signal is
# scaled by `amount` and the dry input subtracted, producing moving notches.
class Phaser
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate)
    @right_buffer = new Float64Array(@sampleRate)
    @phase1 = Math.random()
    @phase2 = Math.random()
    # Very slow, slightly different sweep rates per channel.
    @f1 = .0573/@sampleRate
    @f2 = .0497/@sampleRate
    @index = 0
  update:(data)->
    @amount = data.amount
    @rate = data.rate
  # Linear-interpolated read from a circular buffer at a fractional position.
  read:(buffer,pos)->
    pos += buffer.length if pos<0
    i = Math.floor(pos)
    a = pos-i
    buffer[i]*(1-a)+buffer[(i+1)%buffer.length]*a
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      @phase1 += @f1*(.5+.5*@rate)
      @phase2 += @f2*(.5+.5*@rate)
      # Delay sweeps between .0001s and .0501s worth of samples.
      o1 = (1+Math.sin(@phase1*Math.PI*2))/2
      p1 = @sampleRate*(.0001+.05*o1)
      o2 = (1+Math.sin(@phase2*Math.PI*2))/2
      p2 = @sampleRate*(.0001+.05*o2)
      @phase1 -= 1 if @phase1>=1
      @phase2 -= 1 if @phase2>=1
      @left_buffer[@index] = left
      @right_buffer[@index] = right
      s1 = @read(@left_buffer,@index-p1)
      s2 = @read(@right_buffer,@index-p2)
      # Feedback into the delay line, scaled by rate.
      @left_buffer[@index] += s1*@rate*.9
      @right_buffer[@index] += s2*@rate*.9
      # Delayed-minus-dry creates the notch comb.
      buffer[0][i] = s1*@amount-left
      buffer[1][i] = s2*@amount-right
      @index += 1
      @index = 0 if @index>=@left_buffer.length
    return
# Flanger: same structure as Phaser but the delayed signal is *added* to the
# dry input, and both LFO phases start at 0 (in-phase sweep).
class Flanger
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate)
    @right_buffer = new Float64Array(@sampleRate)
    @phase1 = 0
    @phase2 = 0
    # Very slow, slightly different sweep rates per channel.
    @f1 = .0573/@sampleRate
    @f2 = .0497/@sampleRate
    @index = 0
  update:(data)->
    @amount = data.amount
    @rate = data.rate
  # Linear-interpolated read from a circular buffer at a fractional position.
  read:(buffer,pos)->
    pos += buffer.length if pos<0
    i = Math.floor(pos)
    a = pos-i
    buffer[i]*(1-a)+buffer[(i+1)%buffer.length]*a
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      @phase1 += @f1
      @phase2 += @f2
      # Delay sweeps between .0001s and .0501s worth of samples.
      o1 = (1+Math.sin(@phase1*Math.PI*2))/2
      p1 = @sampleRate*(.0001+.05*o1)
      o2 = (1+Math.sin(@phase2*Math.PI*2))/2
      p2 = @sampleRate*(.0001+.05*o2)
      @phase1 -= 1 if @phase1>=1
      @phase2 -= 1 if @phase2>=1
      @left_buffer[@index] = left
      @right_buffer[@index] = right
      s1 = @read(@left_buffer,@index-p1)
      s2 = @read(@right_buffer,@index-p2)
      # Feedback into the delay line, scaled by rate.
      @left_buffer[@index] += s1*@rate*.9
      @right_buffer[@index] += s2*@rate*.9
      # Delayed-plus-dry creates the comb peaks.
      buffer[0][i] = s1*@amount+left
      buffer[1][i] = s2*@amount+right
      @index += 1
      @index = 0 if @index>=@left_buffer.length
    return
# Tempo-mapped stereo ping-pong delay: each channel is fed the *other*
# channel's delayed signal; the right tap is offset by .75ms for width.
class Delay
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    # Three seconds of delay memory per channel.
    @left_buffer = new Float64Array(@sampleRate*3)
    @right_buffer = new Float64Array(@sampleRate*3)
    @index = 0
  update:(data)->
    @amount = data.amount
    @rate = data.rate
    # Map rate onto a tempo, then a four-tick delay length in samples.
    tempo = (30+Math.pow(@rate,2)*170*4)*4
    tick = @sampleRate/(tempo/60)
    @L = Math.round(tick*4)
    @R = Math.round(tick*4+@sampleRate*0.00075)
    @fb = @amount*.95
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      # Cross-feed: left hears the delayed right and vice versa.
      left += @right_buffer[(@index+@left_buffer.length-@L)%@left_buffer.length]*@fb
      right += @left_buffer[(@index+@right_buffer.length-@R)%@right_buffer.length]*@fb
      buffer[0][i] = @left_buffer[@index] = left
      buffer[1][i] = @right_buffer[@index] = right
      @index += 1
      @index = 0 if @index>=@left_buffer.length
    return
# Stereo widener/panner: each channel subtracts a delayed, smoothed copy of
# the opposite channel (cross-feed delays derived from path lengths in meters
# over the 340 m/s speed of sound), then applies equal-power pan gains.
class Spatializer
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate/10)
    @right_buffer = new Float64Array(@sampleRate/10)
    @index = 0
    @left_delay1 = 0
    @left_delay2 = 0
    @right_delay1 = 0
    @right_delay2 = 0
    # One-pole smoothing state per channel.
    @left = 0
    @right = 0
    # Cross-feed delays in samples.
    @left_delay1 = Math.round(@sampleRate*9.7913/340)
    @right_delay1 = Math.round(@sampleRate*11.1379/340)
    # NOTE(review): *_delay2 are computed but never read in process().
    @left_delay2 = Math.round(@sampleRate*11.3179/340)
    @right_delay2 = Math.round(@sampleRate*12.7913/340)
  # `spatialize` sets cross-feed depth; `pan` (0..1) sets left/right balance.
  process:(buffer,length,spatialize,pan)->
    mnt = spatialize
    mnt2 = mnt*mnt  # NOTE(review): unused
    # Equal-power pan gains, attenuated as spatialization increases.
    left_pan = Math.cos(pan*Math.PI/2)/(1+spatialize)
    right_pan = Math.sin(pan*Math.PI/2)/(1+spatialize)
    left_buffer = buffer[0]
    right_buffer = buffer[1]
    for i in [0..length-1] by 1
      left = left_buffer[i]
      right = right_buffer[i]
      # One-pole smoothing of what gets written into the cross-feed buffers.
      @left = left*.5+@left*.5
      @right = right*.5+@right*.5
      @left_buffer[@index] = @left
      @right_buffer[@index] = @right
      left_buffer[i] = (left-mnt*@right_buffer[(@index+@right_buffer.length-@left_delay1)%@right_buffer.length])*left_pan
      right_buffer[i] = (right-mnt*@left_buffer[(@index+@right_buffer.length-@right_delay1)%@right_buffer.length])*right_pan
      @index += 1
      @index = 0 if @index>=@left_buffer.length
    return
# Three-band EQ: the mid band is isolated by a fixed 900 Hz band-pass biquad,
# the low band by one-pole smoothing of the residue, and high is what remains.
class EQ
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    # Band-pass biquad coefficients for the fixed mid-band center.
    @mid = 900
    q = .5
    w0 = 2*Math.PI*@mid/@sampleRate
    cosw0 = Math.cos(w0)
    sinw0 = Math.sin(w0)
    alpha = sinw0 / (2 * q)
    invOnePlusAlpha = 1/(1 + alpha)
    oneLessCosw0 = 1 - cosw0
    @a0 = (-2 * cosw0) * invOnePlusAlpha
    @a1 = (1 - alpha) * invOnePlusAlpha
    @b0 = q * alpha * invOnePlusAlpha
    @b1 = 0
    @b2 = -@b0
    # Per-channel filter state.
    @llow = 0
    @rlow = 0
    @lfm0 = 0
    @lfm1 = 0
    @rfm0 = 0
    @rfm1 = 0
    # NOTE(review): @mid is reused below as the mid-band *gain*, overwriting
    # the 900 Hz center frequency stored above (coefficients already computed).
    @low = 1
    @mid = 1
    @high = 1
  # Band gains; knob range 0..1 maps to gain 0..2.
  update:(data)->
    @low = data.low*2
    @mid = data.mid*2
    @high = data.high*2
  # NOTE(review): appears dead/broken — references bare locals a0/b0/b1/b2
  # that are never defined, and @fm0/@fm1 state that is never initialized.
  processBandPass:(sig,cutoff,q)->
    w = sig - a0*@fm0 - a1*@fm1
    sig = b0*w + b1*@fm0 + b2*@fm1
    @fm1 = @fm0
    @fm0 = w
    sig
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      # Mid band via the biquad; low via one-pole smoothing; high = residue.
      lw = left - @a0*@lfm0 - @a1*@lfm1
      lmid = @b0*lw + @b1*@lfm0 + @b2*@lfm1
      @lfm1 = @lfm0
      @lfm0 = lw
      left -= lmid
      @llow = left*.1+@llow*.9
      lhigh = left-@llow
      buffer[0][i] = @llow*@low+lmid*@mid+lhigh*@high
      rw = right - @a0*@rfm0 - @a1*@rfm1
      rmid = @b0*rw + @b1*@rfm0 + @b2*@rfm1
      @rfm1 = @rfm0
      @rfm0 = rw
      right -= rmid
      @rlow = right*.1+@rlow*.9
      rhigh = right-@rlow
      buffer[1][i] = @rlow*@low+rmid*@mid+rhigh*@high
    return
# Feedback comb filter with a damped (one-pole low-passed) feedback path —
# the parallel building block of the Reverb below.
class CombFilter
  constructor:(@length,@feedback = .5)->
    @buffer = new Float64Array(@length)
    @index = 0
    @store = 0   # lowpass state inside the feedback loop
    @damp = .2
  # Feed one sample in; return the sample falling out of the delay line.
  process:(input)->
    out = @buffer[@index]
    @store = out*(1-@damp)+@store*@damp
    @buffer[@index] = input+@store*@feedback
    @index += 1
    @index = 0 if @index>=@length
    out
# Allpass diffusion stage: passes all frequencies at equal long-term gain
# while smearing phase; used in series by Reverb to thicken the comb output.
class AllpassFilter
  constructor:(@length,@feedback = .5)->
    @buffer = new Float64Array(@length)
    @index = 0
  # Feed one sample through the allpass delay loop.
  process:(input)->
    delayed = @buffer[@index]
    result = -input+delayed
    @buffer[@index] = input+delayed*@feedback
    @index += 1
    @index = 0 if @index>=@length
    result
# Reverb built from 8 parallel damped comb filters per channel (right channel
# detuned by `stereospread` samples) followed by 4 series allpass diffusers —
# the delay lengths match the classic Freeverb tunings.
class Reverb
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    combtuning = [1116,1188,1277,1356,1422,1491,1557,1617]
    allpasstuning = [556,441,341,225]
    stereospread = 23
    @left_combs = []
    @right_combs = []
    @left_allpass = []
    @right_allpass = []
    @res = [0,0]
    @spread = .25
    @wet = .025
    for c in combtuning
      @left_combs.push new CombFilter c,.9
      @right_combs.push new CombFilter c+stereospread,.9
    for a in allpasstuning
      @left_allpass.push new AllpassFilter a,.5
      @right_allpass.push new AllpassFilter a+stereospread,.5
  update:(data)->
    @wet = data.amount*.05
    # `rate` controls tail length (comb feedback) and narrows the stereo spread.
    feedback = .7+Math.pow(data.rate,.25)*.29
    damp = .2
    for i in [0..@left_combs.length-1] by 1
      @left_combs[i].feedback = feedback
      @right_combs[i].feedback = feedback
      @left_combs[i].damp = damp
      @right_combs[i].damp = damp
    @spread = .5-data.rate*.5
  process:(buffer,length)->
    for s in [0..length-1] by 1
      outL = 0
      outR = 0
      left = buffer[0][s]
      right = buffer[1][s]
      # Mono sum feeds both channels' comb banks.
      input = (left+right)*.5
      for i in [0..@left_combs.length-1]
        outL += @left_combs[i].process(input)
        outR += @right_combs[i].process(input)
      for i in [0..@left_allpass.length-1]
        outL = @left_allpass[i].process(outL)
        outR = @right_allpass[i].process(outR)
      # Cross-blend L/R by @spread, then dry/wet mix.
      buffer[0][s] = (outL*(1-@spread)+outR*@spread)*@wet+left*(1-@wet)
      buffer[1][s] = (outR*(1-@spread)+outL*@spread)*@wet+right*(1-@wet)
    return
class Voice
@oscillators = [SawOscillator,SquareOscillator,SineOscillator,VoiceOscillator,StringOscillator]
constructor:(@engine)->
@osc1 = new SineOscillator @
@osc2 = new SineOscillator @
@noise = new Noise @
@lfo1 = new LFO @
@lfo2 = new LFO @
@filter = new Filter @
@env1 = new AmpEnvelope @
@env2 = new ModEnvelope @
@filter_increment = 0.0005*44100/@engine.sampleRate
@modulation = 0
@noteon_time = 0
init:(layer)->
if @osc1 not instanceof Voice.oscillators[layer.inputs.osc1.type]
@osc1 = new Voice.oscillators[layer.inputs.osc1.type] @
if @osc2 not instanceof Voice.oscillators[layer.inputs.osc2.type]
@osc2 = new Voice.oscillators[layer.inputs.osc2.type] @
@osc1.init(layer.inputs.osc1,layer.inputs.sync)
@osc2.init(layer.inputs.osc2,layer.inputs.sync)
@noise.init(layer,layer.inputs.sync)
@lfo1.init(layer.inputs.lfo1,layer.inputs.sync)
@lfo2.init(layer.inputs.lfo2,layer.inputs.sync)
@filter.init(layer)
@env1.init(layer.inputs.env1)
@env2.init(layer.inputs.env2)
@updateConstantMods()
update:()->
return if not @layer?
if @osc1 not instanceof Voice.oscillators[@layer.inputs.osc1.type]
@osc1 = new Voice.oscillators[@layer.inputs.osc1.type] @,@layer.inputs.osc1
if @osc2 not instanceof Voice.oscillators[@layer.inputs.osc2.type]
@osc2 = new Voice.oscillators[@layer.inputs.osc2.type] @,@layer.inputs.osc2
@osc1.update(@layer.inputs.osc1,@layer.inputs.sync)
@osc2.update(@layer.inputs.osc2,@layer.inputs.sync)
@env1.update()
@env2.update()
@lfo1.update()
@lfo2.update()
@filter.update()
@updateConstantMods()
updateConstantMods:()->
@osc1_amp = DBSCALE(@inputs.osc1.amp,3)
@osc2_amp = DBSCALE(@inputs.osc2.amp,3)
@osc1_mod = @inputs.osc1.mod
@osc2_mod = @inputs.osc2.mod
@noise_amp = DBSCALE(@inputs.noise.amp,3)
@noise_mod = @inputs.noise.mod
if @inputs.velocity.amp>0
p = @inputs.velocity.amp
p *= p
p *= 4
#amp = Math.pow(@velocity,p) #Math.exp((-1+@velocity)*5)
norm = @inputs.velocity.amp*4
amp = Math.exp(@velocity*norm)/Math.exp(norm)
#amp = @inputs.velocity.amp*amp+(1-@inputs.velocity.amp)
else
amp = 1
@osc1_amp *= amp
@osc2_amp *= amp
@noise_amp *= amp
c = Math.log(1024*@freq/22000)/(10*Math.log(2))
@cutoff_keymod = (c-.5)*@inputs.filter.follow
@cutoff_base = @inputs.filter.cutoff+@cutoff_keymod
@env2_amount = @inputs.env2.amount
@lfo1_rate = @inputs.lfo1.rate
@lfo1_amount = @inputs.lfo1.amount
@lfo2_rate = @inputs.lfo2.rate
@lfo2_amount = @inputs.lfo2.amount
# "Mod"
#
# "Filter Cutoff"
# "Filter Resonance"
#
# "Osc1 Mod"
# "Osc1 Amp"
#
# "Osc2 Mod"
# "Osc2 Amp"
#
# "Noise Amp"
# "Noise Color"
#
# "Env1 Attack"
# "Env2 Attack"
# "Env2 Amount"
#
# "LFO1 Amount"
# "LFO1 Rate"
#
# "LFO2 Amount"
# "LFO2 Rate"
# ]
switch @inputs.velocity.out
when 0 # Oscs Mod
mod = @velocity*(@inputs.velocity.amount-.5)*2
@osc1_mod = Math.min(1,Math.max(0,@osc1_mod+mod))
@osc2_mod = Math.min(1,Math.max(0,@osc2_mod+mod))
when 1 # Filter Cutoff
mod = @velocity*(@inputs.velocity.amount-.5)*2
@cutoff_base += mod
when 3 # Osc1 Mod
mod = @velocity*(@inputs.velocity.amount-.5)*2
@osc1_mod = Math.min(1,Math.max(0,@osc1_mod+mod))
when 4 # Osc1 Amp
mod = @velocity*(@inputs.velocity.amount-.5)*2
@osc1_amp = Math.max(0,@osc1_amp+mod)
when 5 # Osc2 Mod
mod = @velocity*(@inputs.velocity.amount-.5)*2
@osc2_mod = Math.min(1,Math.max(0,@osc2_mod+mod))
when 6 # Osc2 Amp
mod = @velocity*(@inputs.velocity.amount-.5)*2
@osc2_amp = Math.max(0,@osc2_amp+mod)
when 7 # Noise Amp
mod = @velocity*(@inputs.velocity.amount-.5)*2
@noise_amp = Math.max(0,@noise_amp+mod)
when 8 # Noise Color
mod = @velocity*(@inputs.velocity.amount-.5)*2
@noise_mod = Math.min(1,Math.max(0,@noise_mod+mod))
when 9 # Env1 Attack
mod = @velocity*(@inputs.velocity.amount-.5)*2
a = Math.max(0,Math.min(@inputs.env1.a+mod,1))
@env1.update(a)
when 10 # Env2 Attack
mod = @velocity*(@inputs.velocity.amount-.5)*2
a = Math.max(0,Math.min(@inputs.env2.a+mod,1))
@env2.update(a)
when 11 # Env2 Amount
mod = @velocity*(@inputs.velocity.amount-.5)*2
@env2_amount = Math.max(0,Math.min(1,@env2_amount+mod))
when 12 # LFO1 Amount
mod = @velocity*(@inputs.velocity.amount-.5)*2
@lfo1_amount = Math.max(0,@lfo1_amount+mod)
when 13 # LFO1 Rate
mod = @velocity*(@inputs.velocity.amount-.5)*2
@lfo1_rate = Math.min(1,Math.max(0,@lfo1_rate+mod))
when 14 # LFO2 Amount
mod = @velocity*(@inputs.velocity.amount-.5)*2
@lfo2_amount = Math.max(0,@lfo2_amount+mod)
when 15 # LFO2 Rate
mod = @velocity*(@inputs.velocity.amount-.5)*2
@lfo2_rate = Math.min(1,Math.max(0,@lfo2_rate+mod))
if @freq?
c = Math.log(1024*@freq/22000)/(10*Math.log(2))
@cutoff_keymod = (c-.5)*@inputs.filter.follow
noteOn:(layer,@key,velocity,legato=false)->
@velocity = velocity/127
if @layer?
@layer.removeVoice @
@layer = layer
@inputs = @layer.inputs
if legato and @on
@freq = 440*Math.pow(Math.pow(2,1/12),@key-57)
if layer.last_key?
@glide_from = layer.last_key
@glide = true
@glide_phase = 0
glide_time = (@inputs.glide*.025+Math.pow(@inputs.glide,16)*.975)*10
@glide_inc = 1/(glide_time*@engine.sampleRate+1)
else
@freq = 440*Math.pow(Math.pow(2,1/12),@key-57)
@init @layer
@on = true
@cutoff = 0
@modulation = @layer.instrument.modulation
@pitch_bend = @layer.instrument.pitch_bend
@modulation_v = 0
@pitch_bend_v = 0
@noteon_time = Date.now()
noteOff:()->
@on = false
process:()->
osc1_mod = @osc1_mod
osc2_mod = @osc2_mod
osc1_amp = @osc1_amp
osc2_amp = @osc2_amp
if @glide
k = @glide_from*(1-@glide_phase)+@key*@glide_phase
osc1_freq = osc2_freq = 440*Math.pow(Math.pow(2,1/12),k-57)
@glide_phase += @glide_inc
if @glide_phase>=1
@glide = false
else
osc1_freq = osc2_freq = @freq
if Math.abs(@pitch_bend-@layer.instrument.pitch_bend)>.0001
@pitch_bend_v += .001*(@layer.instrument.pitch_bend-@pitch_bend)
@pitch_bend_v *= .5
@pitch_bend += @pitch_bend_v
if Math.abs(@pitch_bend-.5)>.0001
p = @pitch_bend*2-1
p *= 2
f = Math.pow(Math.pow(2,1/12),p)
osc1_freq *= f
osc2_freq *= f
noise_amp = @noise_amp
noise_mod = @noise_mod
lfo1_rate = @lfo1_rate
lfo1_amount = @lfo1_amount
lfo2_rate = @lfo2_rate
lfo2_amount = @lfo2_amount
cutoff = @cutoff_base
q = @inputs.filter.resonance
if Math.abs(@modulation-@layer.instrument.modulation)>.0001
@modulation_v += .001*(@layer.instrument.modulation-@modulation)
@modulation_v *= .5
@modulation += @modulation_v
switch @inputs.modulation.out
when 0 # Amp
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc1_amp = Math.max(0,osc1_amp+mod)
osc2_amp = Math.max(0,osc2_amp+mod)
noise_amp = Math.max(0,noise_amp*(1+mod))
when 1 # Mod
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 2 # Osc1 Amp
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc1_amp = Math.max(0,osc1_amp+mod)
when 3 # Osc1 Mod
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
when 4 # Osc2 Amp
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc2_amp = Math.max(0,osc2_amp+mod)
when 5 # Osc2 Mod
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 6 # Noise Amp
mod = (@inputs.modulation.amount-.5)*2*@modulation
noise_amp = Math.max(0,noise_amp+mod)
when 7 # Noise Color
mod = (@inputs.modulation.amount-.5)*2*@modulation
noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
when 8 # Filter Cutoff
mod = (@inputs.modulation.amount-.5)*2*@modulation
cutoff += mod
when 9 # Filter Resonance
mod = (@inputs.modulation.amount-.5)*2*@modulation
q = Math.max(0,Math.min(1,q+mod))
when 10 # LFO1 Amount
mod = (@inputs.modulation.amount-.5)*2*@modulation
lfo1_amount = Math.max(0,Math.min(1,lfo1_amount+mod))
when 11 # LFO1 Rate
mod = (@inputs.modulation.amount-.5)*2*@modulation
lfo1_rate = Math.max(0,Math.min(1,lfo1_rate+mod))
when 12 # LFO2 Amount
mod = (@inputs.modulation.amount-.5)*2*@modulation
lfo2_amount = Math.max(0,Math.min(1,lfo2_amount+mod))
when 13 # LFO2 Rate
mod = (@inputs.modulation.amount-.5)*2*@modulation
lfo2_rate = Math.max(0,Math.min(1,lfo2_rate+mod))
switch @inputs.env2.out
when 0 # Filter Cutoff
cutoff += @env2.process(@on)*(@env2_amount*2-1)
when 1 # Filter Resonance
q = Math.max(0,Math.min(1,@env2.process(@on)*(@env2_amount*2-1)))
when 2 # Pitch
mod = @env2_amount*2-1
mod *= @env2.process(@on)
mod = 1+mod
osc1_freq *= mod
osc2_freq *= mod
when 3 # Mod
mod = @env2.process(@on)*(@env2_amount*2-1)
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 4 # Amp
mod = @env2.process(@on)*(@env2_amount*2-1)
osc1_amp = Math.max(0,osc1_amp+mod)
osc2_amp = Math.max(0,osc2_amp+mod)
noise_amp = Math.max(0,noise_amp*(1+mod))
when 5 #Osc1 Pitch
mod = @env2_amount*2-1
mod *= @env2.process(@on)
osc1_freq *= 1+mod
when 6 #Osc1 Mod
mod = @env2.process(@on)*(@env2_amount*2-1)
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
when 7 #Osc1 Amp
mod = @env2.process(@on)*(@env2_amount*2-1)
osc1_amp = Math.max(0,osc1_amp+mod)
when 8 #Osc2 Pitch
mod = @env2_amount*2-1
mod *= @env2.process(@on)
osc1_freq *= 1+mod
when 9 #Osc2 Mod
mod = @env2.process(@on)*(@env2_amount*2-1)
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 10 #Osc2 Amp
mod = @env2.process(@on)*(@env2_amount*2-1)
osc2_amp = Math.max(0,osc2_amp+mod)
when 11 # Noise amp
mod = @env2.process(@on)*(@env2_amount*2-1)
noise_amp = Math.max(0,noise_amp+mod)
when 12 # Noise color
mod = @env2.process(@on)*(@env2_amount*2-1)
noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
when 13 # LFO1 Amount
mod = @env2.process(@on)*(@env2_amount*2-1)
lfo1_amount = Math.min(1,Math.max(0,lfo1_amount+mod))
when 14 # LFO1 rate
mod = @env2.process(@on)*(@env2_amount*2-1)
lfo1_rate = Math.min(1,Math.max(0,lfo1_rate+mod))
when 15 # LFO2 Amount
mod = @env2.process(@on)*(@env2_amount*2-1)
lfo2_amount = Math.min(1,Math.max(0,lfo2_amount+mod))
when 16 # LFO2 rate
mod = @env2.process(@on)*(@env2_amount*2-1)
lfo2_rate = Math.min(1,Math.max(0,lfo2_rate+mod))
switch @inputs.lfo1.out
when 0 # Pitch
mod = lfo1_amount
if @inputs.lfo1.audio
mod = 1+mod*mod*@lfo1.process(lfo1_rate)*16
else
mod = 1+mod*mod*@lfo1.process(lfo1_rate)
osc1_freq *= mod
osc2_freq *= mod
when 1 # Mod
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 2 # Amp
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc1_amp = Math.max(0,osc1_amp+mod)
osc2_amp = Math.max(0,osc2_amp+mod)
noise_amp = Math.max(0,noise_amp*(1+mod))
when 3 #Osc1 Pitch
mod = lfo1_amount
mod = 1+mod*mod*@lfo1.process(lfo1_rate)
osc1_freq *= mod
when 4 #Osc1 Mod
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
when 5 #Osc1 Amp
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc1_amp = Math.max(0,osc1_amp+mod)
when 6 #Osc2 Pitch
mod = lfo1_amount
mod = 1+mod*mod*@lfo1.process(lfo1_rate)
osc2_freq *= mod
when 7 #Osc2 Mod
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 8 #Osc2 Amp
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc2_amp = Math.max(0,osc2_amp+mod)
when 9 # Noise amp
mod = @lfo1.process(lfo1_rate)*lfo1_amount
noise_amp = Math.max(0,noise_amp+mod)
when 10 # Noise color
mod = @lfo1.process(lfo1_rate)*lfo1_amount
noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
when 11
cutoff += @lfo1.process(lfo1_rate)*lfo1_amount
when 12
q = Math.max(0,Math.min(1,@lfo1.process(lfo1_rate)*lfo1_amount))
when 13 # LFO2 Amount
mod = @lfo1.process(lfo1_rate)*lfo1_amount
lfo2_amount = Math.min(1,Math.max(0,lfo2_amount+mod))
when 14 # LFO2 rate
mod = @lfo1.process(lfo1_rate)*lfo1_amount
lfo2_rate = Math.min(1,Math.max(0,lfo2_rate+mod))
switch @inputs.lfo2.out
when 0 # Pitch
mod = lfo2_amount
if @inputs.lfo2.audio
mod = 1+mod*mod*@lfo2.process(lfo2_rate)*16
else
mod = 1+mod*mod*@lfo2.process(lfo2_rate)
osc1_freq *= mod
osc2_freq *= mod
when 1 # Mod
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 2 # Amp
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc1_amp = Math.max(0,osc1_amp+mod)
osc2_amp = Math.max(0,osc2_amp+mod)
noise_amp = Math.max(0,noise_amp*(1+mod))
when 3 #Osc1 Pitch
mod = lfo2_amount
mod = 1+mod*mod*@lfo2.process(lfo2_rate)
osc1_freq *= mod
when 4 #Osc1 Mod
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
when 5 #Osc1 Amp
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc1_amp = Math.max(0,osc1_amp+mod)
when 6 #Osc2 Pitch
mod = lfo2_amount
mod = 1+mod*mod*@lfo2.process(lfo2_rate)
osc2_freq *= mod
when 7 #Osc2 Mod
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 8 #Osc2 Amp
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc2_amp = Math.max(0,osc2_amp+mod)
when 9 # Noise amp
mod = @lfo2.process(lfo2_rate)*lfo2_amount
noise_amp = Math.max(0,noise_amp+mod)
when 10 # Noise color
mod = @lfo2.process(lfo2_rate)*lfo2_amount
noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
when 11
cutoff += @lfo2.process(lfo2_rate)*lfo2_amount
when 12
q = Math.max(0,Math.min(1,@lfo2.process(lfo2_rate)*lfo2_amount))
switch @inputs.combine
when 1
s1 = @osc1.process(osc1_freq,osc1_mod)*osc1_amp
s2 = @osc2.process(osc2_freq,osc2_mod)*osc2_amp
sig = (s1+s2)*(s1+s2)
#sig = @osc1.process(osc1_freq,osc1_mod)*@osc2.process(osc2_freq,osc2_mod)*Math.max(osc1_amp,osc2_amp)*4
when 2
sig = @osc2.process(osc2_freq*Math.max(0,1-@osc1.process(osc1_freq,osc1_mod)*osc1_amp),osc2_mod)*osc2_amp
else
sig = @osc1.process(osc1_freq,osc1_mod)*osc1_amp+@osc2.process(osc2_freq,osc2_mod)*osc2_amp
if noise_amp>0
sig += @noise.process(noise_mod)*noise_amp
mod = @env2.process(@on)
if not @cutoff
@cutoff = cutoff
else
if @cutoff<cutoff
@cutoff += Math.min(cutoff-@cutoff,@filter_increment)
else if @cutoff>cutoff
@cutoff += Math.max(cutoff-@cutoff,-@filter_increment)
cutoff = @cutoff
# VCF
cutoff = Math.pow(2,Math.max(0,Math.min(cutoff,1))*10)*22000/1024
sig *= @env1.process(@on)
#@cutoff += 0.001*(cutoff-@cutoff)
sig = @filter.process(sig,cutoff,q*q*9.5+.5)
class Instrument
  # Top-level playable unit: fans MIDI events out to its layers and
  # mixes their stereo outputs into @output.
  constructor:(@engine)->
    @layers = []
    @layers.push new Layer @
    @modulation = 0
    @pitch_bend = .5    # 0..1, 0.5 = center / no bend
  noteOn:(key,velocity)->
    for l in @layers
      l.noteOn(key,velocity)
    return
  noteOff:(key)->
    for l in @layers
      l.noteOff(key)
    return
  setModulation:(@modulation)->
  setPitchBend:(@pitch_bend)->
  # Render `length` samples from every layer and sum them into @output.
  process:(length)->
    if false #@layers.length == 1
      # Disabled single-layer fast path (kept for reference).
      @layers[0].process(length)
      @output = @layers[0].output
    else
      # (Re)allocate the stereo mix buffers on first use or growth.
      if not @output? or @output[0].length<length
        @output = [new Float64Array(length),new Float64Array(length)]
      for l in @layers
        l.process(length)
      for i in [0..length-1] by 1
        left = 0
        right = 0
        for l in @layers
          left += l.output[0][i]
          right += l.output[1][i]
        # res = @fx1.process(sig)
        @output[0][i] = left
        @output[1][i] = right
    return
# Effect-class lookup tables: Layer.update maps inputs.fx1.type /
# inputs.fx2.type (an index into these arrays) to a constructor;
# type -1 means "no effect".
# NOTE(review): Reverb is not defined in this file - confirm it is
# declared elsewhere before this table is evaluated.
FX1 = [
  Distortion
  BitCrusher
  Chorus
  Flanger
  Phaser
  Delay
]
FX2 = [
  Delay
  Reverb
  Chorus
  Flanger
  Phaser
]
class Layer
  # One synthesis layer of an Instrument: owns the parameter set
  # (@inputs), the active voices, the two FX slots, EQ and spatializer.
  constructor:(@instrument)->
    @engine = @instrument.engine
    @voices = []
    @eq = new EQ(@engine)
    @spatializer = new Spatializer(@engine)
    # Default patch. Values are normalized 0..1 knob positions unless
    # noted; `type`/`out` fields are integer selector indices.
    @inputs =
      osc1:
        type: 0
        tune: .5
        coarse: .5
        amp: .5
        mod: 0
      osc2:
        type: 0
        tune: .5
        coarse: .5
        amp: .5
        mod: 0
      combine: 0
      noise:
        amp: 0
        mod: 0
      filter:
        cutoff: 1
        resonance: 0
        type: 0
        slope: 1
        follow: 0
      disto:
        wet:0
        drive:0
      bitcrusher:
        wet: 0
        drive: 0
        crush: 0
      env1:
        a: 0
        d: 0
        s: 1
        r: 0
      env2:
        a: .1
        d: .1
        s: .5
        r: .1
        out: 0
        amount: .5
      lfo1:
        type: 0
        amount: 0
        rate: .5
        out: 0
      lfo2:
        type: 0
        amount: 0
        rate: .5
        out: 0
      fx1:
        type: -1
        amount: 0
        rate: 0
      fx2:
        type: -1
        amount: 0
        rate: 0
      eq:
        low: .5
        mid: .5
        high: .5
      spatialize: .5
      pan: .5
      polyphony: 1
      glide: .5
      sync: 1
      velocity:
        out: 0
        amount: .5
        amp: .5
      modulation:
        out: 0
        amount: .5
  # Allocate (or, in mono mode, legato-reuse) a voice for `key`.
  noteOn:(key,velocity)->
    if @inputs.polyphony == 1 and @last_voice? and @last_voice.on
      voice = @last_voice
      voice.noteOn @,key,velocity,true
      @voices.push voice
    else
      voice = @engine.getVoice()
      voice.noteOn @,key,velocity
      @voices.push voice
    @last_voice = voice
    @last_key = key
  removeVoice:(voice)->
    index = @voices.indexOf(voice)
    if index>=0
      @voices.splice index,1
  noteOff:(key)->
    for v in @voices
      if v.key == key
        v.noteOff()
    return
  # Rebuild FX instances when their type selector changed, then push
  # the current parameters into FX and EQ.
  update:()->
    if @inputs.fx1.type>=0
      if not @fx1? or @fx1 not instanceof FX1[@inputs.fx1.type]
        @fx1 = new FX1[@inputs.fx1.type] @engine
    else
      @fx1 = null
    if @inputs.fx2.type>=0
      if not @fx2? or @fx2 not instanceof FX2[@inputs.fx2.type]
        @fx2 = new FX2[@inputs.fx2.type] @engine
    else
      @fx2 = null
    if @fx1?
      @fx1.update(@inputs.fx1)
    if @fx2?
      @fx2.update(@inputs.fx2)
    @eq.update(@inputs.eq)
  # Mix all voices (mono) into stereo @output, then run the
  # spatializer, FX chain and EQ in place.
  process:(length)->
    if not @output? or @output[0].length<length
      @output = [new Float64Array(length),new Float64Array(length)]
    # Reap voices whose amp envelope has decayed to silence
    # (reverse iteration so splicing is safe).
    for i in [@voices.length-1..0] by -1
      v = @voices[i]
      if not v.on and v.env1.sig<.00001
        v.env1.sig = 0
        @removeVoice v
    for i in [0..length-1] by 1
      sig = 0
      for v in @voices
        sig += v.process()
      @output[0][i] = sig
      @output[1][i] = sig
    @spatializer.process(@output,length,@inputs.spatialize,@inputs.pan)
    if @fx1?
      @fx1.process(@output,length)
    if @fx2?
      @fx2.process(@output,length)
    @eq.process(@output,length)
    return
class AudioEngine
  # Owner of the fixed voice pool and instrument list; entry point for
  # MIDI events and the audio render callback.
  constructor:(@sampleRate)->
    @voices = []
    @voice_index = 0
    @num_voices = 8    # fixed polyphony pool shared by all layers
    for i in [0..@num_voices-1]
      @voices[i] = new Voice @
    @instruments = []
    @instruments.push new Instrument @
    @avg = 0
    @samples = 0
    @time = 0
    # NOTE(review): @inputs is never defined on the engine, so this
    # sets layer.inputs to undefined - looks like a leftover; confirm
    # @layer is unused before deleting.
    @layer =
      inputs: @inputs
    @start = Date.now()
  # Dispatch a raw MIDI message [status, data1, data2].
  event:(data)->
    if data[0] == 144 and data[2]>0
      @instruments[0].noteOn(data[1],data[2])
    else if data[0] == 128 or (data[0] == 144 and data[2] == 0)
      @instruments[0].noteOff(data[1])
    else if data[0]>=176 and data[0]<192 and data[1] == 1 # modulation wheel
      @instruments[0].setModulation(data[2]/127)
    else if data[0]>=224 and data[0]<240 # pitch bend
      # 14-bit bend value rescaled to 0..1 with 0.5 at center (8192).
      v = (data[1]+128*data[2])
      console.info "PB value=#{v}"
      if v>=8192
        v = (v-8192)/(16383-8192)
        v = .5+.5*v
      else
        v = .5*v/8192
      console.info("Pitch Bend = #{v}")
      @instruments[0].setPitchBend(v)
    return
  # Seconds elapsed since engine construction (used for LFO key-sync).
  getTime:()->
    (Date.now()-@start)/1000
  # Voice stealing: prefer a released voice with the quietest tail,
  # otherwise steal the oldest still-sounding voice.
  getVoice:()->
    best = @voices[0]
    for i in [1..@voices.length-1]
      v = @voices[i]
      if best.on
        if v.on
          if v.noteon_time<best.noteon_time
            best = v
        else
          best = v
      else
        if not v.on and v.env1.sig<best.env1.sig
          best = v
    return best
  # Propagate parameter changes to all voices and layer FX.
  updateVoices:()->
    for v in @voices
      v.update()
    for l in @instruments[0].layers
      l.update()
    return
  # Audio render callback: mix instruments, soft-clip, write to output.
  process:(inputs,outputs,parameters)->
    output = outputs[0]
    time = Date.now()
    res = [0,0]
    for inst in @instruments
      inst.process(output[0].length)
    for channel,i in output
      if i<2
        for j in [0..channel.length-1] by 1
          sig = 0
          for inst in @instruments
            sig += inst.output[i][j]
          sig *= .125
          # Exponential soft clipper keeps |sig| < 1.
          sig = if sig<0 then -(1-Math.exp(sig)) else 1-Math.exp(-sig)
          channel[j] = sig
    @time += Date.now()-time
    @samples += channel.length
    # Log processing cost roughly once per second of rendered audio.
    if @samples >= @sampleRate
      @samples -= @sampleRate
      console.info @time+" ms ; buffer size = "+channel.length
      @time = 0
    return
class Blip
  # Band-limited step table: at each of @size+1 points, sums the odd
  # harmonics 1..31 of a sine over a half-cycle window, then normalizes
  # so the final entry equals 1.
  constructor:()->
    @size = 512
    @samples = new Float64Array(@size+1)
    for i in [0..@size] by 1
      x = (i/@size-.5)*.5
      acc = 0
      for p in [1..31] by 2
        acc += Math.sin(x*2*Math.PI*p)/p
      @samples[i] = acc
    scale = @samples[@size]
    @samples[j] /= scale for j in [0..@size] by 1
    return
`
// AudioWorklet glue: hosts the CoffeeScript AudioEngine on the audio
// rendering thread. Messages arrive as JSON strings on the port:
//   {name:"note",  data:[status,d1,d2]}   -> forwarded as a MIDI event
//   {name:"param", id:"a.b.c", value:v}   -> written into layer inputs
class MyWorkletProcessor extends AudioWorkletProcessor {
  constructor() {
    super()
    this.synth = new AudioEngine(sampleRate)
    this.port.onmessage = (e) => {
      console.info(e)
      var data = JSON.parse(e.data)
      if (data.name == "note")
      {
        this.synth.event(data.data)
      }
      else if (data.name == "param")
      {
        var value = data.value
        var s = data.id.split(".")
        // Walk the dotted path down the inputs tree, then assign the
        // leaf and let the engine re-read parameters.
        data = this.synth.instruments[0].layers[0].inputs
        while (s.length>1)
        {
          data = data[s.splice(0,1)[0]]
        }
        data[s[0]] = value
        this.synth.updateVoices()
      }
    }
  }
  process(inputs, outputs, parameters) {
    this.synth.process(inputs,outputs,parameters)
    return true
  }
}
registerProcessor('my-worklet-processor', MyWorkletProcessor)
`
| 41656 | `
// Shared lookup tables used by every voice (embedded JS so they become
// plain top-level consts).
const TWOPI = 2*Math.PI
const SIN_TABLE = new Float64Array(10001)
const WHITE_NOISE = new Float64Array(100000)
const COLORED_NOISE = new Float64Array(100000)
`
# One full sine period sampled at 10001 points (last point == first).
do ->
  for i in [0..10000] by 1
    SIN_TABLE[i] = Math.sin(i/10000*Math.PI*2)
`
const BLIP_SIZE = 512
const BLIP = new Float64Array(BLIP_SIZE+1)
`
# Band-limited step (odd harmonics 1..31), normalized so
# BLIP[BLIP_SIZE] equals 1. Oscillators splice this in at
# discontinuities to suppress aliasing.
do ->
  for p in [1..31] by 2
    for i in [0..BLIP_SIZE] by 1
      x = (i/BLIP_SIZE-.5)*.5
      BLIP[i] += Math.sin(x*2*Math.PI*p)/p
  norm = BLIP[BLIP_SIZE]
  for i in [0..BLIP_SIZE] by 1
    BLIP[i] /= norm
# Pre-rendered white and colored (pink-ish, one-pole filtered) noise.
# NOTE(review): b0..b6 look like leftovers from a multi-pole pink-noise
# filter; only the one-pole `n` is used - confirm before deleting.
do ->
  n = 0
  b0 = b1 = b2 = b3 = b4 = b5 = b6 = 0
  for i in [0..99999] by 1
    white = Math.random()*2-1
    n = .99*n+.01*white
    pink = n*6
    WHITE_NOISE[i] = white
    COLORED_NOISE[i] = pink
# Decibel-style scaling: maps value in [0,1] onto [0,1] along an
# exponential curve whose steepness is `range` (0 -> 0, 1 -> 1).
DBSCALE = (value,range)->
  er = Math.exp(range)
  (Math.exp(value*range)/er - 1/er)/(1 - 1/er)
class SquareOscillator
  # Band-limited square/pulse oscillator. Hard edges are smoothed by
  # mixing the BLIP step table in through a small ring buffer of
  # pending corrections. `mod` controls pulse width.
  constructor:(@voice,osc)->
    @invSampleRate = 1/@voice.engine.sampleRate
    @tune = 1
    @buffer = new Float64Array(32)    # pending band-limiting corrections
    @init(osc) if osc?
  init:(osc,@sync)->
    @phase = if @sync then 0 else Math.random()
    @sig = -1
    @index = 0
    @update(osc)
    for i in [0..@buffer.length-1]
      @buffer[i] = 0
    return
  update:(osc,@sync)->
    # Coarse tune snaps to 1/48 steps over 4 octaves; fine tune gets a
    # gentle cubic response around center.
    c = Math.round(osc.coarse*48)/48
    fine = osc.tune*2-1
    fine = fine*(1+fine*fine)*.5
    @tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
    @analog_tune = @tune
  process:(freq,mod)->
    dp = @analog_tune*freq*@invSampleRate
    @phase += dp
    m = .5-mod*.49    # pulse width: 0.5 (square) down to 0.01
    avg = 1-2*m       # DC offset of the asymmetric pulse
    if @sig<0
      if @phase>=m
        # Rising edge: splat a fractionally-delayed BLIP correction
        # into the ring buffer.
        @sig = 1
        dp = Math.max(0,Math.min(1,(@phase-m)/dp))
        dp *= 16
        dpi = Math.floor(dp)
        a = dp-dpi
        index = @index
        for i in [0..31] by 1
          break if dpi>=512
          @buffer[index] += -1+BLIP[dpi]*(1-a)+BLIP[dpi+1]*a
          dpi += 16
          index = (index+1)%@buffer.length
    else
      if @phase>=1
        # Falling edge at phase wrap: inverted BLIP, plus a tiny random
        # retune for analog-style drift when not synced.
        dp = Math.max(0,Math.min(1,(@phase-1)/dp))
        dp *= 16
        dpi = Math.floor(dp)
        a = dp-dpi
        index = @index
        @sig = -1
        @phase -= 1
        @analog_tune = if @sync then @tune else @tune*(1+(Math.random()-.5)*.002)
        for i in [0..31] by 1
          break if dpi>=512
          @buffer[index] += 1-BLIP[dpi]*(1-a)-BLIP[dpi+1]*a
          dpi += 16
          index = (index+1)%@buffer.length
    sig = @sig+@buffer[@index]
    @buffer[@index] = 0
    @index = (@index+1)%@buffer.length
    sig-avg
class SawOscillator
  # Band-limited saw with a `mod`-controlled mid-cycle step (morphing
  # toward a square-ish shape). Discontinuities are patched with the
  # BLIP table via a ring buffer of pending corrections.
  constructor:(@voice,osc)->
    @invSampleRate = 1/@voice.engine.sampleRate
    @tune = 1
    @buffer = new Float64Array(32)    # pending band-limiting corrections
    @init(osc) if osc?
  init:(osc,@sync)->
    @phase = if @sync then 0 else Math.random()
    @sig = -1
    @index = 0
    @jumped = false    # true once the mid-cycle step has fired
    @update(osc)
    for i in [0..@buffer.length-1]
      @buffer[i] = 0
    return
  update:(osc,@sync)->
    # Same tuning law as SquareOscillator.
    c = Math.round(osc.coarse*48)/48
    fine = osc.tune*2-1
    fine = fine*(1+fine*fine)*.5
    @tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
    @analog_tune = @tune
  process:(freq,mod)->
    dphase = @analog_tune*freq*@invSampleRate
    @phase += dphase
    #if @phase>=1
    #  @phase -= 1
    #return 1-2*@phase
    slope = 1+mod
    if not @jumped
      sig = 1-2*@phase*slope
      if @phase>=.5
        # Mid-cycle step of height `mod`: add a scaled BLIP correction.
        @jumped = true
        sig = mod-2*(@phase-.5)*slope
        dp = Math.max(0,Math.min(1,(@phase-.5)/dphase))
        dp *= 16
        dpi = Math.floor(dp)
        a = dp-dpi
        index = @index
        if mod>0
          for i in [0..31] by 1
            break if dpi>=512
            @buffer[index] += (-1+BLIP[dpi]*(1-a)+BLIP[dpi+1]*a)*mod
            dpi += 16
            index = (index+1)%@buffer.length
    else
      sig = mod-2*(@phase-.5)*slope
      if @phase>=1
        # Cycle wrap: full-height reset step plus analog drift.
        @jumped = false
        dp = Math.max(0,Math.min(1,(@phase-1)/dphase))
        dp *= 16
        dpi = Math.floor(dp)
        a = dp-dpi
        index = @index
        @phase -= 1
        sig = 1-2*@phase*slope
        @analog_tune = if @sync then @tune else @tune*(1+(Math.random()-.5)*.002)
        for i in [0..31] by 1
          break if dpi>=512
          @buffer[index] += -1+BLIP[dpi]*(1-a)+BLIP[dpi+1]*a
          dpi += 16
          index = (index+1)%@buffer.length
    sig += @buffer[@index]
    @buffer[@index] = 0
    @index = (@index+1)%@buffer.length
    # Presumably compensates DC/group delay of the 16-step correction
    # spread - TODO confirm.
    offset = 16*2*dphase*slope
    sig+offset
class SineOscillator
  # Phase-modulation "sine" oscillator: two nested smoothstep-sine
  # modulators give FM-like brightness as `mod` increases.
  constructor:(@voice,osc)->
    @sampleRate = @voice.engine.sampleRate
    @invSampleRate = 1/@voice.engine.sampleRate
    @maxRatio = @sampleRate/Math.PI/5/(2*Math.PI)
    @tune = 1
    @init(osc) if osc?
  init:(osc,@sync)->
    @phase = if @sync then .25 else Math.random()
    @update(osc)
  update:(osc,@sync)->
    # Same tuning law as the other oscillators.
    c = Math.round(osc.coarse*48)/48
    fine = osc.tune*2-1
    fine = fine*(1+fine*fine)*.5
    @tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
    # Normalize modulation depth against pitch so brightness is even
    # across the keyboard.
    @modnorm = 25/Math.sqrt(@tune*@voice.freq)
    @dphase = @tune*@invSampleRate
  # Table-lookup sine (unused by process(); sinWave2 is used instead).
  sinWave:(x)->
    x = (x-Math.floor(x))*10000
    ix = Math.floor(x)
    ax = x-ix
    SIN_TABLE[ix]*(1-ax)+SIN_TABLE[ix+1]*ax
  # Cheap sine substitute: smoothstep folded into a bipolar wave.
  sinWave2:(x)->
    x = 2*(x-Math.floor(x))
    if x>1
      x = 2-x
    x*x*(3-2*x)*2-1
  process:(freq,mod)->
    @phase = (@phase+freq*@dphase)
    if @phase>=1
      @phase -= 1
      # Tiny per-cycle random retune for analog drift when not synced.
      @analog_tune = if @sync then @tune else @tune*(1+(Math.random()-.5)*.002)
      @dphase = @analog_tune*@invSampleRate
    m1 = mod*@modnorm
    m2 = mod*m1
    p = @phase
    return @sinWave2(p+m1*@sinWave2(p+m2*@sinWave2(p)))
class VoiceOscillator
  # Formant ("voice") oscillator: two overlapping grains, each a pair
  # of sines at formant frequencies F1/F2, windowed at the fundamental
  # rate; `mod` morphs through the vowel tables below.
  constructor:(@voice,osc)->
    @sampleRate = @voice.engine.sampleRate
    @invSampleRate = 1/@voice.engine.sampleRate
    @tune = 1
    @init(osc) if osc?
    # Formant frequency tables (Hz); mod interpolates along them.
    @f1 = [320,500,700,1000,500,320,700,500,320,320]
    @f2 = [800,1000,1150,1400,1500,1650,1800,2300,3200,3200]
  init:(osc)->
    @phase = 0
    @grain1_p1 = .25
    @grain1_p2 = .25
    @grain2_p1 = .25
    @grain2_p2 = .25
    @update(osc)
  update:(osc)->
    # Same tuning law as the other oscillators.
    c = Math.round(osc.coarse*48)/48
    fine = osc.tune*2-1
    fine = fine*(1+fine*fine)*.5
    @tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
    @modnorm = 25/Math.sqrt(@tune*@voice.freq)
    @dphase = @tune*@invSampleRate
  # Cheap sine substitute (smoothstep folded into a bipolar wave).
  sinWave2:(x)->
    x = 2*(x-Math.floor(x))
    if x>1
      x = 2-x
    x*x*(3-2*x)*2-1
  process:(freq,mod)->
    p1 = @phase<1
    @phase = (@phase+freq*@dphase)
    # Interpolate F1/F2 between neighbouring table entries.
    m = mod*(@f1.length-2)
    im = Math.floor(m)
    am = m-im
    f1 = @f1[im]*(1-am)+@f1[im+1]*am
    f2 = @f2[im]*(1-am)+@f2[im+1]*am
    # Retrigger the idle grain at each half of the 2-cycle phase.
    if p1 and @phase>=1
      @grain2_p1 = .25
      @grain2_p2 = .25
    if @phase>=2
      @phase -= 2
      @grain1_p1 = .25
      @grain1_p2 = .25
    x = @phase-1
    x *= x*x
    vol = 1-Math.abs(1-@phase)    # triangular window for grain 1
    #vol = (@sinWave2(x*.5+.5)+1)*.5
    sig = vol*(@sinWave2(@grain1_p1)*.25+.125*@sinWave2(@grain1_p2))
    @grain1_p1 += f1*@invSampleRate
    @grain1_p2 += f2*@invSampleRate
    x = ((@phase+1)%2)-1
    x *= x*x
    #vol = (@sinWave2(x*.5+.5)+1)*.5
    vol = if @phase<1 then 1-@phase else @phase-1    # complementary window
    sig += vol*(@sinWave2(@grain2_p1)*.25+.125*@sinWave2(@grain2_p2))
    sig += @sinWave2(@phase+.25)    # fundamental under the formants
    @grain2_p1 += f1*@invSampleRate
    @grain2_p2 += f2*@invSampleRate
    sig
class StringOscillator
  # Karplus-Strong style string: a delay line one period long with a
  # recirculating reflection; `mod` sets how much fresh noise is
  # injected each sample (sustained excitation vs ringing).
  constructor:(@voice,osc)->
    @sampleRate = @voice.engine.sampleRate
    @invSampleRate = 1/@voice.engine.sampleRate
    @maxRatio = @sampleRate/Math.PI/5/(2*Math.PI)
    @tune = 1
    @init(osc) if osc?
  init:(osc)->
    @index = 0
    @buffer = new Float64Array(@sampleRate/10)    # holds >= one period down to 10 Hz
    @update(osc)
    @prev = 0
    @power = 1
  update:(osc)->
    # Same tuning law as the other oscillators.
    c = Math.round(osc.coarse*48)/48
    fine = osc.tune*2-1
    fine = fine*(1+fine*fine)*.5
    @tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
  process:(freq,mod)->
    period = @sampleRate/(freq*@tune)
    # Fractional read tap one period behind the write head.
    x = (@index-period+@buffer.length)%@buffer.length
    ix = Math.floor(x)
    a = x-ix
    reflection = @buffer[ix]*(1-a)+@buffer[(ix+1)%@buffer.length]*a
    m = Math.exp(Math.log(0.99)/freq)
    # NOTE(review): the next line makes the damping coefficient above a
    # dead store (no low-pass smoothing) - looks like a tuning
    # leftover; confirm intent before removing either line.
    m = 1
    r = reflection*m+@prev*(1-m)
    @prev = r
    n = Math.random()*2-1
    m = mod*.5
    m = m*m*.999+.001    # noise injection level, squared knob response
    sig = n*m+r
    # Slow AGC keeps the recirculating energy roughly constant.
    @power = Math.max(Math.abs(sig)*.1+@power*.9,@power*.999)
    sig /= (@power+.0001)
    @buffer[@index] = sig
    @index += 1
    @index = 0 if @index>=@buffer.length
    sig
  # Earlier variant kept for reference; not called anywhere visible here.
  processOld:(freq,mod)->
    period = @sampleRate/(freq*@tune)
    x = (@index-period+@buffer.length)%@buffer.length
    ix = Math.floor(x)
    a = x-ix
    reflection = @buffer[ix]*(1-a)+@buffer[(ix+1)%@buffer.length]*a
    #m = .1+Math.exp(-period*mod*.01)*.89
    m = mod
    r = reflection*m+@prev*(1-m)
    @prev = r
    sig = (Math.random()*2-1)*.01+r*.99
    @buffer[@index] = sig/@power
    @power = Math.abs(sig)*.001+@power*.999
    @index += 1
    @index = 0 if @index>=@buffer.length
    sig
class Noise
  # Per-voice noise source with a deterministic LCG. `mod` crossfades
  # from white (0) to colored/pink-ish (1), where the colored signal is
  # a one-pole low-passed copy of the white noise, boosted 6x.
  constructor:(@voice)->
  init:()->
    @phase = 0
    @seed = 1382
    @n = 0
  process:(mod)->
    # Linear congruential step, masked to 31 bits.
    @seed = (@seed*13907+12345)&0x7FFFFFFF
    w = (@seed/0x80000000)*2-1
    @n = @n*.99+w*.01
    w*(1-mod)+@n*6*mod
class ModEnvelope
  # Linear ADSR used as a modulation source. @phase encodes the stage:
  # [0,1) attack, [1,2) decay, [2,3) sustain, >=3 release.
  constructor:(@voice)->
    @sampleRate = @voice.engine.sampleRate
  init:(@params)->
    @phase = 0
    @update()
  update:(a = @params.a)->
    # Per-sample phase increments; cubic knob response, up to ~20 s.
    @a = 1/(@sampleRate*20*Math.pow(a,3)+1)
    @d = 1/(@sampleRate*20*Math.pow(@params.d,3)+1)
    @s = @params.s
    @r = 1/(@sampleRate*20*Math.pow(@params.r,3)+1)
  process:(noteon)->
    if @phase<1
      sig = @sig = @phase = Math.min(1,@phase+@a)
    else if @phase<2
      @phase = Math.min(2,@phase+@d)
      sig = @sig = 1-(@phase-1)*(1-@s)
    else if @phase<3
      sig = @sig = @s
    else
      @phase = @phase+@r
      sig = Math.max(0,@sig*(1-(@phase-3)))
    # Key released before the release stage: jump straight to release.
    if @phase<3 and not noteon
      @phase = 3
    sig
class AmpEnvelope
  # Amplitude ADSR with exponential decay/release. @phase stages:
  # [0,1) attack, ==1 decay, (1,3) sustain (set to 2), >=3 release.
  # @sig is the current level; it is read externally for voice reaping
  # and voice stealing.
  constructor:(@voice)->
    @sampleRate = @voice.engine.sampleRate
    @sig = 0
  init:(@params)->
    @phase = 0
    @sig = 0
    @update()
  update:(a = @params.a)->
    #@a = Math.exp(Math.log(1/@epsilon)/(@sampleRate*10*Math.pow(@params.a,3)+1))
    @a2 = 1/(@sampleRate*(20*Math.pow(a,3)+.00025))
    # Multiplicative per-sample decay/release factors (half-life form).
    @d = Math.exp(Math.log(0.5)/(@sampleRate*(10*Math.pow(@params.d,3)+0.001)))
    @s = DBSCALE(@params.s,2)    # dB-style sustain level mapping
    console.info("sustain #{@s}")
    @r = Math.exp(Math.log(0.5)/(@sampleRate*(10*Math.pow(@params.r,3)+0.001)))
  process:(noteon)->
    if @phase<1
      @phase += @a2
      sig = @sig = (@phase*.75+.25)*@phase    # slightly curved attack
      if @phase>=1
        sig = @sig = 1
        @phase = 1
    else if @phase==1
      sig = @sig = @sig*@d
      if sig <= @s
        sig = @sig = @s
        @phase = 2
    else if @phase<3
      sig = @sig = @s
    else
      sig = @sig = @sig*@r
    # Key released early: jump straight to the release stage.
    if @phase<3 and not noteon
      @phase = 3
    sig
class LFO
  # Multi-shape LFO. update() rebinds @process to the shape-specific
  # method. Saw/square/random-step variants smooth @out toward the
  # target to avoid zipper noise; sine can run at audio rate.
  constructor:(@voice)->
    @invSampleRate = 1/@voice.engine.sampleRate
  init:(@params,sync)->
    if sync
      # Phase-lock to engine time so synced voices share one LFO phase.
      rate = @params.rate
      rate = .1+rate*rate*rate*(100-.1)
      t = @voice.engine.getTime()*rate
      @phase = t%1
    else
      @phase = Math.random()
    @process = @processSine
    @update()
    @r1 = Math.random()*2-1
    @r2 = Math.random()*2-1
    @out = 0
  update:()->
    switch @params.type
      when 0 then @process = @processSaw
      when 1 then @process = @processSquare
      when 2 then @process = @processSine
      when 3 then @process = @processTriangle
      when 4 then @process = @processRandom
      when 5 then @process = @processRandomStep
    # Frequency of the current key, for audio-rate mode.
    @audio_freq = 440*Math.pow(Math.pow(2,1/12),@voice.key-57)
  processSine:(rate)->
    if @params.audio
      # Audio-rate mode: rate becomes a ratio of the note frequency.
      r = if rate<.5 then .25+rate*rate/.25*.75 else rate*rate*4
      #r = Math.pow(2,(Math.round(rate*48))/12)*.25
      rate = @audio_freq*r
    else
      rate = .01+rate*rate*20
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    # Smoothstep-based pseudo-sine.
    p = @phase*2
    if p<1
      p*p*(3-2*p)*2-1
    else
      p -= 1
      1-p*p*(3-2*p)*2
  processTriangle:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    return (1-4*Math.abs(@phase-.5))
  processSaw:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    out = (1-@phase*2)
    @out = @out*.97+out*.03    # smoothed to soften the reset edge
  processSquare:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    out = if @phase<.5 then 1 else -1
    @out = @out*.97+out*.03    # smoothed to soften the edges
  processRandom:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    if @phase>=1
      @phase -= 1
      @r1 = @r2
      @r2 = Math.random()*2-1
    # Linear interpolation between successive random targets.
    @r1*(1-@phase)+@r2*@phase
  processRandomStep:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    if @phase>=1
      @phase -= 1
      @r1 = Math.random()*2-1
    @out = @out*.97+@r1*.03    # smoothed sample-and-hold
  # Placeholders; update() never selects these.
  processDiscreteRandom:()->
  processSmoothRandom:()->
class Filter
  # Per-voice biquad filter (LP/BP/HP chosen by inputs.filter.type).
  # sin/cos of the center frequency come from SIN_TABLE; an index
  # offset of +2500 is a quarter period, i.e. cosine. filter.slope
  # enables a second cascaded stage for a steeper rolloff.
  constructor:(@voice)->
    @sampleRate = @voice.engine.sampleRate
    @invSampleRate = 1/@voice.engine.sampleRate
    @halfSampleRate = @sampleRate*.5
  init:(@layer)->
    # Direct-form II state for the two cascaded stages.
    @fm00 = 0
    @fm01 = 0
    @fm10 = 0
    @fm11 = 0
    @update()
  update:()->
    switch @layer.inputs.filter.type
      when 0
        @process = @processLowPass
      when 1
        @process = @processBandPass
      when 2
        @process = @processHighPass
  processHighPass:(sig,cutoff,q)->
    # Cutoff (Hz) clamped to Nyquist, as a fraction of the sample rate,
    # then scaled to sine-table units for the interpolated lookups.
    w0 = Math.max(0,Math.min(@halfSampleRate,cutoff))*@invSampleRate
    w0 *= 10000
    iw0 = Math.floor(w0)
    aw0 = w0-iw0
    cosw0 = (1-aw0)*SIN_TABLE[iw0+2500]+aw0*SIN_TABLE[iw0+2501]
    sinw0 = (1-aw0)*SIN_TABLE[iw0]+aw0*SIN_TABLE[iw0+1]
    alpha = sinw0 / (2 * q)
    invOnePlusAlpha = 1/(1 + alpha)
    a0 = (-2 * cosw0) * invOnePlusAlpha
    a1 = (1 - alpha) * invOnePlusAlpha
    onePlusCosw0 = 1 + cosw0
    b0 = onePlusCosw0*.5* invOnePlusAlpha
    b1 = -onePlusCosw0* invOnePlusAlpha
    b2 = b0
    w = sig - a0*@fm00 - a1*@fm01
    sig = b0*w + b1*@fm00 + b2*@fm01
    @fm01 = @fm00
    @fm00 = w
    if @layer.inputs.filter.slope
      # Second cascaded stage doubles the slope.
      w = sig - a0*@fm10 - a1*@fm11
      sig = b0*w + b1*@fm10 + b2*@fm11
      @fm11 = @fm10
      @fm10 = w
    sig
  processBandPass:(sig,cutoff,q)->
    w0 = Math.max(0,Math.min(@halfSampleRate,cutoff))*@invSampleRate
    w0 *= 10000
    iw0 = Math.floor(w0)
    aw0 = w0-iw0
    cosw0 = (1-aw0)*SIN_TABLE[iw0+2500]+aw0*SIN_TABLE[iw0+2501]
    sinw0 = (1-aw0)*SIN_TABLE[iw0]+aw0*SIN_TABLE[iw0+1]
    alpha = sinw0 / (2 * q)
    invOnePlusAlpha = 1/(1 + alpha)
    oneLessCosw0 = 1 - cosw0
    a0 = (-2 * cosw0) * invOnePlusAlpha
    a1 = (1 - alpha) * invOnePlusAlpha
    b0 = q * alpha * invOnePlusAlpha
    b1 = 0
    b2 = -b0
    w = sig - a0*@fm00 - a1*@fm01
    sig = b0*w + b1*@fm00 + b2*@fm01
    @fm01 = @fm00
    @fm00 = w
    if @layer.inputs.filter.slope
      w = sig - a0*@fm10 - a1*@fm11
      sig = b0*w + b1*@fm10 + b2*@fm11
      @fm11 = @fm10
      @fm10 = w
    sig
  processLowPass:(sig,cutoff,q)->
    w0 = Math.max(0,Math.min(@halfSampleRate,cutoff))*@invSampleRate
    w0 *= 10000
    iw0 = Math.floor(w0)
    aw0 = w0-iw0
    cosw0 = (1-aw0)*SIN_TABLE[iw0+2500]+aw0*SIN_TABLE[iw0+2501]
    sinw0 = (1-aw0)*SIN_TABLE[iw0]+aw0*SIN_TABLE[iw0+1]
    alpha = sinw0 / (2 * q)
    invOnePlusAlpha = 1/(1 + alpha)
    oneLessCosw0 = 1 - cosw0
    b1 = oneLessCosw0 * invOnePlusAlpha
    b0 = b1*.5
    b2 = b0
    a0 = (-2 * cosw0) * invOnePlusAlpha
    a1 = (1 - alpha) * invOnePlusAlpha
    w = sig - a0*@fm00 - a1*@fm01
    sig = b0*w + b1*@fm00 + b2*@fm01
    @fm01 = @fm00
    @fm00 = w
    if @layer.inputs.filter.slope
      w = sig - a0*@fm10 - a1*@fm11
      sig = b0*w + b1*@fm10 + b2*@fm11
      @fm11 = @fm10
      @fm10 = w
    sig
class Distortion
  # Waveshaping distortion: drives the signal by up to 100x, soft-clips
  # it with an exponential curve, then crossfades dry/wet by @amount.
  constructor:()->
    @amount = .5
    @rate = .5
  update:(data)->
    @amount = data.amount
    @rate = data.rate
  process:(buffer,length)->
    gain = 1+@rate*@rate*99
    wet = @amount
    dry = 1-@amount
    for ch in [0,1]
      channel = buffer[ch]
      for i in [0..length-1] by 1
        sig = channel[i]
        s = sig*gain
        shaped = if s<0 then -1+Math.exp(s) else 1-Math.exp(-s)
        channel[i] = dry*sig+wet*shaped
    return
class BitCrusher
  # Sample-rate reducer / quantizer: holds the last quantized sample
  # for @amount input samples (downsampling) and rounds levels away
  # from zero onto a grid of 1/@rate steps.
  constructor:()->
    @phase = 0     # samples elapsed since the last hold update
    @left = 0      # currently-held quantized output, left
    @right = 0     # currently-held quantized output, right
  update:(data)->
    # amount: hold length in samples (2^0..2^8); rate: grid density.
    @amount = Math.pow(2,data.amount*8)
    @rate = Math.pow(2,(1-data.rate)*16)*2
  process:(buffer,length)->
    # (Removed dead locals `r`/`crush` that were recomputed every call
    # and never read.)
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      @phase += 1
      if @phase>@amount
        @phase -= @amount
        # Quantize away from zero so quiet signals don't vanish.
        @left = if left>0 then Math.ceil(left*@rate)/@rate else Math.floor(left*@rate)/@rate
        @right = if right>0 then Math.ceil(right*@rate)/@rate else Math.floor(right*@rate)/@rate
      buffer[0][i] = @left
      buffer[1][i] = @right
    return
class Chorus
  # Triple-tap modulated-delay chorus with light feedback into the
  # delay lines and an asymmetric tap mix per side.
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate)
    @right_buffer = new Float64Array(@sampleRate)
    # Free-running LFOs at mutually detuned rates (cycles/sample).
    @phase1 = Math.random()
    @phase2 = Math.random()
    @phase3 = Math.random()
    @f1 = 1.031/@sampleRate
    @f2 = 1.2713/@sampleRate
    @f3 = 0.9317/@sampleRate
    @index = 0
  update:(data)->
    @amount = Math.pow(data.amount,.5)
    @rate = data.rate
  # Linear-interpolated ring-buffer read at a fractional position.
  read:(buffer,pos)->
    pos += buffer.length if pos<0
    i = Math.floor(pos)
    a = pos-i
    buffer[i]*(1-a)+buffer[(i+1)%buffer.length]*a
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = @left_buffer[@index] = buffer[0][i]
      right = @right_buffer[@index] = buffer[1][i]
      @phase1 += @f1*(.5+.5*@rate)
      @phase2 += @f2*(.5+.5*@rate)
      @phase3 += @f3*(.5+.5*@rate)
      # Swept delays, up to 0.4% of the buffer length.
      p1 = (1+Math.sin(@phase1*Math.PI*2))*@left_buffer.length*.002
      p2 = (1+Math.sin(@phase2*Math.PI*2))*@left_buffer.length*.002
      p3 = (1+Math.sin(@phase3*Math.PI*2))*@left_buffer.length*.002
      @phase1 -= 1 if @phase1>=1
      @phase2 -= 1 if @phase2>=1
      @phase3 -= 1 if @phase3>=1
      s1 = @read(@left_buffer,@index-p1)
      s2 = @read(@right_buffer,@index-p2)
      s3 = @read(@right_buffer,@index-p3)
      pleft = @amount*(s1*.2+s2*.7+s3*.1)
      pright = @amount*(s1*.6+s2*.2+s3*.2)
      left += pleft
      right += pright
      @left_buffer[@index] += pleft*.5*@amount
      # Fixed: the right delay line was fed `pleft` (copy-paste), so
      # both feedback paths carried the left wet signal.
      @right_buffer[@index] += pright*.5*@amount
      @index += 1
      @index = 0 if @index>=@left_buffer.length
      buffer[0][i] = left
      buffer[1][i] = right
    return
class Phaser
  # Modulated short-delay effect; the wet tap is mixed against the
  # inverted dry signal to carve moving notches.
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate)
    @right_buffer = new Float64Array(@sampleRate)
    @phase1 = Math.random()
    @phase2 = Math.random()
    # Slow, slightly detuned sweep rates (cycles per sample).
    @f1 = .0573/@sampleRate
    @f2 = .0497/@sampleRate
    @index = 0
  update:(data)->
    @amount = data.amount
    @rate = data.rate
  # Linear-interpolated ring-buffer read at a fractional delay.
  read:(buffer,pos)->
    pos += buffer.length if pos<0
    i = Math.floor(pos)
    a = pos-i
    buffer[i]*(1-a)+buffer[(i+1)%buffer.length]*a
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      @phase1 += @f1*(.5+.5*@rate)
      @phase2 += @f2*(.5+.5*@rate)
      o1 = (1+Math.sin(@phase1*Math.PI*2))/2
      p1 = @sampleRate*(.0001+.05*o1)    # 0.1..50 ms swept delay
      o2 = (1+Math.sin(@phase2*Math.PI*2))/2
      p2 = @sampleRate*(.0001+.05*o2)
      @phase1 -= 1 if @phase1>=1
      @phase2 -= 1 if @phase2>=1
      @left_buffer[@index] = left
      @right_buffer[@index] = right
      s1 = @read(@left_buffer,@index-p1)
      s2 = @read(@right_buffer,@index-p2)
      # Regeneration into the delay line, scaled by rate.
      @left_buffer[@index] += s1*@rate*.9
      @right_buffer[@index] += s2*@rate*.9
      # NOTE(review): wet MINUS dry here (Flanger uses plus) -
      # presumably the intended phase inversion distinguishing the
      # phaser sound; confirm before "fixing".
      buffer[0][i] = s1*@amount-left
      buffer[1][i] = s2*@amount-right
      @index += 1
      @index = 0 if @index>=@left_buffer.length
    return
class Flanger
  # Modulated short-delay effect; same topology as Phaser but with the
  # wet tap ADDED to the dry signal, fixed-rate LFOs and zero initial
  # phases.
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate)
    @right_buffer = new Float64Array(@sampleRate)
    @phase1 = 0 #Math.random()
    @phase2 = 0 #Math.random()
    # Slow, slightly detuned sweep rates (cycles per sample).
    @f1 = .0573/@sampleRate
    @f2 = .0497/@sampleRate
    @index = 0
  update:(data)->
    @amount = data.amount
    @rate = data.rate
  # Linear-interpolated ring-buffer read at a fractional delay.
  read:(buffer,pos)->
    pos += buffer.length if pos<0
    i = Math.floor(pos)
    a = pos-i
    buffer[i]*(1-a)+buffer[(i+1)%buffer.length]*a
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      @phase1 += @f1
      @phase2 += @f2
      o1 = (1+Math.sin(@phase1*Math.PI*2))/2
      p1 = @sampleRate*(.0001+.05*o1)    # 0.1..50 ms swept delay
      o2 = (1+Math.sin(@phase2*Math.PI*2))/2
      p2 = @sampleRate*(.0001+.05*o2)
      @phase1 -= 1 if @phase1>=1
      @phase2 -= 1 if @phase2>=1
      @left_buffer[@index] = left #+s1*.3
      @right_buffer[@index] = right #+s2*.3
      s1 = @read(@left_buffer,@index-p1)
      s2 = @read(@right_buffer,@index-p2)
      # Regeneration into the delay line, scaled by rate.
      @left_buffer[@index] += s1*@rate*.9
      @right_buffer[@index] += s2*@rate*.9
      buffer[0][i] = s1*@amount+left
      buffer[1][i] = s2*@amount+right
      @index += 1
      @index = 0 if @index>=@left_buffer.length
    return
class Delay
  # Ping-pong tempo delay: each channel reads its echo from the
  # OPPOSITE channel's delay line, with slightly different tap lengths
  # (@L vs @R) for stereo width. Feedback strength follows @amount.
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate*3)
    @right_buffer = new Float64Array(@sampleRate*3)
    @index = 0
  update:(data)->
    @amount = data.amount
    @rate = data.rate
    # Map the rate knob to a tempo and derive tap lengths in samples;
    # the right tap is offset by 0.75 ms.
    bpm = (30+Math.pow(@rate,2)*170*4)*4
    spb = @sampleRate/(bpm/60)
    @L = Math.round(spb*4)
    @R = Math.round(spb*4+@sampleRate*0.00075)
    @fb = @amount*.95
  process:(buffer,length)->
    size = @left_buffer.length
    i = 0
    while i<length
      lTap = @right_buffer[(@index+size-@L)%size]
      rTap = @left_buffer[(@index+size-@R)%size]
      l = buffer[0][i]+lTap*@fb
      r = buffer[1][i]+rTap*@fb
      @left_buffer[@index] = l
      @right_buffer[@index] = r
      buffer[0][i] = l
      buffer[1][i] = r
      @index = (@index+1)%size
      i += 1
    return
class Spatializer
 # Crosstalk-based stereo spatializer: a smoothed copy of each channel
 # is delayed by an ear-distance-derived time and subtracted from the
 # opposite channel, then constant-power panning is applied.
 constructor:(@engine)->
  @sampleRate = @engine.sampleRate
  # 100 ms of history per channel.
  @left_buffer = new Float64Array(@sampleRate/10)
  @right_buffer = new Float64Array(@sampleRate/10)
  @index = 0
  # One-pole smoothed copies of the inputs.
  @left = 0
  @right = 0
  # Crosstalk delays in samples (path length in metres / 340 m/s).
  @left_delay1 = Math.round(@sampleRate*9.7913/340)
  @right_delay1 = Math.round(@sampleRate*11.1379/340)
  # Secondary reflection delays; currently not used by process().
  @left_delay2 = Math.round(@sampleRate*11.3179/340)
  @right_delay2 = Math.round(@sampleRate*12.7913/340)
 # spatialize: crosstalk amount (0..1); pan: 0 = hard left, 1 = hard right.
 process:(buffer,length,spatialize,pan)->
  mnt = spatialize
  # Constant-power pan, attenuated as spatialization increases.
  left_pan = Math.cos(pan*Math.PI/2)/(1+spatialize)
  right_pan = Math.sin(pan*Math.PI/2)/(1+spatialize)
  left_buffer = buffer[0]
  right_buffer = buffer[1]
  for i in [0..length-1] by 1
   left = left_buffer[i]
   right = right_buffer[i]
   # Low-pass the signals before writing them into the history buffers.
   @left = left*.5+@left*.5
   @right = right*.5+@right*.5
   @left_buffer[@index] = @left
   @right_buffer[@index] = @right
   # Subtract the delayed opposite channel (interaural crosstalk).
   left_buffer[i] = (left-mnt*@right_buffer[(@index+@right_buffer.length-@left_delay1)%@right_buffer.length])*left_pan
   right_buffer[i] = (right-mnt*@left_buffer[(@index+@left_buffer.length-@right_delay1)%@left_buffer.length])*right_pan
   @index += 1
   @index = 0 if @index>=@left_buffer.length
  return
class EQ
 # Three-band EQ: a biquad band-pass extracts the mid band, a one-pole
 # low-pass extracts the lows, and the high band is the residual.
 constructor:(@engine)->
  @sampleRate = @engine.sampleRate
  # Band-pass coefficients for the mid band (RBJ cookbook band-pass,
  # normalised by 1/(1+alpha)).  Use a local for the centre frequency:
  # @mid is reused below as the mid-band *gain*.
  midFreq = 900
  q = .5
  w0 = 2*Math.PI*midFreq/@sampleRate
  cosw0 = Math.cos(w0)
  sinw0 = Math.sin(w0)
  alpha = sinw0 / (2 * q)
  invOnePlusAlpha = 1/(1 + alpha)
  @a0 = (-2 * cosw0) * invOnePlusAlpha
  @a1 = (1 - alpha) * invOnePlusAlpha
  @b0 = q * alpha * invOnePlusAlpha
  @b1 = 0
  @b2 = -@b0
  # Per-channel filter state: one-pole low-pass accumulators and the
  # biquad memories, plus the mono state used by processBandPass().
  @llow = 0
  @rlow = 0
  @lfm0 = 0
  @lfm1 = 0
  @rfm0 = 0
  @rfm1 = 0
  @fm0 = 0
  @fm1 = 0
  # Band gains (unity by default; update() scales the 0..1 UI range by 2).
  @low = 1
  @mid = 1
  @high = 1
 update:(data)->
  @low = data.low*2
  @mid = data.mid*2
  @high = data.high*2
 # Direct-form-II band-pass step (mono helper; process() inlines the
 # same maths per channel).  Fixed: the coefficients are instance
 # fields — the original referenced bare a0/a1/b0/b1/b2, which are
 # undefined and would throw if this method were ever called.
 processBandPass:(sig,cutoff,q)->
  w = sig - @a0*@fm0 - @a1*@fm1
  sig = @b0*w + @b1*@fm0 + @b2*@fm1
  @fm1 = @fm0
  @fm0 = w
  sig
 # In-place stereo processing for `length` samples.
 process:(buffer,length)->
  for i in [0..length-1] by 1
   left = buffer[0][i]
   right = buffer[1][i]
   # Mid band via the biquad band-pass.
   lw = left - @a0*@lfm0 - @a1*@lfm1
   lmid = @b0*lw + @b1*@lfm0 + @b2*@lfm1
   @lfm1 = @lfm0
   @lfm0 = lw
   left -= lmid
   # Low band via one-pole low-pass; the high band is what remains.
   @llow = left*.1+@llow*.9
   lhigh = left-@llow
   buffer[0][i] = @llow*@low+lmid*@mid+lhigh*@high
   rw = right - @a0*@rfm0 - @a1*@rfm1
   rmid = @b0*rw + @b1*@rfm0 + @b2*@rfm1
   @rfm1 = @rfm0
   @rfm0 = rw
   right -= rmid
   @rlow = right*.1+@rlow*.9
   rhigh = right-@rlow
   buffer[1][i] = @rlow*@low+rmid*@mid+rhigh*@high
  return
class CombFilter
 # Schroeder lowpass-feedback comb filter (Freeverb style): the delayed
 # sample is damped by a one-pole filter before being fed back.
 constructor:(@length,@feedback = .5)->
  @buffer = new Float64Array(@length)
  @index = 0
  @store = 0
  @damp = .2
 # Push one input sample, return the sample delayed by @length.
 process:(input)->
  delayed = @buffer[@index]
  # One-pole low-pass on the feedback path.
  @store = delayed*(1-@damp)+@store*@damp
  @buffer[@index] = input+@store*@feedback
  @index = 0 if (@index += 1)>=@length
  delayed
class AllpassFilter
 # Schroeder allpass diffuser (Freeverb style).
 constructor:(@length,@feedback = .5)->
  @buffer = new Float64Array(@length)
  @index = 0
 # Push one input sample, return the diffused output.
 process:(input)->
  delayed = @buffer[@index]
  out = delayed-input
  @buffer[@index] = input+delayed*@feedback
  @index = 0 if (@index += 1)>=@length
  out
class Reverb
 # Freeverb-style reverb: 8 parallel lowpass-feedback combs per channel
 # (right channel detuned by `stereospread` samples) followed by 4
 # series allpass diffusers, mixed back with the dry signal.
 constructor:(@engine)->
  @sampleRate = @engine.sampleRate
  # Classic Freeverb delay lengths in samples (tuned for 44.1 kHz).
  combtuning = [1116,1188,1277,1356,1422,1491,1557,1617]
  allpasstuning = [556,441,341,225]
  stereospread = 23
  @left_combs = []
  @right_combs = []
  @left_allpass = []
  @right_allpass = []
  @res = [0,0]
  @spread = .25
  @wet = .025
  for c in combtuning
   @left_combs.push new CombFilter c,.9
   @right_combs.push new CombFilter c+stereospread,.9
  for a in allpasstuning
   @left_allpass.push new AllpassFilter a,.5
   @right_allpass.push new AllpassFilter a+stereospread,.5
 # data.amount -> wet level; data.rate -> decay time and stereo spread.
 update:(data)->
  @wet = data.amount*.05
  feedback = .7+Math.pow(data.rate,.25)*.29
  damp = .2 #.4-.4*data.rate
  for i in [0..@left_combs.length-1] by 1
   @left_combs[i].feedback = feedback
   @right_combs[i].feedback = feedback
   @left_combs[i].damp = damp
   @right_combs[i].damp = damp
  @spread = .5-data.rate*.5
 # In-place stereo processing: the mono sum feeds both comb banks.
 process:(buffer,length)->
  for s in [0..length-1] by 1
   outL = 0
   outR = 0
   left = buffer[0][s]
   right = buffer[1][s]
   input = (left+right)*.5
   for i in [0..@left_combs.length-1]
    outL += @left_combs[i].process(input)
    outR += @right_combs[i].process(input)
   for i in [0..@left_allpass.length-1]
    outL = @left_allpass[i].process(outL)
    outR = @right_allpass[i].process(outR)
   # Cross-mix L/R by @spread, then wet/dry mix.
   buffer[0][s] = (outL*(1-@spread)+outR*@spread)*@wet+left*(1-@wet)
   buffer[1][s] = (outR*(1-@spread)+outL*@spread)*@wet+right*(1-@wet)
  return
class Voice
@oscillators = [SawOscillator,SquareOscillator,SineOscillator,VoiceOscillator,StringOscillator]
constructor:(@engine)->
@osc1 = new SineOscillator @
@osc2 = new SineOscillator @
@noise = new Noise @
@lfo1 = new LFO @
@lfo2 = new LFO @
@filter = new Filter @
@env1 = new AmpEnvelope @
@env2 = new ModEnvelope @
@filter_increment = 0.0005*44100/@engine.sampleRate
@modulation = 0
@noteon_time = 0
init:(layer)->
if @osc1 not instanceof Voice.oscillators[layer.inputs.osc1.type]
@osc1 = new Voice.oscillators[layer.inputs.osc1.type] @
if @osc2 not instanceof Voice.oscillators[layer.inputs.osc2.type]
@osc2 = new Voice.oscillators[layer.inputs.osc2.type] @
@osc1.init(layer.inputs.osc1,layer.inputs.sync)
@osc2.init(layer.inputs.osc2,layer.inputs.sync)
@noise.init(layer,layer.inputs.sync)
@lfo1.init(layer.inputs.lfo1,layer.inputs.sync)
@lfo2.init(layer.inputs.lfo2,layer.inputs.sync)
@filter.init(layer)
@env1.init(layer.inputs.env1)
@env2.init(layer.inputs.env2)
@updateConstantMods()
update:()->
return if not @layer?
if @osc1 not instanceof Voice.oscillators[@layer.inputs.osc1.type]
@osc1 = new Voice.oscillators[@layer.inputs.osc1.type] @,@layer.inputs.osc1
if @osc2 not instanceof Voice.oscillators[@layer.inputs.osc2.type]
@osc2 = new Voice.oscillators[@layer.inputs.osc2.type] @,@layer.inputs.osc2
@osc1.update(@layer.inputs.osc1,@layer.inputs.sync)
@osc2.update(@layer.inputs.osc2,@layer.inputs.sync)
@env1.update()
@env2.update()
@lfo1.update()
@lfo2.update()
@filter.update()
@updateConstantMods()
updateConstantMods:()->
@osc1_amp = DBSCALE(@inputs.osc1.amp,3)
@osc2_amp = DBSCALE(@inputs.osc2.amp,3)
@osc1_mod = @inputs.osc1.mod
@osc2_mod = @inputs.osc2.mod
@noise_amp = DBSCALE(@inputs.noise.amp,3)
@noise_mod = @inputs.noise.mod
if @inputs.velocity.amp>0
p = @inputs.velocity.amp
p *= p
p *= 4
#amp = Math.pow(@velocity,p) #Math.exp((-1+@velocity)*5)
norm = @inputs.velocity.amp*4
amp = Math.exp(@velocity*norm)/Math.exp(norm)
#amp = @inputs.velocity.amp*amp+(1-@inputs.velocity.amp)
else
amp = 1
@osc1_amp *= amp
@osc2_amp *= amp
@noise_amp *= amp
c = Math.log(1024*@freq/22000)/(10*Math.log(2))
@cutoff_keymod = (c-.5)*@inputs.filter.follow
@cutoff_base = @inputs.filter.cutoff+@cutoff_keymod
@env2_amount = @inputs.env2.amount
@lfo1_rate = @inputs.lfo1.rate
@lfo1_amount = @inputs.lfo1.amount
@lfo2_rate = @inputs.lfo2.rate
@lfo2_amount = @inputs.lfo2.amount
# "Mod"
#
# "Filter Cutoff"
# "Filter Resonance"
#
# "Osc1 Mod"
# "Osc1 Amp"
#
# "Osc2 Mod"
# "Osc2 Amp"
#
# "Noise Amp"
# "Noise Color"
#
# "Env1 Attack"
# "Env2 Attack"
# "Env2 Amount"
#
# "LFO1 Amount"
# "LFO1 Rate"
#
# "LFO2 Amount"
# "LFO2 Rate"
# ]
switch @inputs.velocity.out
when 0 # Oscs Mod
mod = @velocity*(@inputs.velocity.amount-.5)*2
@osc1_mod = Math.min(1,Math.max(0,@osc1_mod+mod))
@osc2_mod = Math.min(1,Math.max(0,@osc2_mod+mod))
when 1 # Filter Cutoff
mod = @velocity*(@inputs.velocity.amount-.5)*2
@cutoff_base += mod
when 3 # Osc1 Mod
mod = @velocity*(@inputs.velocity.amount-.5)*2
@osc1_mod = Math.min(1,Math.max(0,@osc1_mod+mod))
when 4 # Osc1 Amp
mod = @velocity*(@inputs.velocity.amount-.5)*2
@osc1_amp = Math.max(0,@osc1_amp+mod)
when 5 # Osc2 Mod
mod = @velocity*(@inputs.velocity.amount-.5)*2
@osc2_mod = Math.min(1,Math.max(0,@osc2_mod+mod))
when 6 # Osc2 Amp
mod = @velocity*(@inputs.velocity.amount-.5)*2
@osc2_amp = Math.max(0,@osc2_amp+mod)
when 7 # Noise Amp
mod = @velocity*(@inputs.velocity.amount-.5)*2
@noise_amp = Math.max(0,@noise_amp+mod)
when 8 # Noise Color
mod = @velocity*(@inputs.velocity.amount-.5)*2
@noise_mod = Math.min(1,Math.max(0,@noise_mod+mod))
when 9 # Env1 Attack
mod = @velocity*(@inputs.velocity.amount-.5)*2
a = Math.max(0,Math.min(@inputs.env1.a+mod,1))
@env1.update(a)
when 10 # Env2 Attack
mod = @velocity*(@inputs.velocity.amount-.5)*2
a = Math.max(0,Math.min(@inputs.env2.a+mod,1))
@env2.update(a)
when 11 # Env2 Amount
mod = @velocity*(@inputs.velocity.amount-.5)*2
@env2_amount = Math.max(0,Math.min(1,@env2_amount+mod))
when 12 # LFO1 Amount
mod = @velocity*(@inputs.velocity.amount-.5)*2
@lfo1_amount = Math.max(0,@lfo1_amount+mod)
when 13 # LFO1 Rate
mod = @velocity*(@inputs.velocity.amount-.5)*2
@lfo1_rate = Math.min(1,Math.max(0,@lfo1_rate+mod))
when 14 # LFO2 Amount
mod = @velocity*(@inputs.velocity.amount-.5)*2
@lfo2_amount = Math.max(0,@lfo2_amount+mod)
when 15 # LFO2 Rate
mod = @velocity*(@inputs.velocity.amount-.5)*2
@lfo2_rate = Math.min(1,Math.max(0,@lfo2_rate+mod))
if @freq?
c = Math.log(1024*@freq/22000)/(10*Math.log(2))
@cutoff_keymod = (c-.5)*@inputs.filter.follow
noteOn:(layer,@key,velocity,legato=false)->
@velocity = velocity/127
if @layer?
@layer.removeVoice @
@layer = layer
@inputs = @layer.inputs
if legato and @on
@freq = 440*Math.pow(Math.pow(2,1/12),@key-57)
if layer.last_key?
@glide_from = layer.last_key
@glide = true
@glide_phase = 0
glide_time = (@inputs.glide*.025+Math.pow(@inputs.glide,16)*.975)*10
@glide_inc = 1/(glide_time*@engine.sampleRate+1)
else
@freq = 440*Math.pow(Math.pow(2,1/12),@key-57)
@init @layer
@on = true
@cutoff = 0
@modulation = @layer.instrument.modulation
@pitch_bend = @layer.instrument.pitch_bend
@modulation_v = 0
@pitch_bend_v = 0
@noteon_time = Date.now()
noteOff:()->
@on = false
process:()->
osc1_mod = @osc1_mod
osc2_mod = @osc2_mod
osc1_amp = @osc1_amp
osc2_amp = @osc2_amp
if @glide
k = @glide_from*(1-@glide_phase)+@key*@glide_phase
osc1_freq = osc2_freq = 440*Math.pow(Math.pow(2,1/12),k-57)
@glide_phase += @glide_inc
if @glide_phase>=1
@glide = false
else
osc1_freq = osc2_freq = @freq
if Math.abs(@pitch_bend-@layer.instrument.pitch_bend)>.0001
@pitch_bend_v += .001*(@layer.instrument.pitch_bend-@pitch_bend)
@pitch_bend_v *= .5
@pitch_bend += @pitch_bend_v
if Math.abs(@pitch_bend-.5)>.0001
p = @pitch_bend*2-1
p *= 2
f = Math.pow(Math.pow(2,1/12),p)
osc1_freq *= f
osc2_freq *= f
noise_amp = @noise_amp
noise_mod = @noise_mod
lfo1_rate = @lfo1_rate
lfo1_amount = @lfo1_amount
lfo2_rate = @lfo2_rate
lfo2_amount = @lfo2_amount
cutoff = @cutoff_base
q = @inputs.filter.resonance
if Math.abs(@modulation-@layer.instrument.modulation)>.0001
@modulation_v += .001*(@layer.instrument.modulation-@modulation)
@modulation_v *= .5
@modulation += @modulation_v
switch @inputs.modulation.out
when 0 # Amp
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc1_amp = Math.max(0,osc1_amp+mod)
osc2_amp = Math.max(0,osc2_amp+mod)
noise_amp = Math.max(0,noise_amp*(1+mod))
when 1 # Mod
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 2 # Osc1 Amp
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc1_amp = Math.max(0,osc1_amp+mod)
when 3 # Osc1 Mod
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
when 4 # Osc2 Amp
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc2_amp = Math.max(0,osc2_amp+mod)
when 5 # Osc2 Mod
mod = (@inputs.modulation.amount-.5)*2*@modulation
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 6 # Noise Amp
mod = (@inputs.modulation.amount-.5)*2*@modulation
noise_amp = Math.max(0,noise_amp+mod)
when 7 # Noise Color
mod = (@inputs.modulation.amount-.5)*2*@modulation
noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
when 8 # Filter Cutoff
mod = (@inputs.modulation.amount-.5)*2*@modulation
cutoff += mod
when 9 # Filter Resonance
mod = (@inputs.modulation.amount-.5)*2*@modulation
q = Math.max(0,Math.min(1,q+mod))
when 10 # LFO1 Amount
mod = (@inputs.modulation.amount-.5)*2*@modulation
lfo1_amount = Math.max(0,Math.min(1,lfo1_amount+mod))
when 11 # LFO1 Rate
mod = (@inputs.modulation.amount-.5)*2*@modulation
lfo1_rate = Math.max(0,Math.min(1,lfo1_rate+mod))
when 12 # LFO2 Amount
mod = (@inputs.modulation.amount-.5)*2*@modulation
lfo2_amount = Math.max(0,Math.min(1,lfo2_amount+mod))
when 13 # LFO2 Rate
mod = (@inputs.modulation.amount-.5)*2*@modulation
lfo2_rate = Math.max(0,Math.min(1,lfo2_rate+mod))
switch @inputs.env2.out
when 0 # Filter Cutoff
cutoff += @env2.process(@on)*(@env2_amount*2-1)
when 1 # Filter Resonance
q = Math.max(0,Math.min(1,@env2.process(@on)*(@env2_amount*2-1)))
when 2 # Pitch
mod = @env2_amount*2-1
mod *= @env2.process(@on)
mod = 1+mod
osc1_freq *= mod
osc2_freq *= mod
when 3 # Mod
mod = @env2.process(@on)*(@env2_amount*2-1)
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 4 # Amp
mod = @env2.process(@on)*(@env2_amount*2-1)
osc1_amp = Math.max(0,osc1_amp+mod)
osc2_amp = Math.max(0,osc2_amp+mod)
noise_amp = Math.max(0,noise_amp*(1+mod))
when 5 #Osc1 Pitch
mod = @env2_amount*2-1
mod *= @env2.process(@on)
osc1_freq *= 1+mod
when 6 #Osc1 Mod
mod = @env2.process(@on)*(@env2_amount*2-1)
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
when 7 #Osc1 Amp
mod = @env2.process(@on)*(@env2_amount*2-1)
osc1_amp = Math.max(0,osc1_amp+mod)
when 8 #Osc2 Pitch
mod = @env2_amount*2-1
mod *= @env2.process(@on)
osc1_freq *= 1+mod
when 9 #Osc2 Mod
mod = @env2.process(@on)*(@env2_amount*2-1)
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 10 #Osc2 Amp
mod = @env2.process(@on)*(@env2_amount*2-1)
osc2_amp = Math.max(0,osc2_amp+mod)
when 11 # Noise amp
mod = @env2.process(@on)*(@env2_amount*2-1)
noise_amp = Math.max(0,noise_amp+mod)
when 12 # Noise color
mod = @env2.process(@on)*(@env2_amount*2-1)
noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
when 13 # LFO1 Amount
mod = @env2.process(@on)*(@env2_amount*2-1)
lfo1_amount = Math.min(1,Math.max(0,lfo1_amount+mod))
when 14 # LFO1 rate
mod = @env2.process(@on)*(@env2_amount*2-1)
lfo1_rate = Math.min(1,Math.max(0,lfo1_rate+mod))
when 15 # LFO2 Amount
mod = @env2.process(@on)*(@env2_amount*2-1)
lfo2_amount = Math.min(1,Math.max(0,lfo2_amount+mod))
when 16 # LFO2 rate
mod = @env2.process(@on)*(@env2_amount*2-1)
lfo2_rate = Math.min(1,Math.max(0,lfo2_rate+mod))
switch @inputs.lfo1.out
when 0 # Pitch
mod = lfo1_amount
if @inputs.lfo1.audio
mod = 1+mod*mod*@lfo1.process(lfo1_rate)*16
else
mod = 1+mod*mod*@lfo1.process(lfo1_rate)
osc1_freq *= mod
osc2_freq *= mod
when 1 # Mod
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 2 # Amp
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc1_amp = Math.max(0,osc1_amp+mod)
osc2_amp = Math.max(0,osc2_amp+mod)
noise_amp = Math.max(0,noise_amp*(1+mod))
when 3 #Osc1 Pitch
mod = lfo1_amount
mod = 1+mod*mod*@lfo1.process(lfo1_rate)
osc1_freq *= mod
when 4 #Osc1 Mod
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
when 5 #Osc1 Amp
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc1_amp = Math.max(0,osc1_amp+mod)
when 6 #Osc2 Pitch
mod = lfo1_amount
mod = 1+mod*mod*@lfo1.process(lfo1_rate)
osc2_freq *= mod
when 7 #Osc2 Mod
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 8 #Osc2 Amp
mod = @lfo1.process(lfo1_rate)*lfo1_amount
osc2_amp = Math.max(0,osc2_amp+mod)
when 9 # Noise amp
mod = @lfo1.process(lfo1_rate)*lfo1_amount
noise_amp = Math.max(0,noise_amp+mod)
when 10 # Noise color
mod = @lfo1.process(lfo1_rate)*lfo1_amount
noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
when 11
cutoff += @lfo1.process(lfo1_rate)*lfo1_amount
when 12
q = Math.max(0,Math.min(1,@lfo1.process(lfo1_rate)*lfo1_amount))
when 13 # LFO2 Amount
mod = @lfo1.process(lfo1_rate)*lfo1_amount
lfo2_amount = Math.min(1,Math.max(0,lfo2_amount+mod))
when 14 # LFO2 rate
mod = @lfo1.process(lfo1_rate)*lfo1_amount
lfo2_rate = Math.min(1,Math.max(0,lfo2_rate+mod))
switch @inputs.lfo2.out
when 0 # Pitch
mod = lfo2_amount
if @inputs.lfo2.audio
mod = 1+mod*mod*@lfo2.process(lfo2_rate)*16
else
mod = 1+mod*mod*@lfo2.process(lfo2_rate)
osc1_freq *= mod
osc2_freq *= mod
when 1 # Mod
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 2 # Amp
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc1_amp = Math.max(0,osc1_amp+mod)
osc2_amp = Math.max(0,osc2_amp+mod)
noise_amp = Math.max(0,noise_amp*(1+mod))
when 3 #Osc1 Pitch
mod = lfo2_amount
mod = 1+mod*mod*@lfo2.process(lfo2_rate)
osc1_freq *= mod
when 4 #Osc1 Mod
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
when 5 #Osc1 Amp
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc1_amp = Math.max(0,osc1_amp+mod)
when 6 #Osc2 Pitch
mod = lfo2_amount
mod = 1+mod*mod*@lfo2.process(lfo2_rate)
osc2_freq *= mod
when 7 #Osc2 Mod
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
when 8 #Osc2 Amp
mod = @lfo2.process(lfo2_rate)*lfo2_amount
osc2_amp = Math.max(0,osc2_amp+mod)
when 9 # Noise amp
mod = @lfo2.process(lfo2_rate)*lfo2_amount
noise_amp = Math.max(0,noise_amp+mod)
when 10 # Noise color
mod = @lfo2.process(lfo2_rate)*lfo2_amount
noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
when 11
cutoff += @lfo2.process(lfo2_rate)*lfo2_amount
when 12
q = Math.max(0,Math.min(1,@lfo2.process(lfo2_rate)*lfo2_amount))
switch @inputs.combine
when 1
s1 = @osc1.process(osc1_freq,osc1_mod)*osc1_amp
s2 = @osc2.process(osc2_freq,osc2_mod)*osc2_amp
sig = (s1+s2)*(s1+s2)
#sig = @osc1.process(osc1_freq,osc1_mod)*@osc2.process(osc2_freq,osc2_mod)*Math.max(osc1_amp,osc2_amp)*4
when 2
sig = @osc2.process(osc2_freq*Math.max(0,1-@osc1.process(osc1_freq,osc1_mod)*osc1_amp),osc2_mod)*osc2_amp
else
sig = @osc1.process(osc1_freq,osc1_mod)*osc1_amp+@osc2.process(osc2_freq,osc2_mod)*osc2_amp
if noise_amp>0
sig += @noise.process(noise_mod)*noise_amp
mod = @env2.process(@on)
if not @cutoff
@cutoff = cutoff
else
if @cutoff<cutoff
@cutoff += Math.min(cutoff-@cutoff,@filter_increment)
else if @cutoff>cutoff
@cutoff += Math.max(cutoff-@cutoff,-@filter_increment)
cutoff = @cutoff
# VCF
cutoff = Math.pow(2,Math.max(0,Math.min(cutoff,1))*10)*22000/1024
sig *= @env1.process(@on)
#@cutoff += 0.001*(cutoff-@cutoff)
sig = @filter.process(sig,cutoff,q*q*9.5+.5)
class Instrument
 # A playable instrument: a set of Layers mixed together, sharing
 # modulation-wheel and pitch-bend state.
 constructor:(@engine)->
  @layers = []
  @layers.push new Layer @
  @modulation = 0
  @pitch_bend = .5
 noteOn:(key,velocity)->
  for l in @layers
   l.noteOn(key,velocity)
  return
 noteOff:(key)->
  for l in @layers
   l.noteOff(key)
  return
 # Modulation wheel position, 0..1.
 setModulation:(@modulation)->
 # Pitch bend position, 0..1 (.5 = centre).
 setPitchBend:(@pitch_bend)->
 # Render `length` samples of every layer and sum them into @output.
 # (The original had a dead `if false` single-layer fast path; removed.)
 process:(length)->
  if not @output? or @output[0].length<length
   @output = [new Float64Array(length),new Float64Array(length)]
  for l in @layers
   l.process(length)
  for i in [0..length-1] by 1
   left = 0
   right = 0
   for l in @layers
    left += l.output[0][i]
    right += l.output[1][i]
   @output[0][i] = left
   @output[1][i] = right
  return
# Insert-effect palette for slot 1, indexed by inputs.fx1.type
# (Distortion, BitCrusher, Chorus and Phaser are defined earlier in the
# file).
FX1 = [
 Distortion
 BitCrusher
 Chorus
 Flanger
 Phaser
 Delay
]
# Insert-effect palette for slot 2, indexed by inputs.fx2.type.
FX2 = [
 Delay
 Reverb
 Chorus
 Flanger
 Phaser
]
class Layer
constructor:(@instrument)->
@engine = @instrument.engine
@voices = []
@eq = new EQ(@engine)
@spatializer = new Spatializer(@engine)
@inputs =
osc1:
type: 0
tune: .5
coarse: .5
amp: .5
mod: 0
osc2:
type: 0
tune: .5
coarse: .5
amp: .5
mod: 0
combine: 0
noise:
amp: 0
mod: 0
filter:
cutoff: 1
resonance: 0
type: 0
slope: 1
follow: 0
disto:
wet:0
drive:0
bitcrusher:
wet: 0
drive: 0
crush: 0
env1:
a: 0
d: 0
s: 1
r: 0
env2:
a: .1
d: .1
s: .5
r: .1
out: 0
amount: .5
lfo1:
type: 0
amount: 0
rate: .5
out: 0
lfo2:
type: 0
amount: 0
rate: .5
out: 0
fx1:
type: -1
amount: 0
rate: 0
fx2:
type: -1
amount: 0
rate: 0
eq:
low: .5
mid: .5
high: .5
spatialize: .5
pan: .5
polyphony: 1
glide: .5
sync: 1
velocity:
out: 0
amount: .5
amp: .5
modulation:
out: 0
amount: .5
noteOn:(key,velocity)->
if @inputs.polyphony == 1 and @last_voice? and @last_voice.on
voice = @last_voice
voice.noteOn @,key,velocity,true
@voices.push voice
else
voice = @engine.getVoice()
voice.noteOn @,key,velocity
@voices.push voice
@last_voice = voice
@last_key = key
removeVoice:(voice)->
index = @voices.indexOf(voice)
if index>=0
@voices.splice index,1
noteOff:(key)->
for v in @voices
if v.key == key
v.noteOff()
return
update:()->
if @inputs.fx1.type>=0
if not @fx1? or @fx1 not instanceof FX1[@inputs.fx1.type]
@fx1 = new FX1[@inputs.fx1.type] @engine
else
@fx1 = null
if @inputs.fx2.type>=0
if not @fx2? or @fx2 not instanceof FX2[@inputs.fx2.type]
@fx2 = new FX2[@inputs.fx2.type] @engine
else
@fx2 = null
if @fx1?
@fx1.update(@inputs.fx1)
if @fx2?
@fx2.update(@inputs.fx2)
@eq.update(@inputs.eq)
process:(length)->
if not @output? or @output[0].length<length
@output = [new Float64Array(length),new Float64Array(length)]
for i in [@voices.length-1..0] by -1
v = @voices[i]
if not v.on and v.env1.sig<.00001
v.env1.sig = 0
@removeVoice v
for i in [0..length-1] by 1
sig = 0
for v in @voices
sig += v.process()
@output[0][i] = sig
@output[1][i] = sig
@spatializer.process(@output,length,@inputs.spatialize,@inputs.pan)
if @fx1?
@fx1.process(@output,length)
if @fx2?
@fx2.process(@output,length)
@eq.process(@output,length)
return
class AudioEngine
 # Top-level synth: owns a shared pool of 8 voices, the instrument
 # list, MIDI event dispatch and the final soft-clipped mix.
 constructor:(@sampleRate)->
  @voices = []
  @voice_index = 0
  @num_voices = 8
  for i in [0..@num_voices-1]
   @voices[i] = new Voice @
  @instruments = []
  @instruments.push new Instrument @
  # Simple performance counters, reported once per second of audio.
  @avg = 0
  @samples = 0
  @time = 0
  @layer =
   inputs: @inputs
  @start = Date.now()
 # Dispatch a raw MIDI message [status, data1, data2].
 event:(data)->
  if data[0] == 144 and data[2]>0
   @instruments[0].noteOn(data[1],data[2])
  else if data[0] == 128 or (data[0] == 144 and data[2] == 0)
   @instruments[0].noteOff(data[1])
  else if data[0]>=176 and data[0]<192 and data[1] == 1 # modulation wheel
   @instruments[0].setModulation(data[2]/127)
  else if data[0]>=224 and data[0]<240 # pitch bend
   # 14-bit bend value, remapped to 0..1 with .5 at centre.
   v = (data[1]+128*data[2])
   console.info "PB value=#{v}"
   if v>=8192
    v = (v-8192)/(16383-8192)
    v = .5+.5*v
   else
    v = .5*v/8192
   console.info("Pitch Bend = #{v}")
   @instruments[0].setPitchBend(v)
  return
 # Seconds since engine start (used to phase-sync LFOs).
 getTime:()->
  (Date.now()-@start)/1000
 # Voice stealing: prefer a free voice with the lowest envelope level,
 # otherwise steal the oldest sounding voice.
 getVoice:()->
  best = @voices[0]
  for i in [1..@voices.length-1]
   v = @voices[i]
   if best.on
    if v.on
     if v.noteon_time<best.noteon_time
      best = v
    else
     best = v
   else
    if not v.on and v.env1.sig<best.env1.sig
     best = v
  return best
 # Re-read all layer/voice parameters after a UI change.
 updateVoices:()->
  for v in @voices
   v.update()
  for l in @instruments[0].layers
   l.update()
  return
 # AudioWorklet-style render callback: mix all instruments into the
 # first two output channels with an exponential soft clipper.
 process:(inputs,outputs,parameters)->
  output = outputs[0]
  time = Date.now()
  res = [0,0]
  for inst in @instruments
   inst.process(output[0].length)
  for channel,i in output
   if i<2
    for j in [0..channel.length-1] by 1
     sig = 0
     for inst in @instruments
      sig += inst.output[i][j]
     sig *= .125
     # Soft clip to (-1, 1).
     sig = if sig<0 then -(1-Math.exp(sig)) else 1-Math.exp(-sig)
     channel[j] = sig
  @time += Date.now()-time
  @samples += channel.length
  if @samples >= @sampleRate
   @samples -= @sampleRate
   console.info @time+" ms ; buffer size = "+channel.length
   @time = 0
  return
class Blip
 # Band-limited step table: a sum of the first 16 odd harmonics,
 # sampled over @size+1 points and normalised so the last entry is 1.
 constructor:()->
  @size = 512
  @samples = new Float64Array(@size+1)
  for harmonic in [1..31] by 2
   for i in [0..@size] by 1
    x = (i/@size-.5)*.5
    @samples[i] += Math.sin(x*2*Math.PI*harmonic)/harmonic
  # Normalise by the final (maximum) sample.
  norm = @samples[@size]
  for i in [0..@size] by 1
   @samples[i] /= norm
`
// AudioWorklet glue: forwards messages from the main thread into the
// CoffeeScript AudioEngine above and renders audio in process().
class MyWorkletProcessor extends AudioWorkletProcessor {
  constructor() {
    super()
    this.synth = new AudioEngine(sampleRate)
    this.port.onmessage = (e) => {
      console.info(e)
      const data = JSON.parse(e.data)
      if (data.name === "note") {
        // MIDI-style event payload: [status, key, velocity].
        this.synth.event(data.data)
      } else if (data.name === "param") {
        // Parameter change: id is a dotted path into the layer's
        // inputs tree, e.g. "osc1.tune".
        const value = data.value
        const path = data.id.split(".")
        let target = this.synth.instruments[0].layers[0].inputs
        while (path.length > 1) {
          target = target[path.shift()]
        }
        target[path[0]] = value
        this.synth.updateVoices()
      }
    }
  }
  process(inputs, outputs, parameters) {
    this.synth.process(inputs, outputs, parameters)
    return true
  }
}
registerProcessor('my-worklet-processor', MyWorkletProcessor)
`
| true | `
const TWOPI = 2*Math.PI
const SIN_TABLE = new Float64Array(10001)
const WHITE_NOISE = new Float64Array(100000)
const COLORED_NOISE = new Float64Array(100000)
`
do ->
for i in [0..10000] by 1
SIN_TABLE[i] = Math.sin(i/10000*Math.PI*2)
`
const BLIP_SIZE = 512
const BLIP = new Float64Array(BLIP_SIZE+1)
`
do ->
for p in [1..31] by 2
for i in [0..BLIP_SIZE] by 1
x = (i/BLIP_SIZE-.5)*.5
BLIP[i] += Math.sin(x*2*Math.PI*p)/p
norm = BLIP[BLIP_SIZE]
for i in [0..BLIP_SIZE] by 1
BLIP[i] /= norm
do ->
n = 0
b0 = b1 = b2 = b3 = b4 = b5 = b6 = 0
for i in [0..99999] by 1
white = Math.random()*2-1
n = .99*n+.01*white
pink = n*6
WHITE_NOISE[i] = white
COLORED_NOISE[i] = pink
DBSCALE = (value,range)->
 # Exponential (dB-like) response curve mapping value in [0,1] to
 # [0,1]: 0 -> 0, 1 -> 1, with steepness controlled by `range`.
 lo = 1/Math.exp(range)
 (Math.exp(value*range)/Math.exp(range)-lo)/(1-lo)
class SquareOscillator
constructor:(@voice,osc)->
@invSampleRate = 1/@voice.engine.sampleRate
@tune = 1
@buffer = new Float64Array(32)
@init(osc) if osc?
init:(osc,@sync)->
@phase = if @sync then 0 else Math.random()
@sig = -1
@index = 0
@update(osc)
for i in [0..@buffer.length-1]
@buffer[i] = 0
return
update:(osc,@sync)->
c = Math.round(osc.coarse*48)/48
fine = osc.tune*2-1
fine = fine*(1+fine*fine)*.5
@tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
@analog_tune = @tune
process:(freq,mod)->
dp = @analog_tune*freq*@invSampleRate
@phase += dp
m = .5-mod*.49
avg = 1-2*m
if @sig<0
if @phase>=m
@sig = 1
dp = Math.max(0,Math.min(1,(@phase-m)/dp))
dp *= 16
dpi = Math.floor(dp)
a = dp-dpi
index = @index
for i in [0..31] by 1
break if dpi>=512
@buffer[index] += -1+BLIP[dpi]*(1-a)+BLIP[dpi+1]*a
dpi += 16
index = (index+1)%@buffer.length
else
if @phase>=1
dp = Math.max(0,Math.min(1,(@phase-1)/dp))
dp *= 16
dpi = Math.floor(dp)
a = dp-dpi
index = @index
@sig = -1
@phase -= 1
@analog_tune = if @sync then @tune else @tune*(1+(Math.random()-.5)*.002)
for i in [0..31] by 1
break if dpi>=512
@buffer[index] += 1-BLIP[dpi]*(1-a)-BLIP[dpi+1]*a
dpi += 16
index = (index+1)%@buffer.length
sig = @sig+@buffer[@index]
@buffer[@index] = 0
@index = (@index+1)%@buffer.length
sig-avg
class SawOscillator
constructor:(@voice,osc)->
@invSampleRate = 1/@voice.engine.sampleRate
@tune = 1
@buffer = new Float64Array(32)
@init(osc) if osc?
init:(osc,@sync)->
@phase = if @sync then 0 else Math.random()
@sig = -1
@index = 0
@jumped = false
@update(osc)
for i in [0..@buffer.length-1]
@buffer[i] = 0
return
update:(osc,@sync)->
c = Math.round(osc.coarse*48)/48
fine = osc.tune*2-1
fine = fine*(1+fine*fine)*.5
@tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
@analog_tune = @tune
process:(freq,mod)->
dphase = @analog_tune*freq*@invSampleRate
@phase += dphase
#if @phase>=1
# @phase -= 1
#return 1-2*@phase
slope = 1+mod
if not @jumped
sig = 1-2*@phase*slope
if @phase>=.5
@jumped = true
sig = mod-2*(@phase-.5)*slope
dp = Math.max(0,Math.min(1,(@phase-.5)/dphase))
dp *= 16
dpi = Math.floor(dp)
a = dp-dpi
index = @index
if mod>0
for i in [0..31] by 1
break if dpi>=512
@buffer[index] += (-1+BLIP[dpi]*(1-a)+BLIP[dpi+1]*a)*mod
dpi += 16
index = (index+1)%@buffer.length
else
sig = mod-2*(@phase-.5)*slope
if @phase>=1
@jumped = false
dp = Math.max(0,Math.min(1,(@phase-1)/dphase))
dp *= 16
dpi = Math.floor(dp)
a = dp-dpi
index = @index
@phase -= 1
sig = 1-2*@phase*slope
@analog_tune = if @sync then @tune else @tune*(1+(Math.random()-.5)*.002)
for i in [0..31] by 1
break if dpi>=512
@buffer[index] += -1+BLIP[dpi]*(1-a)+BLIP[dpi+1]*a
dpi += 16
index = (index+1)%@buffer.length
sig += @buffer[@index]
@buffer[@index] = 0
@index = (@index+1)%@buffer.length
offset = 16*2*dphase*slope
sig+offset
class SineOscillator
constructor:(@voice,osc)->
@sampleRate = @voice.engine.sampleRate
@invSampleRate = 1/@voice.engine.sampleRate
@maxRatio = @sampleRate/Math.PI/5/(2*Math.PI)
@tune = 1
@init(osc) if osc?
init:(osc,@sync)->
@phase = if @sync then .25 else Math.random()
@update(osc)
update:(osc,@sync)->
c = Math.round(osc.coarse*48)/48
fine = osc.tune*2-1
fine = fine*(1+fine*fine)*.5
@tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
@modnorm = 25/Math.sqrt(@tune*@voice.freq)
@dphase = @tune*@invSampleRate
sinWave:(x)->
x = (x-Math.floor(x))*10000
ix = Math.floor(x)
ax = x-ix
SIN_TABLE[ix]*(1-ax)+SIN_TABLE[ix+1]*ax
sinWave2:(x)->
x = 2*(x-Math.floor(x))
if x>1
x = 2-x
x*x*(3-2*x)*2-1
process:(freq,mod)->
@phase = (@phase+freq*@dphase)
if @phase>=1
@phase -= 1
@analog_tune = if @sync then @tune else @tune*(1+(Math.random()-.5)*.002)
@dphase = @analog_tune*@invSampleRate
m1 = mod*@modnorm
m2 = mod*m1
p = @phase
return @sinWave2(p+m1*@sinWave2(p+m2*@sinWave2(p)))
class VoiceOscillator
constructor:(@voice,osc)->
@sampleRate = @voice.engine.sampleRate
@invSampleRate = 1/@voice.engine.sampleRate
@tune = 1
@init(osc) if osc?
@f1 = [320,500,700,1000,500,320,700,500,320,320]
@f2 = [800,1000,1150,1400,1500,1650,1800,2300,3200,3200]
init:(osc)->
@phase = 0
@grain1_p1 = .25
@grain1_p2 = .25
@grain2_p1 = .25
@grain2_p2 = .25
@update(osc)
update:(osc)->
c = Math.round(osc.coarse*48)/48
fine = osc.tune*2-1
fine = fine*(1+fine*fine)*.5
@tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
@modnorm = 25/Math.sqrt(@tune*@voice.freq)
@dphase = @tune*@invSampleRate
sinWave2:(x)->
x = 2*(x-Math.floor(x))
if x>1
x = 2-x
x*x*(3-2*x)*2-1
process:(freq,mod)->
p1 = @phase<1
@phase = (@phase+freq*@dphase)
m = mod*(@f1.length-2)
im = Math.floor(m)
am = m-im
f1 = @f1[im]*(1-am)+@f1[im+1]*am
f2 = @f2[im]*(1-am)+@f2[im+1]*am
if p1 and @phase>=1
@grain2_p1 = .25
@grain2_p2 = .25
if @phase>=2
@phase -= 2
@grain1_p1 = .25
@grain1_p2 = .25
x = @phase-1
x *= x*x
vol = 1-Math.abs(1-@phase)
#vol = (@sinWave2(x*.5+.5)+1)*.5
sig = vol*(@sinWave2(@grain1_p1)*.25+.125*@sinWave2(@grain1_p2))
@grain1_p1 += f1*@invSampleRate
@grain1_p2 += f2*@invSampleRate
x = ((@phase+1)%2)-1
x *= x*x
#vol = (@sinWave2(x*.5+.5)+1)*.5
vol = if @phase<1 then 1-@phase else @phase-1
sig += vol*(@sinWave2(@grain2_p1)*.25+.125*@sinWave2(@grain2_p2))
sig += @sinWave2(@phase+.25)
@grain2_p1 += f1*@invSampleRate
@grain2_p2 += f2*@invSampleRate
sig
class StringOscillator
constructor:(@voice,osc)->
@sampleRate = @voice.engine.sampleRate
@invSampleRate = 1/@voice.engine.sampleRate
@maxRatio = @sampleRate/Math.PI/5/(2*Math.PI)
@tune = 1
@init(osc) if osc?
init:(osc)->
@index = 0
@buffer = new Float64Array(@sampleRate/10)
@update(osc)
@prev = 0
@power = 1
update:(osc)->
c = Math.round(osc.coarse*48)/48
fine = osc.tune*2-1
fine = fine*(1+fine*fine)*.5
@tune = 1*Math.pow(2,c*4)*.25*Math.pow(Math.pow(2,1/12),fine)
process:(freq,mod)->
period = @sampleRate/(freq*@tune)
x = (@index-period+@buffer.length)%@buffer.length
ix = Math.floor(x)
a = x-ix
reflection = @buffer[ix]*(1-a)+@buffer[(ix+1)%@buffer.length]*a
m = Math.exp(Math.log(0.99)/freq)
m = 1
r = reflection*m+@prev*(1-m)
@prev = r
n = Math.random()*2-1
m = mod*.5
m = m*m*.999+.001
sig = n*m+r
@power = Math.max(Math.abs(sig)*.1+@power*.9,@power*.999)
sig /= (@power+.0001)
@buffer[@index] = sig
@index += 1
@index = 0 if @index>=@buffer.length
sig
processOld:(freq,mod)->
period = @sampleRate/(freq*@tune)
x = (@index-period+@buffer.length)%@buffer.length
ix = Math.floor(x)
a = x-ix
reflection = @buffer[ix]*(1-a)+@buffer[(ix+1)%@buffer.length]*a
#m = .1+Math.exp(-period*mod*.01)*.89
m = mod
r = reflection*m+@prev*(1-m)
@prev = r
sig = (Math.random()*2-1)*.01+r*.99
@buffer[@index] = sig/@power
@power = Math.abs(sig)*.001+@power*.999
@index += 1
@index = 0 if @index>=@buffer.length
sig
class Noise
 # Deterministic per-voice white/pink noise source.
 constructor:(@voice)->
 init:()->
  @phase = 0
  @seed = 1382
  # State of the one-pole low-pass used to approximate pink noise.
  @n = 0
 # mod: 0 = pure white noise, 1 = pure (approximate) pink noise.
 process:(mod)->
  # Linear congruential generator, masked to 31 bits.
  @seed = (@seed*13907+12345)&0x7FFFFFFF
  white = (@seed/0x80000000)*2-1
  @n = @n*.99+white*.01
  # Gain of 6 roughly matches the low-passed level to the white level.
  pink = @n*6
  white*(1-mod)+pink*mod
class ModEnvelope
 # Linear ADSR envelope used for modulation targets.  @phase encodes
 # the stage: [0,1) attack, [1,2) decay, [2,3) sustain, >=3 release.
 constructor:(@voice)->
  @sampleRate = @voice.engine.sampleRate
 init:(@params)->
  @phase = 0
  @update()
 # a/d/r become per-sample phase increments derived from the 0..1 knob
 # values (cubic scaling, up to ~20 s).  The optional `a` argument
 # overrides the stored attack (velocity->attack modulation).
 update:(a = @params.a)->
  @a = 1/(@sampleRate*20*Math.pow(a,3)+1)
  @d = 1/(@sampleRate*20*Math.pow(@params.d,3)+1)
  @s = @params.s
  @r = 1/(@sampleRate*20*Math.pow(@params.r,3)+1)
 # Advance one sample and return the level; `noteon` false forces the
 # release stage.
 process:(noteon)->
  if @phase<1
   sig = @sig = @phase = Math.min(1,@phase+@a)
  else if @phase<2
   @phase = Math.min(2,@phase+@d)
   sig = @sig = 1-(@phase-1)*(1-@s)
  else if @phase<3
   sig = @sig = @s
  else
   @phase = @phase+@r
   # Linear fade-out from the level held at note-off.
   sig = Math.max(0,@sig*(1-(@phase-3)))
  if @phase<3 and not noteon
   @phase = 3
  sig
# Amplitude ADSR. Attack ramps @phase 0→1 with a soft polynomial shape;
# decay/release multiply @sig by a per-sample exponential factor;
# sustain level comes from DBSCALE (dB-style mapping of the knob).
class AmpEnvelope
  constructor:(@voice)->
    @sampleRate = @voice.engine.sampleRate
    @sig = 0
  init:(@params)->
    @phase = 0
    @sig = 0
    @update()
  # Recompute stage rates; `a` may be overridden by a modulation source.
  # (Fixed: removed stray console.info debug logging from this hot path.)
  update:(a = @params.a)->
    #@a = Math.exp(Math.log(1/@epsilon)/(@sampleRate*10*Math.pow(@params.a,3)+1))
    @a2 = 1/(@sampleRate*(20*Math.pow(a,3)+.00025))
    # decay/release factors halve the level once per time constant
    @d = Math.exp(Math.log(0.5)/(@sampleRate*(10*Math.pow(@params.d,3)+0.001)))
    @s = DBSCALE(@params.s,2)
    @r = Math.exp(Math.log(0.5)/(@sampleRate*(10*Math.pow(@params.r,3)+0.001)))
  # Advance one sample; `noteon` false jumps to the release stage.
  process:(noteon)->
    if @phase<1
      @phase += @a2
      sig = @sig = (@phase*.75+.25)*@phase
      if @phase>=1
        sig = @sig = 1
        @phase = 1
    else if @phase==1
      sig = @sig = @sig*@d
      if sig <= @s
        sig = @sig = @s
        @phase = 2
    else if @phase<3
      sig = @sig = @s
    else
      sig = @sig = @sig*@r
    if @phase<3 and not noteon
      @phase = 3
    sig
# Low-frequency oscillator with selectable waveform. `update` rebinds
# @process to one of the variants below based on @params.type.
class LFO
  constructor:(@voice)->
    @invSampleRate = 1/@voice.engine.sampleRate
  # `sync` derives the phase from engine time so synced voices line up;
  # otherwise the phase starts at random.
  init:(@params,sync)->
    if sync
      rate = @params.rate
      rate = .1+rate*rate*rate*(100-.1)
      t = @voice.engine.getTime()*rate
      @phase = t%1
    else
      @phase = Math.random()
    @process = @processSine
    @update()
    @r1 = Math.random()*2-1
    @r2 = Math.random()*2-1
    @out = 0
  # Select the waveform and precompute the key-tracked audio-rate frequency.
  update:()->
    switch @params.type
      when 0 then @process = @processSaw
      when 1 then @process = @processSquare
      when 2 then @process = @processSine
      when 3 then @process = @processTriangle
      when 4 then @process = @processRandom
      when 5 then @process = @processRandomStep
    @audio_freq = 440*Math.pow(Math.pow(2,1/12),@voice.key-57)
  # Smooth sine-like wave built from two cubic smoothstep halves.
  # In audio mode the rate tracks the voice's pitch.
  processSine:(rate)->
    if @params.audio
      r = if rate<.5 then .25+rate*rate/.25*.75 else rate*rate*4
      #r = Math.pow(2,(Math.round(rate*48))/12)*.25
      rate = @audio_freq*r
    else
      rate = .01+rate*rate*20
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    p = @phase*2
    if p<1
      p*p*(3-2*p)*2-1
    else
      p -= 1
      1-p*p*(3-2*p)*2
  processTriangle:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    return (1-4*Math.abs(@phase-.5))
  # Saw and square smooth their output through @out (simple one-pole).
  processSaw:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    out = (1-@phase*2)
    @out = @out*.97+out*.03
  processSquare:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    @phase -= 1 if @phase>=1
    out = if @phase<.5 then 1 else -1
    @out = @out*.97+out*.03
  # Linear interpolation between successive random targets.
  processRandom:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    if @phase>=1
      @phase -= 1
      @r1 = @r2
      @r2 = Math.random()*2-1
    @r1*(1-@phase)+@r2*@phase
  # Sample-and-hold random, smoothed through @out.
  processRandomStep:(rate)->
    rate *= rate
    rate *= rate
    rate = .05+rate*rate*10000
    @phase = (@phase+rate*@invSampleRate)
    if @phase>=1
      @phase -= 1
      @r1 = Math.random()*2-1
    @out = @out*.97+@r1*.03
  # Placeholders — not selected by update() and intentionally empty.
  processDiscreteRandom:()->
  processSmoothRandom:()->
# Biquad filter (RBJ cookbook style) with low/band/high-pass modes and an
# optional second cascaded stage (inputs.filter.slope). sin/cos of the
# normalized cutoff are looked up in the global SIN_TABLE with linear
# interpolation (offset 2500 ≈ quarter period, so that index yields cos).
class Filter
  constructor:(@voice)->
    @sampleRate = @voice.engine.sampleRate
    @invSampleRate = 1/@voice.engine.sampleRate
    @halfSampleRate = @sampleRate*.5
  # Reset the two stages' delay state (direct form II memories).
  init:(@layer)->
    @fm00 = 0
    @fm01 = 0
    @fm10 = 0
    @fm11 = 0
    @update()
  # Bind @process to the selected mode.
  update:()->
    switch @layer.inputs.filter.type
      when 0
        @process = @processLowPass
      when 1
        @process = @processBandPass
      when 2
        @process = @processHighPass
  # cutoff is in Hz, q is the resonance; coefficients are recomputed per sample.
  processHighPass:(sig,cutoff,q)->
    w0 = Math.max(0,Math.min(@halfSampleRate,cutoff))*@invSampleRate
    w0 *= 10000
    iw0 = Math.floor(w0)
    aw0 = w0-iw0
    cosw0 = (1-aw0)*SIN_TABLE[iw0+2500]+aw0*SIN_TABLE[iw0+2501]
    sinw0 = (1-aw0)*SIN_TABLE[iw0]+aw0*SIN_TABLE[iw0+1]
    alpha = sinw0 / (2 * q)
    invOnePlusAlpha = 1/(1 + alpha)
    a0 = (-2 * cosw0) * invOnePlusAlpha
    a1 = (1 - alpha) * invOnePlusAlpha
    onePlusCosw0 = 1 + cosw0
    b0 = onePlusCosw0*.5* invOnePlusAlpha
    b1 = -onePlusCosw0* invOnePlusAlpha
    b2 = b0
    w = sig - a0*@fm00 - a1*@fm01
    sig = b0*w + b1*@fm00 + b2*@fm01
    @fm01 = @fm00
    @fm00 = w
    # optional second identical stage for a steeper slope
    if @layer.inputs.filter.slope
      w = sig - a0*@fm10 - a1*@fm11
      sig = b0*w + b1*@fm10 + b2*@fm11
      @fm11 = @fm10
      @fm10 = w
    sig
  processBandPass:(sig,cutoff,q)->
    w0 = Math.max(0,Math.min(@halfSampleRate,cutoff))*@invSampleRate
    w0 *= 10000
    iw0 = Math.floor(w0)
    aw0 = w0-iw0
    cosw0 = (1-aw0)*SIN_TABLE[iw0+2500]+aw0*SIN_TABLE[iw0+2501]
    sinw0 = (1-aw0)*SIN_TABLE[iw0]+aw0*SIN_TABLE[iw0+1]
    alpha = sinw0 / (2 * q)
    invOnePlusAlpha = 1/(1 + alpha)
    oneLessCosw0 = 1 - cosw0
    a0 = (-2 * cosw0) * invOnePlusAlpha
    a1 = (1 - alpha) * invOnePlusAlpha
    b0 = q * alpha * invOnePlusAlpha
    b1 = 0
    b2 = -b0
    w = sig - a0*@fm00 - a1*@fm01
    sig = b0*w + b1*@fm00 + b2*@fm01
    @fm01 = @fm00
    @fm00 = w
    if @layer.inputs.filter.slope
      w = sig - a0*@fm10 - a1*@fm11
      sig = b0*w + b1*@fm10 + b2*@fm11
      @fm11 = @fm10
      @fm10 = w
    sig
  processLowPass:(sig,cutoff,q)->
    w0 = Math.max(0,Math.min(@halfSampleRate,cutoff))*@invSampleRate
    w0 *= 10000
    iw0 = Math.floor(w0)
    aw0 = w0-iw0
    cosw0 = (1-aw0)*SIN_TABLE[iw0+2500]+aw0*SIN_TABLE[iw0+2501]
    sinw0 = (1-aw0)*SIN_TABLE[iw0]+aw0*SIN_TABLE[iw0+1]
    alpha = sinw0 / (2 * q)
    invOnePlusAlpha = 1/(1 + alpha)
    oneLessCosw0 = 1 - cosw0
    b1 = oneLessCosw0 * invOnePlusAlpha
    b0 = b1*.5
    b2 = b0
    a0 = (-2 * cosw0) * invOnePlusAlpha
    a1 = (1 - alpha) * invOnePlusAlpha
    w = sig - a0*@fm00 - a1*@fm01
    sig = b0*w + b1*@fm00 + b2*@fm01
    @fm01 = @fm00
    @fm00 = w
    if @layer.inputs.filter.slope
      w = sig - a0*@fm10 - a1*@fm11
      sig = b0*w + b1*@fm10 + b2*@fm11
      @fm11 = @fm10
      @fm10 = w
    sig
# Exponential soft-clip waveshaper. `rate` sets the drive, `amount` the
# wet/dry mix. Operates in place on a stereo buffer pair.
class Distortion
  constructor:()->
    @amount = .5
    @rate = .5
  update:(data)->
    @amount = data.amount
    @rate = data.rate
  # Shape both channels: d<0 → -1+e^d, d>=0 → 1-e^-d, then crossfade.
  process:(buffer,length)->
    drive = 1+@rate*@rate*99
    wet = @amount
    dry = 1-wet
    for ch in [0,1]
      data = buffer[ch]
      for i in [0..length-1] by 1
        x = data[i]
        d = x*drive
        shaped = if d<0 then -1+Math.exp(d) else 1-Math.exp(-d)
        data[i] = dry*x+wet*shaped
    return
# Sample-rate reducer + quantizer. `amount` maps to a hold length in
# samples (2^(amount*8)); `rate` maps to the quantization grid density
# (2^((1-rate)*16)*2 steps per unit).
class BitCrusher
  constructor:()->
    @phase = 0
    @left = 0
    @right = 0
  update:(data)->
    @amount = Math.pow(2,data.amount*8)
    @rate = Math.pow(2,(1-data.rate)*16)*2
  # Hold the last quantized stereo sample for @amount input samples.
  # (Fixed: removed dead locals `r`/`crush` that were computed but never used.)
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      @phase += 1
      if @phase>@amount
        @phase -= @amount
        # round away from zero onto a grid of 1/@rate
        @left = if left>0 then Math.ceil(left*@rate)/@rate else Math.floor(left*@rate)/@rate
        @right = if right>0 then Math.ceil(right*@rate)/@rate else Math.floor(right*@rate)/@rate
      buffer[0][i] = @left
      buffer[1][i] = @right
    return
# Three-tap modulated-delay chorus. Three incommensurate LFO rates sweep
# short delay taps; taps are mixed asymmetrically into left/right and fed
# back lightly into the delay lines.
class Chorus
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate)
    @right_buffer = new Float64Array(@sampleRate)
    @phase1 = Math.random()
    @phase2 = Math.random()
    @phase3 = Math.random()
    @f1 = 1.031/@sampleRate
    @f2 = 1.2713/@sampleRate
    @f3 = 0.9317/@sampleRate
    @index = 0
  update:(data)->
    @amount = Math.pow(data.amount,.5)
    @rate = data.rate
  # Fractional delay-line read with linear interpolation and wraparound.
  read:(buffer,pos)->
    pos += buffer.length if pos<0
    i = Math.floor(pos)
    a = pos-i
    buffer[i]*(1-a)+buffer[(i+1)%buffer.length]*a
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = @left_buffer[@index] = buffer[0][i]
      right = @right_buffer[@index] = buffer[1][i]
      @phase1 += @f1*(.5+.5*@rate)
      @phase2 += @f2*(.5+.5*@rate)
      @phase3 += @f3*(.5+.5*@rate)
      p1 = (1+Math.sin(@phase1*Math.PI*2))*@left_buffer.length*.002
      p2 = (1+Math.sin(@phase2*Math.PI*2))*@left_buffer.length*.002
      p3 = (1+Math.sin(@phase3*Math.PI*2))*@left_buffer.length*.002
      @phase1 -= 1 if @phase1>=1
      @phase2 -= 1 if @phase2>=1
      @phase3 -= 1 if @phase3>=1
      s1 = @read(@left_buffer,@index-p1)
      s2 = @read(@right_buffer,@index-p2)
      s3 = @read(@right_buffer,@index-p3)
      pleft = @amount*(s1*.2+s2*.7+s3*.1)
      pright = @amount*(s1*.6+s2*.2+s3*.2)
      left += pleft
      right += pright
      @left_buffer[@index] += pleft*.5*@amount
      # FIX: right-channel feedback previously wrote pleft (copy-paste bug)
      @right_buffer[@index] += pright*.5*@amount
      @index += 1
      @index = 0 if @index>=@left_buffer.length
      buffer[0][i] = left
      buffer[1][i] = right
    return
# Phaser built from two LFO-swept delay taps with feedback. Note the dry
# signal is subtracted (phase-inverted) in the output mix, which creates
# the notch cancellation.
class Phaser
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate)
    @right_buffer = new Float64Array(@sampleRate)
    @phase1 = Math.random()
    @phase2 = Math.random()
    @f1 = .0573/@sampleRate
    @f2 = .0497/@sampleRate
    @index = 0
  update:(data)->
    @amount = data.amount
    @rate = data.rate
  # Fractional delay-line read with linear interpolation and wraparound.
  read:(buffer,pos)->
    pos += buffer.length if pos<0
    i = Math.floor(pos)
    a = pos-i
    buffer[i]*(1-a)+buffer[(i+1)%buffer.length]*a
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      @phase1 += @f1*(.5+.5*@rate)
      @phase2 += @f2*(.5+.5*@rate)
      o1 = (1+Math.sin(@phase1*Math.PI*2))/2
      p1 = @sampleRate*(.0001+.05*o1)
      o2 = (1+Math.sin(@phase2*Math.PI*2))/2
      p2 = @sampleRate*(.0001+.05*o2)
      @phase1 -= 1 if @phase1>=1
      @phase2 -= 1 if @phase2>=1
      @left_buffer[@index] = left
      @right_buffer[@index] = right
      s1 = @read(@left_buffer,@index-p1)
      s2 = @read(@right_buffer,@index-p2)
      # regeneration: feed the swept tap back into the line
      @left_buffer[@index] += s1*@rate*.9
      @right_buffer[@index] += s2*@rate*.9
      buffer[0][i] = s1*@amount-left
      buffer[1][i] = s2*@amount-right
      @index += 1
      @index = 0 if @index>=@left_buffer.length
    return
# Flanger: same topology as Phaser but phases start at 0 and the dry
# signal is ADDED in the output mix (comb reinforcement, not cancellation).
class Flanger
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate)
    @right_buffer = new Float64Array(@sampleRate)
    @phase1 = 0 #Math.random()
    @phase2 = 0 #Math.random()
    @f1 = .0573/@sampleRate
    @f2 = .0497/@sampleRate
    @index = 0
  update:(data)->
    @amount = data.amount
    @rate = data.rate
  # Fractional delay-line read with linear interpolation and wraparound.
  read:(buffer,pos)->
    pos += buffer.length if pos<0
    i = Math.floor(pos)
    a = pos-i
    buffer[i]*(1-a)+buffer[(i+1)%buffer.length]*a
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      @phase1 += @f1
      @phase2 += @f2
      o1 = (1+Math.sin(@phase1*Math.PI*2))/2
      p1 = @sampleRate*(.0001+.05*o1)
      o2 = (1+Math.sin(@phase2*Math.PI*2))/2
      p2 = @sampleRate*(.0001+.05*o2)
      @phase1 -= 1 if @phase1>=1
      @phase2 -= 1 if @phase2>=1
      @left_buffer[@index] = left #+s1*.3
      @right_buffer[@index] = right #+s2*.3
      s1 = @read(@left_buffer,@index-p1)
      s2 = @read(@right_buffer,@index-p2)
      # regeneration: feed the swept tap back into the line
      @left_buffer[@index] += s1*@rate*.9
      @right_buffer[@index] += s2*@rate*.9
      buffer[0][i] = s1*@amount+left
      buffer[1][i] = s2*@amount+right
      @index += 1
      @index = 0 if @index>=@left_buffer.length
    return
# Tempo-ish stereo delay with ping-pong feedback: the left channel reads
# from the right delay line and vice versa. @R is slightly longer than @L
# to widen the stereo image.
class Delay
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate*3)
    @right_buffer = new Float64Array(@sampleRate*3)
    @index = 0
  # `rate` maps to an implied tempo; delay lengths are four ticks.
  update:(data)->
    @amount = data.amount
    @rate = data.rate
    tempo = (30+Math.pow(@rate,2)*170*4)*4
    tick = @sampleRate/(tempo/60)
    @L = Math.round(tick*4)
    @R = Math.round(tick*4+@sampleRate*0.00075)
    @fb = @amount*.95
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      # cross-feedback (ping-pong)
      left += @right_buffer[(@index+@left_buffer.length-@L)%@left_buffer.length]*@fb
      right += @left_buffer[(@index+@right_buffer.length-@R)%@right_buffer.length]*@fb
      buffer[0][i] = @left_buffer[@index] = left
      buffer[1][i] = @right_buffer[@index] = right
      @index += 1
      @index = 0 if @index>=@left_buffer.length
    return
# Stereo widener/panner: subtracts a cross-channel delayed signal (delay
# lengths derived from inter-ear path lengths in meters over c=340 m/s)
# and applies constant-power panning.
class Spatializer
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    @left_buffer = new Float64Array(@sampleRate/10)
    @right_buffer = new Float64Array(@sampleRate/10)
    @index = 0
    @left_delay1 = 0
    @left_delay2 = 0
    @right_delay1 = 0
    @right_delay2 = 0
    @left = 0
    @right = 0
    @left_delay1 = Math.round(@sampleRate*9.7913/340)
    @right_delay1 = Math.round(@sampleRate*11.1379/340)
    # NOTE(review): the *_delay2 taps are computed but unused in process()
    @left_delay2 = Math.round(@sampleRate*11.3179/340)
    @right_delay2 = Math.round(@sampleRate*12.7913/340)
  process:(buffer,length,spatialize,pan)->
    mnt = spatialize
    mnt2 = mnt*mnt # NOTE(review): unused
    # constant-power pan, attenuated as spatialization increases
    left_pan = Math.cos(pan*Math.PI/2)/(1+spatialize)
    right_pan = Math.sin(pan*Math.PI/2)/(1+spatialize)
    left_buffer = buffer[0]
    right_buffer = buffer[1]
    for i in [0..length-1] by 1
      left = left_buffer[i]
      right = right_buffer[i]
      # lowpass the signal stored in the cross-feed delay lines
      @left = left*.5+@left*.5
      @right = right*.5+@right*.5
      @left_buffer[@index] = @left
      @right_buffer[@index] = @right
      left_buffer[i] = (left-mnt*@right_buffer[(@index+@right_buffer.length-@left_delay1)%@right_buffer.length])*left_pan
      right_buffer[i] = (right-mnt*@left_buffer[(@index+@right_buffer.length-@right_delay1)%@right_buffer.length])*right_pan
      @index += 1
      @index = 0 if @index>=@left_buffer.length
    return
# Three-band EQ: a fixed band-pass biquad (900 Hz, Q=0.5) extracts the
# mid band; a one-pole lowpass splits the remainder into low/high; the
# three bands are re-summed with per-band gains.
class EQ
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    # band pass for medium freqs (RBJ biquad coefficients)
    @mid = 900
    q = .5
    w0 = 2*Math.PI*@mid/@sampleRate
    cosw0 = Math.cos(w0)
    sinw0 = Math.sin(w0)
    alpha = sinw0 / (2 * q)
    invOnePlusAlpha = 1/(1 + alpha)
    oneLessCosw0 = 1 - cosw0
    @a0 = (-2 * cosw0) * invOnePlusAlpha
    @a1 = (1 - alpha) * invOnePlusAlpha
    @b0 = q * alpha * invOnePlusAlpha
    @b1 = 0
    @b2 = -@b0
    @llow = 0
    @rlow = 0
    @lfm0 = 0
    @lfm1 = 0
    @rfm0 = 0
    @rfm1 = 0
    # state for the mono processBandPass helper
    @fm0 = 0
    @fm1 = 0
    # NOTE: @mid is reused from here on as the mid-band GAIN
    @low = 1
    @mid = 1
    @high = 1
  # Knobs are 0..1 with .5 = unity gain.
  update:(data)->
    @low = data.low*2
    @mid = data.mid*2
    @high = data.high*2
  # Mono band-pass step using the precomputed coefficients (direct form II).
  # Fixed: previously referenced bare a0/a1/b0/b1/b2 locals that were never
  # defined (ReferenceError if called) and uninitialized @fm0/@fm1.
  processBandPass:(sig,cutoff,q)->
    w = sig - @a0*@fm0 - @a1*@fm1
    sig = @b0*w + @b1*@fm0 + @b2*@fm1
    @fm1 = @fm0
    @fm0 = w
    sig
  process:(buffer,length)->
    for i in [0..length-1] by 1
      left = buffer[0][i]
      right = buffer[1][i]
      # mid via biquad; low via one-pole smoothing; high is the residue
      lw = left - @a0*@lfm0 - @a1*@lfm1
      lmid = @b0*lw + @b1*@lfm0 + @b2*@lfm1
      @lfm1 = @lfm0
      @lfm0 = lw
      left -= lmid
      @llow = left*.1+@llow*.9
      lhigh = left-@llow
      buffer[0][i] = @llow*@low+lmid*@mid+lhigh*@high
      rw = right - @a0*@rfm0 - @a1*@rfm1
      rmid = @b0*rw + @b1*@rfm0 + @b2*@rfm1
      @rfm1 = @rfm0
      @rfm0 = rw
      right -= rmid
      @rlow = right*.1+@rlow*.9
      rhigh = right-@rlow
      buffer[1][i] = @rlow*@low+rmid*@mid+rhigh*@high
    return
# Lowpass-feedback comb filter (Freeverb-style building block).
class CombFilter
  constructor:(@length,@feedback = .5)->
    @buffer = new Float64Array(@length)
    @index = 0
    @store = 0
    @damp = .2
  # Read the delayed sample, damp it, write input + feedback back in place.
  process:(input)->
    out = @buffer[@index]
    # one-pole lowpass in the feedback path
    @store = out*(1-@damp)+@store*@damp
    @buffer[@index] = input+@store*@feedback
    @index += 1
    @index = 0 if @index>=@length
    out
# Schroeder allpass diffuser (Freeverb-style building block).
class AllpassFilter
  constructor:(@length,@feedback = .5)->
    @buffer = new Float64Array(@length)
    @index = 0
  # Output = delayed - input; line stores input + feedback*delayed.
  process:(input)->
    delayed = @buffer[@index]
    @buffer[@index] = input+delayed*@feedback
    @index += 1
    @index = 0 if @index>=@length
    -input+delayed
# Freeverb-style reverb: 8 parallel lowpass-feedback combs per channel
# (right channel detuned by `stereospread` samples) followed by 4 serial
# allpass diffusers. Output is a mono-summed wet mix with stereo spread.
class Reverb
  constructor:(@engine)->
    @sampleRate = @engine.sampleRate
    combtuning = [1116,1188,1277,1356,1422,1491,1557,1617]
    allpasstuning = [556,441,341,225]
    stereospread = 23
    @left_combs = []
    @right_combs = []
    @left_allpass = []
    @right_allpass = []
    @res = [0,0]
    @spread = .25
    @wet = .025
    for c in combtuning
      @left_combs.push new CombFilter c,.9
      @right_combs.push new CombFilter c+stereospread,.9
    for a in allpasstuning
      @left_allpass.push new AllpassFilter a,.5
      @right_allpass.push new AllpassFilter a+stereospread,.5
    # Disabled early-reflection experiment kept for reference:
    #@reflections = []
    #@reflections.push new Reflection 7830,.1,.5,1
    #@reflections.push new Reflection 10670,-.2,.6,.9
    #@reflections.push new Reflection 13630,.7,.7,.8
    #@reflections.push new Reflection 21870,-.6,.8,.7
    #@reflections.push new Reflection 35810,-.1,.9,.6
  # `amount` sets the wet level; `rate` sets decay (comb feedback) and
  # narrows the stereo spread as it grows.
  update:(data)->
    @wet = data.amount*.05
    feedback = .7+Math.pow(data.rate,.25)*.29
    damp = .2 #.4-.4*data.rate
    for i in [0..@left_combs.length-1] by 1
      @left_combs[i].feedback = feedback
      @right_combs[i].feedback = feedback
      @left_combs[i].damp = damp
      @right_combs[i].damp = damp
    @spread = .5-data.rate*.5
  process:(buffer,length)->
    for s in [0..length-1] by 1
      outL = 0
      outR = 0
      left = buffer[0][s]
      right = buffer[1][s]
      input = (left+right)*.5
      for i in [0..@left_combs.length-1]
        outL += @left_combs[i].process(input)
        outR += @right_combs[i].process(input)
      for i in [0..@left_allpass.length-1]
        outL = @left_allpass[i].process(outL)
        outR = @right_allpass[i].process(outR)
      #for i in [0..@reflections.length-1]
      #  r = @reflections[i].process(input)
      #  outL += r[0]
      #  outR += r[1]
      buffer[0][s] = (outL*(1-@spread)+outR*@spread)*@wet+left*(1-@wet)
      buffer[1][s] = (outR*(1-@spread)+outL*@spread)*@wet+right*(1-@wet)
    return
# One polyphonic synth voice: two oscillators + noise routed through a
# filter, shaped by an amp envelope (env1), a mod envelope (env2) and two
# LFOs, each with a switch-selected modulation destination.
class Voice
  @oscillators = [SawOscillator,SquareOscillator,SineOscillator,VoiceOscillator,StringOscillator]
  constructor:(@engine)->
    @osc1 = new SineOscillator @
    @osc2 = new SineOscillator @
    @noise = new Noise @
    @lfo1 = new LFO @
    @lfo2 = new LFO @
    @filter = new Filter @
    @env1 = new AmpEnvelope @
    @env2 = new ModEnvelope @
    # cutoff smoothing step, normalized to 44.1 kHz reference
    @filter_increment = 0.0005*44100/@engine.sampleRate
    @modulation = 0
    @noteon_time = 0
  # (Re)build oscillators to match the layer's osc types, then reset units.
  init:(layer)->
    if @osc1 not instanceof Voice.oscillators[layer.inputs.osc1.type]
      @osc1 = new Voice.oscillators[layer.inputs.osc1.type] @
    if @osc2 not instanceof Voice.oscillators[layer.inputs.osc2.type]
      @osc2 = new Voice.oscillators[layer.inputs.osc2.type] @
    @osc1.init(layer.inputs.osc1,layer.inputs.sync)
    @osc2.init(layer.inputs.osc2,layer.inputs.sync)
    @noise.init(layer,layer.inputs.sync)
    @lfo1.init(layer.inputs.lfo1,layer.inputs.sync)
    @lfo2.init(layer.inputs.lfo2,layer.inputs.sync)
    @filter.init(layer)
    @env1.init(layer.inputs.env1)
    @env2.init(layer.inputs.env2)
    @updateConstantMods()
  # Re-read parameters while the voice keeps sounding (live edits).
  update:()->
    return if not @layer?
    if @osc1 not instanceof Voice.oscillators[@layer.inputs.osc1.type]
      @osc1 = new Voice.oscillators[@layer.inputs.osc1.type] @,@layer.inputs.osc1
    if @osc2 not instanceof Voice.oscillators[@layer.inputs.osc2.type]
      @osc2 = new Voice.oscillators[@layer.inputs.osc2.type] @,@layer.inputs.osc2
    @osc1.update(@layer.inputs.osc1,@layer.inputs.sync)
    @osc2.update(@layer.inputs.osc2,@layer.inputs.sync)
    @env1.update()
    @env2.update()
    @lfo1.update()
    @lfo2.update()
    @filter.update()
    @updateConstantMods()
  # Precompute per-note values (amps, velocity routing, keyboard-tracked
  # cutoff base) so the per-sample process() stays cheaper.
  updateConstantMods:()->
    @osc1_amp = DBSCALE(@inputs.osc1.amp,3)
    @osc2_amp = DBSCALE(@inputs.osc2.amp,3)
    @osc1_mod = @inputs.osc1.mod
    @osc2_mod = @inputs.osc2.mod
    @noise_amp = DBSCALE(@inputs.noise.amp,3)
    @noise_mod = @inputs.noise.mod
    if @inputs.velocity.amp>0
      p = @inputs.velocity.amp
      p *= p
      p *= 4
      #amp = Math.pow(@velocity,p) #Math.exp((-1+@velocity)*5)
      norm = @inputs.velocity.amp*4
      amp = Math.exp(@velocity*norm)/Math.exp(norm)
      #amp = @inputs.velocity.amp*amp+(1-@inputs.velocity.amp)
    else
      amp = 1
    @osc1_amp *= amp
    @osc2_amp *= amp
    @noise_amp *= amp
    # keyboard tracking of the filter cutoff (normalized log frequency)
    c = Math.log(1024*@freq/22000)/(10*Math.log(2))
    @cutoff_keymod = (c-.5)*@inputs.filter.follow
    @cutoff_base = @inputs.filter.cutoff+@cutoff_keymod
    @env2_amount = @inputs.env2.amount
    @lfo1_rate = @inputs.lfo1.rate
    @lfo1_amount = @inputs.lfo1.amount
    @lfo2_rate = @inputs.lfo2.rate
    @lfo2_amount = @inputs.lfo2.amount
    # Velocity destination indices (see switch below):
    # "Mod"
    #
    # "Filter Cutoff"
    # "Filter Resonance"
    #
    # "Osc1 Mod"
    # "Osc1 Amp"
    #
    # "Osc2 Mod"
    # "Osc2 Amp"
    #
    # "Noise Amp"
    # "Noise Color"
    #
    # "Env1 Attack"
    # "Env2 Attack"
    # "Env2 Amount"
    #
    # "LFO1 Amount"
    # "LFO1 Rate"
    #
    # "LFO2 Amount"
    # "LFO2 Rate"
    # ]
    # NOTE(review): out=2 ("Filter Resonance" per the list above) has no
    # branch here — confirm whether that is intentional.
    switch @inputs.velocity.out
      when 0 # Oscs Mod
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @osc1_mod = Math.min(1,Math.max(0,@osc1_mod+mod))
        @osc2_mod = Math.min(1,Math.max(0,@osc2_mod+mod))
      when 1 # Filter Cutoff
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @cutoff_base += mod
      when 3 # Osc1 Mod
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @osc1_mod = Math.min(1,Math.max(0,@osc1_mod+mod))
      when 4 # Osc1 Amp
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @osc1_amp = Math.max(0,@osc1_amp+mod)
      when 5 # Osc2 Mod
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @osc2_mod = Math.min(1,Math.max(0,@osc2_mod+mod))
      when 6 # Osc2 Amp
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @osc2_amp = Math.max(0,@osc2_amp+mod)
      when 7 # Noise Amp
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @noise_amp = Math.max(0,@noise_amp+mod)
      when 8 # Noise Color
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @noise_mod = Math.min(1,Math.max(0,@noise_mod+mod))
      when 9 # Env1 Attack
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        a = Math.max(0,Math.min(@inputs.env1.a+mod,1))
        @env1.update(a)
      when 10 # Env2 Attack
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        a = Math.max(0,Math.min(@inputs.env2.a+mod,1))
        @env2.update(a)
      when 11 # Env2 Amount
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @env2_amount = Math.max(0,Math.min(1,@env2_amount+mod))
      when 12 # LFO1 Amount
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @lfo1_amount = Math.max(0,@lfo1_amount+mod)
      when 13 # LFO1 Rate
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @lfo1_rate = Math.min(1,Math.max(0,@lfo1_rate+mod))
      when 14 # LFO2 Amount
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @lfo2_amount = Math.max(0,@lfo2_amount+mod)
      when 15 # LFO2 Rate
        mod = @velocity*(@inputs.velocity.amount-.5)*2
        @lfo2_rate = Math.min(1,Math.max(0,@lfo2_rate+mod))
    if @freq?
      c = Math.log(1024*@freq/22000)/(10*Math.log(2))
      @cutoff_keymod = (c-.5)*@inputs.filter.follow
  # Start (or legato-retarget) the voice. Glide interpolates from the
  # layer's previous key over a time derived from inputs.glide.
  noteOn:(layer,@key,velocity,legato=false)->
    @velocity = velocity/127
    if @layer?
      @layer.removeVoice @
    @layer = layer
    @inputs = @layer.inputs
    if legato and @on
      @freq = 440*Math.pow(Math.pow(2,1/12),@key-57)
      if layer.last_key?
        @glide_from = layer.last_key
        @glide = true
        @glide_phase = 0
        glide_time = (@inputs.glide*.025+Math.pow(@inputs.glide,16)*.975)*10
        @glide_inc = 1/(glide_time*@engine.sampleRate+1)
    else
      @freq = 440*Math.pow(Math.pow(2,1/12),@key-57)
      @init @layer
    @on = true
    @cutoff = 0
    @modulation = @layer.instrument.modulation
    @pitch_bend = @layer.instrument.pitch_bend
    @modulation_v = 0
    @pitch_bend_v = 0
    @noteon_time = Date.now()
  noteOff:()->
    @on = false
  # Render one sample: apply glide/pitch-bend, route mod wheel, env2, and
  # both LFOs to their destinations, mix oscillators, then envelope+filter.
  process:()->
    osc1_mod = @osc1_mod
    osc2_mod = @osc2_mod
    osc1_amp = @osc1_amp
    osc2_amp = @osc2_amp
    if @glide
      k = @glide_from*(1-@glide_phase)+@key*@glide_phase
      osc1_freq = osc2_freq = 440*Math.pow(Math.pow(2,1/12),k-57)
      @glide_phase += @glide_inc
      if @glide_phase>=1
        @glide = false
    else
      osc1_freq = osc2_freq = @freq
    # smoothed pitch bend (+/- 2 semitones)
    if Math.abs(@pitch_bend-@layer.instrument.pitch_bend)>.0001
      @pitch_bend_v += .001*(@layer.instrument.pitch_bend-@pitch_bend)
      @pitch_bend_v *= .5
      @pitch_bend += @pitch_bend_v
    if Math.abs(@pitch_bend-.5)>.0001
      p = @pitch_bend*2-1
      p *= 2
      f = Math.pow(Math.pow(2,1/12),p)
      osc1_freq *= f
      osc2_freq *= f
    noise_amp = @noise_amp
    noise_mod = @noise_mod
    lfo1_rate = @lfo1_rate
    lfo1_amount = @lfo1_amount
    lfo2_rate = @lfo2_rate
    lfo2_amount = @lfo2_amount
    cutoff = @cutoff_base
    q = @inputs.filter.resonance
    # smoothed mod wheel, routed per inputs.modulation.out
    if Math.abs(@modulation-@layer.instrument.modulation)>.0001
      @modulation_v += .001*(@layer.instrument.modulation-@modulation)
      @modulation_v *= .5
      @modulation += @modulation_v
    switch @inputs.modulation.out
      when 0 # Amp
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        osc1_amp = Math.max(0,osc1_amp+mod)
        osc2_amp = Math.max(0,osc2_amp+mod)
        noise_amp = Math.max(0,noise_amp*(1+mod))
      when 1 # Mod
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
        osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
      when 2 # Osc1 Amp
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        osc1_amp = Math.max(0,osc1_amp+mod)
      when 3 # Osc1 Mod
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
      when 4 # Osc2 Amp
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        osc2_amp = Math.max(0,osc2_amp+mod)
      when 5 # Osc2 Mod
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
      when 6 # Noise Amp
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        noise_amp = Math.max(0,noise_amp+mod)
      when 7 # Noise Color
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
      when 8 # Filter Cutoff
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        cutoff += mod
      when 9 # Filter Resonance
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        q = Math.max(0,Math.min(1,q+mod))
      when 10 # LFO1 Amount
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        lfo1_amount = Math.max(0,Math.min(1,lfo1_amount+mod))
      when 11 # LFO1 Rate
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        lfo1_rate = Math.max(0,Math.min(1,lfo1_rate+mod))
      when 12 # LFO2 Amount
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        lfo2_amount = Math.max(0,Math.min(1,lfo2_amount+mod))
      when 13 # LFO2 Rate
        mod = (@inputs.modulation.amount-.5)*2*@modulation
        lfo2_rate = Math.max(0,Math.min(1,lfo2_rate+mod))
    switch @inputs.env2.out
      when 0 # Filter Cutoff
        cutoff += @env2.process(@on)*(@env2_amount*2-1)
      when 1 # Filter Resonance
        q = Math.max(0,Math.min(1,@env2.process(@on)*(@env2_amount*2-1)))
      when 2 # Pitch
        mod = @env2_amount*2-1
        mod *= @env2.process(@on)
        mod = 1+mod
        osc1_freq *= mod
        osc2_freq *= mod
      when 3 # Mod
        mod = @env2.process(@on)*(@env2_amount*2-1)
        osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
        osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
      when 4 # Amp
        mod = @env2.process(@on)*(@env2_amount*2-1)
        osc1_amp = Math.max(0,osc1_amp+mod)
        osc2_amp = Math.max(0,osc2_amp+mod)
        noise_amp = Math.max(0,noise_amp*(1+mod))
      when 5 #Osc1 Pitch
        mod = @env2_amount*2-1
        mod *= @env2.process(@on)
        osc1_freq *= 1+mod
      when 6 #Osc1 Mod
        mod = @env2.process(@on)*(@env2_amount*2-1)
        osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
      when 7 #Osc1 Amp
        mod = @env2.process(@on)*(@env2_amount*2-1)
        osc1_amp = Math.max(0,osc1_amp+mod)
      when 8 #Osc2 Pitch
        mod = @env2_amount*2-1
        mod *= @env2.process(@on)
        # FIX: this branch previously modulated osc1_freq (copy-paste bug)
        osc2_freq *= 1+mod
      when 9 #Osc2 Mod
        mod = @env2.process(@on)*(@env2_amount*2-1)
        osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
      when 10 #Osc2 Amp
        mod = @env2.process(@on)*(@env2_amount*2-1)
        osc2_amp = Math.max(0,osc2_amp+mod)
      when 11 # Noise amp
        mod = @env2.process(@on)*(@env2_amount*2-1)
        noise_amp = Math.max(0,noise_amp+mod)
      when 12 # Noise color
        mod = @env2.process(@on)*(@env2_amount*2-1)
        noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
      when 13 # LFO1 Amount
        mod = @env2.process(@on)*(@env2_amount*2-1)
        lfo1_amount = Math.min(1,Math.max(0,lfo1_amount+mod))
      when 14 # LFO1 rate
        mod = @env2.process(@on)*(@env2_amount*2-1)
        lfo1_rate = Math.min(1,Math.max(0,lfo1_rate+mod))
      when 15 # LFO2 Amount
        mod = @env2.process(@on)*(@env2_amount*2-1)
        lfo2_amount = Math.min(1,Math.max(0,lfo2_amount+mod))
      when 16 # LFO2 rate
        mod = @env2.process(@on)*(@env2_amount*2-1)
        lfo2_rate = Math.min(1,Math.max(0,lfo2_rate+mod))
    switch @inputs.lfo1.out
      when 0 # Pitch
        mod = lfo1_amount
        if @inputs.lfo1.audio
          mod = 1+mod*mod*@lfo1.process(lfo1_rate)*16
        else
          mod = 1+mod*mod*@lfo1.process(lfo1_rate)
        osc1_freq *= mod
        osc2_freq *= mod
      when 1 # Mod
        mod = @lfo1.process(lfo1_rate)*lfo1_amount
        osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
        osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
      when 2 # Amp
        mod = @lfo1.process(lfo1_rate)*lfo1_amount
        osc1_amp = Math.max(0,osc1_amp+mod)
        osc2_amp = Math.max(0,osc2_amp+mod)
        noise_amp = Math.max(0,noise_amp*(1+mod))
      when 3 #Osc1 Pitch
        mod = lfo1_amount
        mod = 1+mod*mod*@lfo1.process(lfo1_rate)
        osc1_freq *= mod
      when 4 #Osc1 Mod
        mod = @lfo1.process(lfo1_rate)*lfo1_amount
        osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
      when 5 #Osc1 Amp
        mod = @lfo1.process(lfo1_rate)*lfo1_amount
        osc1_amp = Math.max(0,osc1_amp+mod)
      when 6 #Osc2 Pitch
        mod = lfo1_amount
        mod = 1+mod*mod*@lfo1.process(lfo1_rate)
        osc2_freq *= mod
      when 7 #Osc2 Mod
        mod = @lfo1.process(lfo1_rate)*lfo1_amount
        osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
      when 8 #Osc2 Amp
        mod = @lfo1.process(lfo1_rate)*lfo1_amount
        osc2_amp = Math.max(0,osc2_amp+mod)
      when 9 # Noise amp
        mod = @lfo1.process(lfo1_rate)*lfo1_amount
        noise_amp = Math.max(0,noise_amp+mod)
      when 10 # Noise color
        mod = @lfo1.process(lfo1_rate)*lfo1_amount
        noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
      when 11
        cutoff += @lfo1.process(lfo1_rate)*lfo1_amount
      when 12
        q = Math.max(0,Math.min(1,@lfo1.process(lfo1_rate)*lfo1_amount))
      when 13 # LFO2 Amount
        mod = @lfo1.process(lfo1_rate)*lfo1_amount
        lfo2_amount = Math.min(1,Math.max(0,lfo2_amount+mod))
      when 14 # LFO2 rate
        mod = @lfo1.process(lfo1_rate)*lfo1_amount
        lfo2_rate = Math.min(1,Math.max(0,lfo2_rate+mod))
    switch @inputs.lfo2.out
      when 0 # Pitch
        mod = lfo2_amount
        if @inputs.lfo2.audio
          mod = 1+mod*mod*@lfo2.process(lfo2_rate)*16
        else
          mod = 1+mod*mod*@lfo2.process(lfo2_rate)
        osc1_freq *= mod
        osc2_freq *= mod
      when 1 # Mod
        mod = @lfo2.process(lfo2_rate)*lfo2_amount
        osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
        osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
      when 2 # Amp
        mod = @lfo2.process(lfo2_rate)*lfo2_amount
        osc1_amp = Math.max(0,osc1_amp+mod)
        osc2_amp = Math.max(0,osc2_amp+mod)
        noise_amp = Math.max(0,noise_amp*(1+mod))
      when 3 #Osc1 Pitch
        mod = lfo2_amount
        mod = 1+mod*mod*@lfo2.process(lfo2_rate)
        osc1_freq *= mod
      when 4 #Osc1 Mod
        mod = @lfo2.process(lfo2_rate)*lfo2_amount
        osc1_mod = Math.min(1,Math.max(0,osc1_mod+mod))
      when 5 #Osc1 Amp
        mod = @lfo2.process(lfo2_rate)*lfo2_amount
        osc1_amp = Math.max(0,osc1_amp+mod)
      when 6 #Osc2 Pitch
        mod = lfo2_amount
        mod = 1+mod*mod*@lfo2.process(lfo2_rate)
        osc2_freq *= mod
      when 7 #Osc2 Mod
        mod = @lfo2.process(lfo2_rate)*lfo2_amount
        osc2_mod = Math.min(1,Math.max(0,osc2_mod+mod))
      when 8 #Osc2 Amp
        mod = @lfo2.process(lfo2_rate)*lfo2_amount
        osc2_amp = Math.max(0,osc2_amp+mod)
      when 9 # Noise amp
        mod = @lfo2.process(lfo2_rate)*lfo2_amount
        noise_amp = Math.max(0,noise_amp+mod)
      when 10 # Noise color
        mod = @lfo2.process(lfo2_rate)*lfo2_amount
        noise_mod = Math.min(1,Math.max(0,noise_mod+mod))
      when 11
        cutoff += @lfo2.process(lfo2_rate)*lfo2_amount
      when 12
        q = Math.max(0,Math.min(1,@lfo2.process(lfo2_rate)*lfo2_amount))
    # oscillator combination: 1 = sum-squared, 2 = osc1 amplitude-modulates
    # osc2's frequency, default = plain mix
    switch @inputs.combine
      when 1
        s1 = @osc1.process(osc1_freq,osc1_mod)*osc1_amp
        s2 = @osc2.process(osc2_freq,osc2_mod)*osc2_amp
        sig = (s1+s2)*(s1+s2)
        #sig = @osc1.process(osc1_freq,osc1_mod)*@osc2.process(osc2_freq,osc2_mod)*Math.max(osc1_amp,osc2_amp)*4
      when 2
        sig = @osc2.process(osc2_freq*Math.max(0,1-@osc1.process(osc1_freq,osc1_mod)*osc1_amp),osc2_mod)*osc2_amp
      else
        sig = @osc1.process(osc1_freq,osc1_mod)*osc1_amp+@osc2.process(osc2_freq,osc2_mod)*osc2_amp
    if noise_amp>0
      sig += @noise.process(noise_mod)*noise_amp
    # NOTE(review): env2 is stepped a second time here and the result is
    # unused — this doubles the envelope's advance rate per sample when a
    # branch above also called it; confirm whether intended.
    mod = @env2.process(@on)
    # slew-limit the cutoff toward its target
    if not @cutoff
      @cutoff = cutoff
    else
      if @cutoff<cutoff
        @cutoff += Math.min(cutoff-@cutoff,@filter_increment)
      else if @cutoff>cutoff
        @cutoff += Math.max(cutoff-@cutoff,-@filter_increment)
      cutoff = @cutoff
    # VCF: map normalized cutoff to Hz (exponential, ~21.5 Hz .. 22 kHz)
    cutoff = Math.pow(2,Math.max(0,Math.min(cutoff,1))*10)*22000/1024
    sig *= @env1.process(@on)
    #@cutoff += 0.001*(cutoff-@cutoff)
    sig = @filter.process(sig,cutoff,q*q*9.5+.5)
# An instrument is a set of layers sharing note events, a mod wheel value
# and a pitch-bend value; process() sums the layers into @output.
class Instrument
  constructor:(@engine)->
    @layers = []
    @layers.push new Layer @
    @modulation = 0
    @pitch_bend = .5
  noteOn:(key,velocity)->
    for l in @layers
      l.noteOn(key,velocity)
    return
  noteOff:(key)->
    for l in @layers
      l.noteOff(key)
    return
  setModulation:(@modulation)->
  setPitchBend:(@pitch_bend)->
  # Render `length` samples into @output (stereo Float64Array pair),
  # allocating/growing the buffers lazily.
  process:(length)->
    if false #@layers.length == 1
      @layers[0].process(length)
      @output = @layers[0].output
    else
      if not @output? or @output[0].length<length
        @output = [new Float64Array(length),new Float64Array(length)]
      for l in @layers
        l.process(length)
      for i in [0..length-1] by 1
        left = 0
        right = 0
        for l in @layers
          left += l.output[0][i]
          right += l.output[1][i]
        # res = @fx1.process(sig)
        @output[0][i] = left
        @output[1][i] = right
    return
# Effect-slot class tables; the index matches layer.inputs.fx1.type /
# layer.inputs.fx2.type (−1 means no effect).
FX1 = [
  Distortion
  BitCrusher
  Chorus
  Flanger
  Phaser
  Delay
]
FX2 = [
  Delay
  Reverb
  Chorus
  Flanger
  Phaser
]
# One synth layer: holds the parameter tree (@inputs), the active voices,
# two switchable FX slots, an EQ and a spatializer. Voices are allocated
# from the engine's shared pool.
class Layer
  constructor:(@instrument)->
    @engine = @instrument.engine
    @voices = []
    @eq = new EQ(@engine)
    @spatializer = new Spatializer(@engine)
    # Default patch; all knob values are normalized 0..1 unless noted.
    @inputs =
      osc1:
        type: 0
        tune: .5
        coarse: .5
        amp: .5
        mod: 0
      osc2:
        type: 0
        tune: .5
        coarse: .5
        amp: .5
        mod: 0
      combine: 0
      noise:
        amp: 0
        mod: 0
      filter:
        cutoff: 1
        resonance: 0
        type: 0
        slope: 1
        follow: 0
      disto:
        wet:0
        drive:0
      bitcrusher:
        wet: 0
        drive: 0
        crush: 0
      env1:
        a: 0
        d: 0
        s: 1
        r: 0
      env2:
        a: .1
        d: .1
        s: .5
        r: .1
        out: 0
        amount: .5
      lfo1:
        type: 0
        amount: 0
        rate: .5
        out: 0
      lfo2:
        type: 0
        amount: 0
        rate: .5
        out: 0
      fx1:
        type: -1
        amount: 0
        rate: 0
      fx2:
        type: -1
        amount: 0
        rate: 0
      eq:
        low: .5
        mid: .5
        high: .5
      spatialize: .5
      pan: .5
      polyphony: 1
      glide: .5
      sync: 1
      velocity:
        out: 0
        amount: .5
        amp: .5
      modulation:
        out: 0
        amount: .5
  # Mono mode (polyphony 1) retriggers the last voice legato-style;
  # otherwise a voice is taken (possibly stolen) from the engine pool.
  noteOn:(key,velocity)->
    if @inputs.polyphony == 1 and @last_voice? and @last_voice.on
      voice = @last_voice
      voice.noteOn @,key,velocity,true
      @voices.push voice
    else
      voice = @engine.getVoice()
      voice.noteOn @,key,velocity
      @voices.push voice
    @last_voice = voice
    @last_key = key
  removeVoice:(voice)->
    index = @voices.indexOf(voice)
    if index>=0
      @voices.splice index,1
  noteOff:(key)->
    for v in @voices
      if v.key == key
        v.noteOff()
    return
  # Rebuild/refresh the FX slot instances to match the selected types.
  update:()->
    if @inputs.fx1.type>=0
      if not @fx1? or @fx1 not instanceof FX1[@inputs.fx1.type]
        @fx1 = new FX1[@inputs.fx1.type] @engine
    else
      @fx1 = null
    if @inputs.fx2.type>=0
      if not @fx2? or @fx2 not instanceof FX2[@inputs.fx2.type]
        @fx2 = new FX2[@inputs.fx2.type] @engine
    else
      @fx2 = null
    if @fx1?
      @fx1.update(@inputs.fx1)
    if @fx2?
      @fx2.update(@inputs.fx2)
    @eq.update(@inputs.eq)
  # Render: reap silent released voices, mix the rest (mono → both
  # channels), then run spatializer → fx1 → fx2 → EQ in place.
  process:(length)->
    if not @output? or @output[0].length<length
      @output = [new Float64Array(length),new Float64Array(length)]
    for i in [@voices.length-1..0] by -1
      v = @voices[i]
      if not v.on and v.env1.sig<.00001
        v.env1.sig = 0
        @removeVoice v
    for i in [0..length-1] by 1
      sig = 0
      for v in @voices
        sig += v.process()
      @output[0][i] = sig
      @output[1][i] = sig
    @spatializer.process(@output,length,@inputs.spatialize,@inputs.pan)
    if @fx1?
      @fx1.process(@output,length)
    if @fx2?
      @fx2.process(@output,length)
    @eq.process(@output,length)
    return
# Top-level synthesiser: owns the shared voice pool, the instrument list and
# the master mix / soft-clip stage driven by the audio worklet processor.
class AudioEngine
  constructor:(@sampleRate)->
    @voices = []
    @voice_index = 0
    @num_voices = 8                 # fixed size of the shared voice pool
    for i in [0..@num_voices-1]
      @voices[i] = new Voice @
    @instruments = []
    @instruments.push new Instrument @
    @avg = 0                        # kept for compatibility; not read in this class
    @samples = 0                    # sample counter for the once-per-second timing log
    @time = 0                       # accumulated render time (ms) in the current second
    # NOTE(review): @inputs is never assigned on AudioEngine, so @layer.inputs
    # is undefined here — confirm whether @layer is still referenced anywhere.
    @layer =
      inputs: @inputs
    @start = Date.now()
  # Dispatch one raw MIDI message [status, data1, data2].
  event:(data)->
    if data[0] == 144 and data[2]>0                              # note-on
      @instruments[0].noteOn(data[1],data[2])
    else if data[0] == 128 or (data[0] == 144 and data[2] == 0)  # note-off
      @instruments[0].noteOff(data[1])
    else if data[0]>=176 and data[0]<192 and data[1] == 1 # modulation wheel
      @instruments[0].setModulation(data[2]/127)
    else if data[0]>=224 and data[0]<240 # pitch bend
      v = (data[1]+128*data[2])          # 14-bit value, centre = 8192
      console.info "PB value=#{v}"
      if v>=8192
        v = (v-8192)/(16383-8192)        # upper half -> [.5, 1]
        v = .5+.5*v
      else
        v = .5*v/8192                    # lower half -> [0, .5)
      console.info("Pitch Bend = #{v}")
      @instruments[0].setPitchBend(v)
    return
  # Seconds elapsed since engine construction.
  getTime:()->
    (Date.now()-@start)/1000
  # Voice allocation / stealing: prefer a free voice with the quietest
  # release tail; if all voices are busy, steal the oldest sounding one.
  # NOTE(review): the unadorned range [1..@voices.length-1] iterates
  # *backwards* (1, 0) when fewer than 2 voices exist; harmless while
  # @num_voices is fixed at 8, but worth confirming before changing it.
  getVoice:()->
    best = @voices[0]
    for i in [1..@voices.length-1]
      v = @voices[i]
      if best.on
        if v.on
          if v.noteon_time<best.noteon_time
            best = v
        else
          best = v
      else
        if not v.on and v.env1.sig<best.env1.sig
          best = v
    return best
  # Push the current parameter values into every voice and layer.
  updateVoices:()->
    for v in @voices
      v.update()
    for l in @instruments[0].layers
      l.update()
    return
  # AudioWorklet render callback: mix all instruments into the first two
  # output channels, attenuate, and soft-clip with a smooth exponential
  # curve that keeps |sig| < 1.  (Removed an unused `res = [0,0]` local.)
  process:(inputs,outputs,parameters)->
    output = outputs[0]
    time = Date.now()
    for inst in @instruments
      inst.process(output[0].length)
    for channel,i in output
      if i<2
        for j in [0..channel.length-1] by 1
          sig = 0
          for inst in @instruments
            sig += inst.output[i][j]
          sig *= .125
          sig = if sig<0 then -(1-Math.exp(sig)) else 1-Math.exp(-sig)
          channel[j] = sig
    # `channel` deliberately survives the loop (CoffeeScript variables are
    # function-scoped); it still refers to the last processed channel here.
    @time += Date.now()-time
    @samples += channel.length
    if @samples >= @sampleRate
      @samples -= @sampleRate
      console.info @time+" ms ; buffer size = "+channel.length
      @time = 0
    return
# Precomputed band-limited step table: sums the odd harmonics 1..31 of a
# sine over one half-cycle of phase, then divides every sample by the
# final one so the table ends exactly at 1.
class Blip
  constructor:()->
    @size = 512
    @samples = new Float64Array(@size+1)
    for idx in [0..@size] by 1
      # phase runs linearly from -0.25 to +0.25 across the table
      phase = (idx/@size-.5)*.5
      for harmonic in [1..31] by 2
        @samples[idx] += Math.sin(phase*2*Math.PI*harmonic)/harmonic
    norm = @samples[@size]
    @samples[idx] /= norm for idx in [0..@size] by 1
`
// Embedded JavaScript (CoffeeScript backtick passthrough): the
// AudioWorkletProcessor bridging the message port to the synth engine.
// Messages are JSON: {name:"note", data:[...]} for MIDI events, or
// {name:"param", id:"a.b.c", value:v} to set a nested input parameter
// on the first layer of the first instrument.
class MyWorkletProcessor extends AudioWorkletProcessor {
  constructor() {
    super()
    this.synth = new AudioEngine(sampleRate)
    this.port.onmessage = (e) => {
      console.info(e)
      const msg = JSON.parse(e.data)
      if (msg.name == "note") {
        this.synth.event(msg.data)
      } else if (msg.name == "param") {
        // Walk the dotted path down the inputs tree, then assign the leaf.
        const path = msg.id.split(".")
        let target = this.synth.instruments[0].layers[0].inputs
        while (path.length > 1) {
          target = target[path.shift()]
        }
        target[path[0]] = msg.value
        this.synth.updateVoices()
      }
    }
  }
  process(inputs, outputs, parameters) {
    this.synth.process(inputs, outputs, parameters)
    return true
  }
}
registerProcessor('my-worklet-processor', MyWorkletProcessor)
`
|
[
{
"context": "ittle Mocha Reference ========\nhttps://github.com/visionmedia/should.js\nhttps://github.com/visionmedia/mocha\n\nM",
"end": 169,
"score": 0.9995896816253662,
"start": 158,
"tag": "USERNAME",
"value": "visionmedia"
},
{
"context": "thub.com/visionmedia/should.js\nhttps:... | src/test/voicetext_test.coffee | pchw/node-voicetext | 20 | 'use strict'
should = require 'should'
voicetext = require '../../lib/voicetext.js'
###
======== A Handy Little Mocha Reference ========
https://github.com/visionmedia/should.js
https://github.com/visionmedia/mocha
Mocha hooks:
before ()-> # before describe
after ()-> # after describe
beforeEach ()-> # before each it
afterEach ()-> # after each it
Should assertions:
should.exist('hello')
should.fail('expected an error!')
true.should.be.ok
true.should.be.true
false.should.be.false
(()-> arguments)(1,2,3).should.be.arguments
[1,2,3].should.eql([1,2,3])
should.strictEqual(undefined, value)
user.age.should.be.within(5, 50)
username.should.match(/^\w+$/)
user.should.be.a('object')
[].should.be.an.instanceOf(Array)
user.should.have.property('age', 15)
user.age.should.be.above(5)
user.age.should.be.below(100)
user.pets.should.have.length(5)
res.should.have.status(200) #res.statusCode should be 200
res.should.be.json
res.should.be.html
res.should.have.header('Content-Length', '123')
[].should.be.empty
[1,2,3].should.include(3)
'foo bar baz'.should.include('foo')
{ name: 'TJ', pet: tobi }.user.should.include({ pet: tobi, name: 'TJ' })
{ foo: 'bar', baz: 'raz' }.should.have.keys('foo', 'bar')
(()-> throw new Error('failed to baz')).should.throwError(/^fail.+/)
user.should.have.property('pets').with.lengthOf(4)
user.should.be.a('object').and.have.property('name', 'tj')
###
describe 'VoiceText', ->
describe '#of()', ->
it 'new', ->
v = new voicetext()
should.exists v
| 221833 | 'use strict'
should = require 'should'
voicetext = require '../../lib/voicetext.js'
###
======== A Handy Little Mocha Reference ========
https://github.com/visionmedia/should.js
https://github.com/visionmedia/mocha
Mocha hooks:
before ()-> # before describe
after ()-> # after describe
beforeEach ()-> # before each it
afterEach ()-> # after each it
Should assertions:
should.exist('hello')
should.fail('expected an error!')
true.should.be.ok
true.should.be.true
false.should.be.false
(()-> arguments)(1,2,3).should.be.arguments
[1,2,3].should.eql([1,2,3])
should.strictEqual(undefined, value)
user.age.should.be.within(5, 50)
username.should.match(/^\w+$/)
user.should.be.a('object')
[].should.be.an.instanceOf(Array)
user.should.have.property('age', 15)
user.age.should.be.above(5)
user.age.should.be.below(100)
user.pets.should.have.length(5)
res.should.have.status(200) #res.statusCode should be 200
res.should.be.json
res.should.be.html
res.should.have.header('Content-Length', '123')
[].should.be.empty
[1,2,3].should.include(3)
'foo bar baz'.should.include('foo')
{ name: '<NAME>', pet: tobi }.user.should.include({ pet: tobi, name: '<NAME>' })
{ foo: 'bar', baz: 'raz' }.should.have.keys('foo', 'bar')
(()-> throw new Error('failed to baz')).should.throwError(/^fail.+/)
user.should.have.property('pets').with.lengthOf(4)
user.should.be.a('object').and.have.property('name', '<NAME>')
###
describe 'VoiceText', ->
describe '#of()', ->
it 'new', ->
v = new voicetext()
should.exists v
| true | 'use strict'
should = require 'should'
voicetext = require '../../lib/voicetext.js'
###
======== A Handy Little Mocha Reference ========
https://github.com/visionmedia/should.js
https://github.com/visionmedia/mocha
Mocha hooks:
before ()-> # before describe
after ()-> # after describe
beforeEach ()-> # before each it
afterEach ()-> # after each it
Should assertions:
should.exist('hello')
should.fail('expected an error!')
true.should.be.ok
true.should.be.true
false.should.be.false
(()-> arguments)(1,2,3).should.be.arguments
[1,2,3].should.eql([1,2,3])
should.strictEqual(undefined, value)
user.age.should.be.within(5, 50)
username.should.match(/^\w+$/)
user.should.be.a('object')
[].should.be.an.instanceOf(Array)
user.should.have.property('age', 15)
user.age.should.be.above(5)
user.age.should.be.below(100)
user.pets.should.have.length(5)
res.should.have.status(200) #res.statusCode should be 200
res.should.be.json
res.should.be.html
res.should.have.header('Content-Length', '123')
[].should.be.empty
[1,2,3].should.include(3)
'foo bar baz'.should.include('foo')
{ name: 'PI:NAME:<NAME>END_PI', pet: tobi }.user.should.include({ pet: tobi, name: 'PI:NAME:<NAME>END_PI' })
{ foo: 'bar', baz: 'raz' }.should.have.keys('foo', 'bar')
(()-> throw new Error('failed to baz')).should.throwError(/^fail.+/)
user.should.have.property('pets').with.lengthOf(4)
user.should.be.a('object').and.have.property('name', 'PI:NAME:<NAME>END_PI')
###
describe 'VoiceText', ->
describe '#of()', ->
it 'new', ->
v = new voicetext()
should.exists v
|
[
{
"context": "# Copyright© 2017 Merck Sharp & Dohme Corp. a subsidiary of Merck & Co., Inc., ",
"end": 29,
"score": 0.9998757243156433,
"start": 18,
"tag": "NAME",
"value": "Merck Sharp"
}
] | app/assets/javascripts/connection_info.js.coffee | Merck/Linea | 0 | # Copyright© 2017 Merck Sharp & Dohme Corp. a subsidiary of Merck & Co., Inc., Kenilworth, NJ, USA. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
$ ->
Handlebars.registerHelper("each_except", (obj, k, opts) ->
keys = Object.keys(obj)
result = ''
filter = (key) ->
if key != k
result = result + opts.fn('key': key, 'value': obj[key]);
filter key for key in keys
return result
)
Handlebars.registerHelper('with', (context, options) ->
if context
return options.fn(context)
)
getConnectionInfo = (e) ->
e.preventDefault()
btn = $(e.target)
btnText = ""
if $("#connection-modal").length > 0
# Checks if the modal was already rendered.
# Prevents multiple equal modals to be appended to the page.
$("#connection-modal").modal()
else
$.ajax(
url: btn.data("connection"),
beforeSend: ->
btnText = btn.text()
btn.html("<img src='/images/ajax-loader.gif'>")
btn.addClass('disabled')
complete: ->
btn.text(btnText)
btn.removeClass('disabled')
error: (xhr, status, err) ->
modalTemplate = Handlebars.compile($("#connection-modal-template").html())
modalEl = $(modalTemplate(error: err + ": " + xhr.status ))
modalEl.appendTo('body').modal()
modalEl.on "hidden.bs.modal", (e) ->
modalEl.remove()
success: (xhr) ->
modalTemplate = Handlebars.compile($("#connection-modal-template").html())
modalEl = $(modalTemplate(response: xhr))
modalEl.appendTo('body').modal()
client = new ZeroClipboard($(".btn-copy"));
# //initializing Clipboard plugin
client.on "copy", (e) ->
clipboard = e.clipboardData
$commonParent = $(e.target).closest('.input-group')
$input = if $commonParent.find('input').length then $commonParent.find('input') else $commonParent.find('textarea')
clipboard.setData( "text/plain", $input.val() )
$input = $(".modal-body").find("input[type=text]")
$textarea = $(".modal-body").find("textarea")
$input.on "click", (e) ->
this.select()
$textarea.on "click", (e) ->
this.select()
)
$("body").on "click", ".js-get-connection-info", getConnectionInfo
| 37120 | # Copyright© 2017 <NAME> & Dohme Corp. a subsidiary of Merck & Co., Inc., Kenilworth, NJ, USA. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
$ ->
Handlebars.registerHelper("each_except", (obj, k, opts) ->
keys = Object.keys(obj)
result = ''
filter = (key) ->
if key != k
result = result + opts.fn('key': key, 'value': obj[key]);
filter key for key in keys
return result
)
Handlebars.registerHelper('with', (context, options) ->
if context
return options.fn(context)
)
getConnectionInfo = (e) ->
e.preventDefault()
btn = $(e.target)
btnText = ""
if $("#connection-modal").length > 0
# Checks if the modal was already rendered.
# Prevents multiple equal modals to be appended to the page.
$("#connection-modal").modal()
else
$.ajax(
url: btn.data("connection"),
beforeSend: ->
btnText = btn.text()
btn.html("<img src='/images/ajax-loader.gif'>")
btn.addClass('disabled')
complete: ->
btn.text(btnText)
btn.removeClass('disabled')
error: (xhr, status, err) ->
modalTemplate = Handlebars.compile($("#connection-modal-template").html())
modalEl = $(modalTemplate(error: err + ": " + xhr.status ))
modalEl.appendTo('body').modal()
modalEl.on "hidden.bs.modal", (e) ->
modalEl.remove()
success: (xhr) ->
modalTemplate = Handlebars.compile($("#connection-modal-template").html())
modalEl = $(modalTemplate(response: xhr))
modalEl.appendTo('body').modal()
client = new ZeroClipboard($(".btn-copy"));
# //initializing Clipboard plugin
client.on "copy", (e) ->
clipboard = e.clipboardData
$commonParent = $(e.target).closest('.input-group')
$input = if $commonParent.find('input').length then $commonParent.find('input') else $commonParent.find('textarea')
clipboard.setData( "text/plain", $input.val() )
$input = $(".modal-body").find("input[type=text]")
$textarea = $(".modal-body").find("textarea")
$input.on "click", (e) ->
this.select()
$textarea.on "click", (e) ->
this.select()
)
$("body").on "click", ".js-get-connection-info", getConnectionInfo
| true | # Copyright© 2017 PI:NAME:<NAME>END_PI & Dohme Corp. a subsidiary of Merck & Co., Inc., Kenilworth, NJ, USA. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
$ ->
Handlebars.registerHelper("each_except", (obj, k, opts) ->
keys = Object.keys(obj)
result = ''
filter = (key) ->
if key != k
result = result + opts.fn('key': key, 'value': obj[key]);
filter key for key in keys
return result
)
Handlebars.registerHelper('with', (context, options) ->
if context
return options.fn(context)
)
getConnectionInfo = (e) ->
e.preventDefault()
btn = $(e.target)
btnText = ""
if $("#connection-modal").length > 0
# Checks if the modal was already rendered.
# Prevents multiple equal modals to be appended to the page.
$("#connection-modal").modal()
else
$.ajax(
url: btn.data("connection"),
beforeSend: ->
btnText = btn.text()
btn.html("<img src='/images/ajax-loader.gif'>")
btn.addClass('disabled')
complete: ->
btn.text(btnText)
btn.removeClass('disabled')
error: (xhr, status, err) ->
modalTemplate = Handlebars.compile($("#connection-modal-template").html())
modalEl = $(modalTemplate(error: err + ": " + xhr.status ))
modalEl.appendTo('body').modal()
modalEl.on "hidden.bs.modal", (e) ->
modalEl.remove()
success: (xhr) ->
modalTemplate = Handlebars.compile($("#connection-modal-template").html())
modalEl = $(modalTemplate(response: xhr))
modalEl.appendTo('body').modal()
client = new ZeroClipboard($(".btn-copy"));
# //initializing Clipboard plugin
client.on "copy", (e) ->
clipboard = e.clipboardData
$commonParent = $(e.target).closest('.input-group')
$input = if $commonParent.find('input').length then $commonParent.find('input') else $commonParent.find('textarea')
clipboard.setData( "text/plain", $input.val() )
$input = $(".modal-body").find("input[type=text]")
$textarea = $(".modal-body").find("textarea")
$input.on "click", (e) ->
this.select()
$textarea.on "click", (e) ->
this.select()
)
$("body").on "click", ".js-get-connection-info", getConnectionInfo
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.