entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "eepEqual user,\n name: 'foo'\n id: 3\n ",
"end": 1746,
"score": 0.9372177720069885,
"start": 1743,
"tag": "NAME",
"value": "foo"
},
{
"context": " person:\n name: 'foo'\n... | test/associations.coffee | clariture/mesa | 0 | mesa = require '../src/postgres'
mesa.enableConnectionReuseForIncludes = true
module.exports =
'associations':
'first':
'hasOne': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "address" WHERE user_id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'djfslkfj', zip_code: 12345, user_id: 4}
]}
addressTable = mesa
.connection(-> test.fail())
.table('address')
userTable = mesa
.connection(connection)
.table('user')
.hasOne('billing_address', addressTable)
userTable
.includes(billing_address: true)
.where(id: 3)
.first (err, user) ->
throw err if err?
test.deepEqual user,
name: 'foo'
id: 3
billing_address:
street: 'foo street'
zip_code: 12345
user_id: 3
test.done()
'belongsTo': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "address" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'bar street', zip_code: 12345, user_id: 10}
]}
when 2
test.equal sql, 'SELECT * FROM "user" WHERE id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 10}
{name: 'baz', id: 4}
]}
userTable = mesa
.connection(-> test.fail())
.table('user')
addressTable = mesa
.connection(connection)
.table('address')
.belongsTo('person', userTable)
addressTable.hookBeforeIncludes = ->
console.log 'before includes'
addressTable.hookBeforeGetIncludesForFirst = ->
console.log 'before get includes for first'
addressTable
.includes(person: true)
.where(id: 3)
.first (err, address) ->
throw err if err?
test.deepEqual address,
street: 'foo street'
zip_code: 12345
user_id: 3
person:
name: 'foo'
id: 3
test.done()
'hasMany': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "task" WHERE user_id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'do laundry', user_id: 3}
{name: 'buy groceries', user_id: 4}
{name: 'buy the iphone 5', user_id: 3}
{name: 'learn clojure', user_id: 3}
]}
taskTable = mesa
.connection(-> test.fail())
.table('task')
userTable = mesa
.connection(connection)
.table('user')
.hasMany('tasks', taskTable)
userTable
.includes(tasks: true)
.where(id: 3)
.first (err, user) ->
throw err if err?
test.deepEqual user,
name: 'foo'
id: 3
tasks: [
{name: 'do laundry', user_id: 3}
{name: 'buy the iphone 5', user_id: 3}
{name: 'learn clojure', user_id: 3}
]
test.done()
'hasManyThrough': (test) ->
test.expect 7
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "user_role" WHERE user_id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{user_id: 3, role_id: 30}
{user_id: 3, role_id: 40}
{user_id: 3, role_id: 60}
]}
when 3
test.equal sql, 'SELECT * FROM "role" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [30, 40, 60]
cb null, {rows: [
{id: 30, name: 'jedi'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
{id: 50, name: 'bad bad role'}
]}
roleTable = mesa
.connection(connection)
.table('role')
joinTable = mesa
.connection(connection)
.table('user_role')
userTable = mesa
.connection(connection)
.table('user')
.hasManyThrough('roles', roleTable, joinTable)
userTable
.includes(roles: true)
.where(id: 3)
.first (err, user) ->
throw err if err?
test.deepEqual user,
name: 'foo'
id: 3
roles: [
{id: 30, name: 'jedi'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
]
test.done()
'find':
'hasOne': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 10}
]}
when 2
test.equal sql, 'SELECT * FROM "address" WHERE user_id IN ($1, $2)'
test.deepEqual params, [3, 10]
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'djfslkfj', zip_code: 12345, user_id: 4}
{street: 'bar street', zip_code: 12345, user_id: 10}
]}
addressTable = mesa
.connection(-> test.fail())
.table('address')
userTable = mesa
.connection(connection)
.table('user')
.hasOne('billing_address', addressTable)
userTable
.includes(billing_address: true)
.find (err, users) ->
test.deepEqual users, [
{
name: 'foo'
id: 3
billing_address:
street: 'foo street'
zip_code: 12345
user_id: 3
}
{
name: 'bar'
id: 10
billing_address:
street: 'bar street'
zip_code: 12345
user_id: 10
}
]
test.done()
'belongsTo': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "address"'
test.deepEqual params, []
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'bar street', zip_code: 12345, user_id: 10}
]}
when 2
test.equal sql, 'SELECT * FROM "user" WHERE id IN ($1, $2)'
test.deepEqual params, [3, 10]
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 10}
{name: 'baz', id: 4}
]}
userTable = mesa
.connection(-> test.fail())
.table('user')
addressTable = mesa
.connection(connection)
.table('address')
.belongsTo('person', userTable)
addressTable
.includes(person: true)
.find (err, addresses) ->
test.deepEqual addresses, [
{
street: 'foo street'
zip_code: 12345
user_id: 3
person:
name: 'foo'
id: 3
}
{
street: 'bar street'
zip_code: 12345
user_id: 10
person:
name: 'bar'
id: 10
}
]
test.done()
'hasMany': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "task" WHERE user_id IN ($1, $2)'
test.deepEqual params, [3, 4]
cb null, {rows: [
{name: 'do laundry', user_id: 3}
{name: 'buy groceries', user_id: 4}
{name: 'foo', user_id: 3}
{name: 'bar', user_id: 3}
{name: 'buy the iphone 5', user_id: 5}
{name: 'learn clojure', user_id: 4}
]}
taskTable = mesa
.connection(-> test.fail())
.table('task')
userTable = mesa
.connection(connection)
.table('user')
.hasMany('tasks', taskTable)
userTable
.includes(tasks: true)
.find (err, users) ->
test.deepEqual users, [
{
name: 'foo'
id: 3
tasks: [
{name: 'do laundry', user_id: 3}
{name: 'foo', user_id: 3}
{name: 'bar', user_id: 3}
]
}
{
name: 'bar'
id: 4
tasks: [
{name: 'buy groceries', user_id: 4}
{name: 'learn clojure', user_id: 4}
]
}
]
test.done()
'hasManyThrough': (test) ->
test.expect 7
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 4}
{name: 'baz', id: 5}
]}
when 2
test.equal sql, 'SELECT * FROM "user_role" WHERE user_id IN ($1, $2, $3)'
test.deepEqual params, [3, 4, 5]
cb null, {rows: [
{user_id: 5, role_id: 40}
{user_id: 5, role_id: 60}
{user_id: 3, role_id: 30}
{user_id: 4, role_id: 60}
{user_id: 3, role_id: 40}
{user_id: 3, role_id: 60}
{user_id: 5, role_id: 50}
]}
when 3
test.equal sql, 'SELECT * FROM "role" WHERE id IN ($1, $2, $3, $4)'
test.deepEqual params, [40, 60, 30, 50]
cb null, {rows: [
{id: 30, name: 'jedi'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
{id: 50, name: 'bad bad role'}
]}
roleTable = mesa
.connection(connection)
.table('role')
joinTable = mesa
.connection(connection)
.table('user_role')
userTable = mesa
.connection(connection)
.table('user')
.hasManyThrough('roles', roleTable, joinTable)
userTable
.includes(roles: true)
.find (err, users) ->
test.deepEqual users, [
{
name: 'foo'
id: 3
roles: [
{id: 30, name: 'jedi'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
]
}
{
name: 'bar'
id: 4
roles: [
{id: 60, name: 'master of the universe'}
]
}
{
name: 'baz'
id: 5
roles: [
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
{id: 50, name: 'bad bad role'}
]
}
]
test.done()
'self associations with custom keys and nested includes': (test) ->
test.expect 15
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: 'foo', id: 1, shipping_id: 11, billing_id: 101}
{name: 'bar', id: 2, shipping_id: 12, billing_id: 102}
{name: 'baz', id: 3, shipping_id: 13, billing_id: 103}
]}
when 2
test.equal sql, 'SELECT * FROM "friend" WHERE user_id1 IN ($1, $2, $3)'
test.deepEqual params, [1, 2, 3]
cb null, {rows: [
{user_id1: 1, user_id2: 2}
{user_id1: 2, user_id2: 3}
{user_id1: 3, user_id2: 1}
{user_id1: 3, user_id2: 2}
]}
when 3
test.equal sql, 'SELECT * FROM "user" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [2, 3, 1]
cb null, {rows: [
{name: 'bar', id: 2, shipping_id: 12, billing_id: 102}
{name: 'baz', id: 3, shipping_id: 13, billing_id: 103}
{name: 'foo', id: 1, shipping_id: 11, billing_id: 101}
]}
when 4
test.equal sql, 'SELECT * FROM "address" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [12, 13, 11]
cb null, {rows: [
{street: 'bar shipping street', id: 12}
{street: 'baz shipping street', id: 13}
{street: 'foo shipping street', id: 11}
]}
when 5
test.equal sql, 'SELECT * FROM "address" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [101, 102, 103]
cb null, {rows: [
{street: 'foo billing street', id: 101}
{street: 'bar billing street', id: 102}
{street: 'baz billing street', id: 103}
]}
when 6
test.equal sql, 'SELECT * FROM "user" WHERE billing_id IN ($1, $2, $3)'
test.deepEqual params, [101, 102, 103]
cb null, {rows: [
{name: 'bar', id: 2, shipping_id: 12, billing_id: 102}
{name: 'foo', id: 1, shipping_id: 11, billing_id: 101}
{name: 'baz', id: 3, shipping_id: 13, billing_id: 103}
]}
table = {}
table.address = mesa
.connection(connection)
.table('address')
.hasOne('user', (-> table.user),
foreignKey: 'billing_id'
)
table.friend = mesa
.connection(connection)
.table('friend')
table.user = mesa
.connection(connection)
.table('user')
.belongsTo('billing_address', (-> table.address),
foreignKey: 'billing_id'
)
.belongsTo('shipping_address', (-> table.address),
foreignKey: 'shipping_id'
)
.hasManyThrough('friends', (-> table.user), (-> table.friend),
foreignKey: 'user_id1'
otherForeignKey: 'user_id2'
)
# include the billing address and all the friends with their
# shipping adresses
table.user
.includes(
friends: {shipping_address: true}
billing_address: {user: true}
)
.find (err, users) ->
test.deepEqual users[0],
name: 'foo'
id: 1
shipping_id: 11
billing_id: 101
friends: [
{
name: 'bar'
id: 2
shipping_id: 12
billing_id: 102
shipping_address: {
street: 'bar shipping street'
id: 12
}
}
]
billing_address: {
street: 'foo billing street'
id: 101
user: {
name: 'foo'
id: 1
shipping_id: 11
billing_id: 101
}
}
test.deepEqual users[1],
name: 'bar'
id: 2
shipping_id: 12
billing_id: 102
friends: [
{
name: 'baz'
id: 3
shipping_id: 13
billing_id: 103
shipping_address: {
street: 'baz shipping street'
id: 13
}
}
]
billing_address: {
street: 'bar billing street'
id: 102
user: {
name: 'bar'
id: 2
shipping_id: 12
billing_id: 102
}
}
test.deepEqual users[2],
name: 'baz'
id: 3
shipping_id: 13
billing_id: 103
friends: [
{
name: 'bar'
id: 2
shipping_id: 12
billing_id: 102
shipping_address: {
street: 'bar shipping street'
id: 12
}
}
{
name: 'foo'
id: 1
shipping_id: 11
billing_id: 101
shipping_address: {
street: 'foo shipping street'
id: 11
}
}
]
billing_address: {
street: 'baz billing street'
id: 103
user: {
name: 'baz'
id: 3
shipping_id: 13
billing_id: 103
}
}
test.done()
'hasManyThrough works if there are no associated': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 4}
{name: 'baz', id: 5}
]}
when 2
test.equal sql, 'SELECT * FROM "user_role" WHERE user_id IN ($1, $2, $3)'
test.deepEqual params, [3, 4, 5]
cb null, {rows: []}
roleTable = mesa
.connection(connection)
.table('role')
userRoleTable = mesa
.connection(connection)
.table('user_role')
userTable = mesa
.connection(connection)
.table('user')
.hasManyThrough('roles', roleTable, userRoleTable)
userTable
.includes(roles: true)
.find (err, users) ->
test.deepEqual users, [
{
name: 'foo'
id: 3
roles: []
}
{
name: 'bar'
id: 4
roles: []
}
{
name: 'baz'
id: 5
roles: []
}
]
test.done()
| 185093 | mesa = require '../src/postgres'
mesa.enableConnectionReuseForIncludes = true
module.exports =
'associations':
'first':
'hasOne': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "address" WHERE user_id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'djfslkfj', zip_code: 12345, user_id: 4}
]}
addressTable = mesa
.connection(-> test.fail())
.table('address')
userTable = mesa
.connection(connection)
.table('user')
.hasOne('billing_address', addressTable)
userTable
.includes(billing_address: true)
.where(id: 3)
.first (err, user) ->
throw err if err?
test.deepEqual user,
name: '<NAME>'
id: 3
billing_address:
street: 'foo street'
zip_code: 12345
user_id: 3
test.done()
'belongsTo': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "address" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'bar street', zip_code: 12345, user_id: 10}
]}
when 2
test.equal sql, 'SELECT * FROM "user" WHERE id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 10}
{name: 'baz', id: 4}
]}
userTable = mesa
.connection(-> test.fail())
.table('user')
addressTable = mesa
.connection(connection)
.table('address')
.belongsTo('person', userTable)
addressTable.hookBeforeIncludes = ->
console.log 'before includes'
addressTable.hookBeforeGetIncludesForFirst = ->
console.log 'before get includes for first'
addressTable
.includes(person: true)
.where(id: 3)
.first (err, address) ->
throw err if err?
test.deepEqual address,
street: 'foo street'
zip_code: 12345
user_id: 3
person:
name: '<NAME>'
id: 3
test.done()
'hasMany': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "task" WHERE user_id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'do laundry', user_id: 3}
{name: 'buy groceries', user_id: 4}
{name: 'buy the iphone 5', user_id: 3}
{name: 'learn clojure', user_id: 3}
]}
taskTable = mesa
.connection(-> test.fail())
.table('task')
userTable = mesa
.connection(connection)
.table('user')
.hasMany('tasks', taskTable)
userTable
.includes(tasks: true)
.where(id: 3)
.first (err, user) ->
throw err if err?
test.deepEqual user,
name: '<NAME>'
id: 3
tasks: [
{name: 'do laundry', user_id: 3}
{name: 'buy the iphone 5', user_id: 3}
{name: 'learn clojure', user_id: 3}
]
test.done()
'hasManyThrough': (test) ->
test.expect 7
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{name: '<NAME>', id: 3}
{name: '<NAME>', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "user_role" WHERE user_id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{user_id: 3, role_id: 30}
{user_id: 3, role_id: 40}
{user_id: 3, role_id: 60}
]}
when 3
test.equal sql, 'SELECT * FROM "role" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [30, 40, 60]
cb null, {rows: [
{id: 30, name: '<NAME>'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
{id: 50, name: 'bad bad role'}
]}
roleTable = mesa
.connection(connection)
.table('role')
joinTable = mesa
.connection(connection)
.table('user_role')
userTable = mesa
.connection(connection)
.table('user')
.hasManyThrough('roles', roleTable, joinTable)
userTable
.includes(roles: true)
.where(id: 3)
.first (err, user) ->
throw err if err?
test.deepEqual user,
name: '<NAME>'
id: 3
roles: [
{id: 30, name: '<NAME>'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
]
test.done()
'find':
'hasOne': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: '<NAME>', id: 3}
{name: '<NAME>', id: 10}
]}
when 2
test.equal sql, 'SELECT * FROM "address" WHERE user_id IN ($1, $2)'
test.deepEqual params, [3, 10]
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'djfslkfj', zip_code: 12345, user_id: 4}
{street: 'bar street', zip_code: 12345, user_id: 10}
]}
addressTable = mesa
.connection(-> test.fail())
.table('address')
userTable = mesa
.connection(connection)
.table('user')
.hasOne('billing_address', addressTable)
userTable
.includes(billing_address: true)
.find (err, users) ->
test.deepEqual users, [
{
name: '<NAME>'
id: 3
billing_address:
street: 'foo street'
zip_code: 12345
user_id: 3
}
{
name: '<NAME>'
id: 10
billing_address:
street: 'bar street'
zip_code: 12345
user_id: 10
}
]
test.done()
'belongsTo': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "address"'
test.deepEqual params, []
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'bar street', zip_code: 12345, user_id: 10}
]}
when 2
test.equal sql, 'SELECT * FROM "user" WHERE id IN ($1, $2)'
test.deepEqual params, [3, 10]
cb null, {rows: [
{name: '<NAME>', id: 3}
{name: 'bar', id: 10}
{name: 'baz', id: 4}
]}
userTable = mesa
.connection(-> test.fail())
.table('user')
addressTable = mesa
.connection(connection)
.table('address')
.belongsTo('person', userTable)
addressTable
.includes(person: true)
.find (err, addresses) ->
test.deepEqual addresses, [
{
street: 'foo street'
zip_code: 12345
user_id: 3
person:
name: '<NAME>'
id: 3
}
{
street: 'bar street'
zip_code: 12345
user_id: 10
person:
name: '<NAME>'
id: 10
}
]
test.done()
'hasMany': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: '<NAME>', id: 3}
{name: 'bar', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "task" WHERE user_id IN ($1, $2)'
test.deepEqual params, [3, 4]
cb null, {rows: [
{name: 'do laundry', user_id: 3}
{name: 'buy groceries', user_id: 4}
{name: 'foo', user_id: 3}
{name: 'bar', user_id: 3}
{name: 'buy the iphone 5', user_id: 5}
{name: 'learn clojure', user_id: 4}
]}
taskTable = mesa
.connection(-> test.fail())
.table('task')
userTable = mesa
.connection(connection)
.table('user')
.hasMany('tasks', taskTable)
userTable
.includes(tasks: true)
.find (err, users) ->
test.deepEqual users, [
{
name: '<NAME>'
id: 3
tasks: [
{name: 'do laundry', user_id: 3}
{name: 'foo', user_id: 3}
{name: 'bar', user_id: 3}
]
}
{
name: '<NAME>'
id: 4
tasks: [
{name: 'buy groceries', user_id: 4}
{name: 'learn clojure', user_id: 4}
]
}
]
test.done()
'hasManyThrough': (test) ->
test.expect 7
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: '<NAME>', id: 3}
{name: 'bar', id: 4}
{name: 'baz', id: 5}
]}
when 2
test.equal sql, 'SELECT * FROM "user_role" WHERE user_id IN ($1, $2, $3)'
test.deepEqual params, [3, 4, 5]
cb null, {rows: [
{user_id: 5, role_id: 40}
{user_id: 5, role_id: 60}
{user_id: 3, role_id: 30}
{user_id: 4, role_id: 60}
{user_id: 3, role_id: 40}
{user_id: 3, role_id: 60}
{user_id: 5, role_id: 50}
]}
when 3
test.equal sql, 'SELECT * FROM "role" WHERE id IN ($1, $2, $3, $4)'
test.deepEqual params, [40, 60, 30, 50]
cb null, {rows: [
{id: 30, name: '<NAME>'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
{id: 50, name: 'bad bad role'}
]}
roleTable = mesa
.connection(connection)
.table('role')
joinTable = mesa
.connection(connection)
.table('user_role')
userTable = mesa
.connection(connection)
.table('user')
.hasManyThrough('roles', roleTable, joinTable)
userTable
.includes(roles: true)
.find (err, users) ->
test.deepEqual users, [
{
name: '<NAME>'
id: 3
roles: [
{id: 30, name: '<NAME>'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
]
}
{
name: '<NAME>'
id: 4
roles: [
{id: 60, name: 'master of the universe'}
]
}
{
name: '<NAME>'
id: 5
roles: [
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
{id: 50, name: 'bad bad role'}
]
}
]
test.done()
'self associations with custom keys and nested includes': (test) ->
test.expect 15
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: '<NAME>', id: 1, shipping_id: 11, billing_id: 101}
{name: '<NAME>', id: 2, shipping_id: 12, billing_id: 102}
{name: '<NAME>', id: 3, shipping_id: 13, billing_id: 103}
]}
when 2
test.equal sql, 'SELECT * FROM "friend" WHERE user_id1 IN ($1, $2, $3)'
test.deepEqual params, [1, 2, 3]
cb null, {rows: [
{user_id1: 1, user_id2: 2}
{user_id1: 2, user_id2: 3}
{user_id1: 3, user_id2: 1}
{user_id1: 3, user_id2: 2}
]}
when 3
test.equal sql, 'SELECT * FROM "user" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [2, 3, 1]
cb null, {rows: [
{name: '<NAME>', id: 2, shipping_id: 12, billing_id: 102}
{name: '<NAME>', id: 3, shipping_id: 13, billing_id: 103}
{name: '<NAME>', id: 1, shipping_id: 11, billing_id: 101}
]}
when 4
test.equal sql, 'SELECT * FROM "address" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [12, 13, 11]
cb null, {rows: [
{street: 'bar shipping street', id: 12}
{street: 'baz shipping street', id: 13}
{street: 'foo shipping street', id: 11}
]}
when 5
test.equal sql, 'SELECT * FROM "address" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [101, 102, 103]
cb null, {rows: [
{street: 'foo billing street', id: 101}
{street: 'bar billing street', id: 102}
{street: 'baz billing street', id: 103}
]}
when 6
test.equal sql, 'SELECT * FROM "user" WHERE billing_id IN ($1, $2, $3)'
test.deepEqual params, [101, 102, 103]
cb null, {rows: [
{name: '<NAME>', id: 2, shipping_id: 12, billing_id: 102}
{name: '<NAME>', id: 1, shipping_id: 11, billing_id: 101}
{name: '<NAME>', id: 3, shipping_id: 13, billing_id: 103}
]}
table = {}
table.address = mesa
.connection(connection)
.table('address')
.hasOne('user', (-> table.user),
foreignKey: 'billing_id'
)
table.friend = mesa
.connection(connection)
.table('friend')
table.user = mesa
.connection(connection)
.table('user')
.belongsTo('billing_address', (-> table.address),
foreignKey: 'billing_id'
)
.belongsTo('shipping_address', (-> table.address),
foreignKey: 'shipping_id'
)
.hasManyThrough('friends', (-> table.user), (-> table.friend),
foreignKey: 'user_id1'
otherForeignKey: 'user_id2'
)
# include the billing address and all the friends with their
# shipping adresses
table.user
.includes(
friends: {shipping_address: true}
billing_address: {user: true}
)
.find (err, users) ->
test.deepEqual users[0],
name: '<NAME>'
id: 1
shipping_id: 11
billing_id: 101
friends: [
{
name: '<NAME>'
id: 2
shipping_id: 12
billing_id: 102
shipping_address: {
street: 'bar shipping street'
id: 12
}
}
]
billing_address: {
street: 'foo billing street'
id: 101
user: {
name: '<NAME>'
id: 1
shipping_id: 11
billing_id: 101
}
}
test.deepEqual users[1],
name: '<NAME>'
id: 2
shipping_id: 12
billing_id: 102
friends: [
{
name: '<NAME>'
id: 3
shipping_id: 13
billing_id: 103
shipping_address: {
street: 'baz shipping street'
id: 13
}
}
]
billing_address: {
street: 'bar billing street'
id: 102
user: {
name: '<NAME>'
id: 2
shipping_id: 12
billing_id: 102
}
}
test.deepEqual users[2],
name: '<NAME>'
id: 3
shipping_id: 13
billing_id: 103
friends: [
{
name: '<NAME>'
id: 2
shipping_id: 12
billing_id: 102
shipping_address: {
street: 'bar shipping street'
id: 12
}
}
{
name: '<NAME>'
id: 1
shipping_id: 11
billing_id: 101
shipping_address: {
street: 'foo shipping street'
id: 11
}
}
]
billing_address: {
street: 'baz billing street'
id: 103
user: {
name: '<NAME>'
id: 3
shipping_id: 13
billing_id: 103
}
}
test.done()
'hasManyThrough works if there are no associated': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: '<NAME>', id: 3}
{name: '<NAME>', id: 4}
{name: '<NAME>', id: 5}
]}
when 2
test.equal sql, 'SELECT * FROM "user_role" WHERE user_id IN ($1, $2, $3)'
test.deepEqual params, [3, 4, 5]
cb null, {rows: []}
roleTable = mesa
.connection(connection)
.table('role')
userRoleTable = mesa
.connection(connection)
.table('user_role')
userTable = mesa
.connection(connection)
.table('user')
.hasManyThrough('roles', roleTable, userRoleTable)
userTable
.includes(roles: true)
.find (err, users) ->
test.deepEqual users, [
{
name: '<NAME>'
id: 3
roles: []
}
{
name: '<NAME>'
id: 4
roles: []
}
{
name: '<NAME>'
id: 5
roles: []
}
]
test.done()
| true | mesa = require '../src/postgres'
mesa.enableConnectionReuseForIncludes = true
module.exports =
'associations':
'first':
'hasOne': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "address" WHERE user_id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'djfslkfj', zip_code: 12345, user_id: 4}
]}
addressTable = mesa
.connection(-> test.fail())
.table('address')
userTable = mesa
.connection(connection)
.table('user')
.hasOne('billing_address', addressTable)
userTable
.includes(billing_address: true)
.where(id: 3)
.first (err, user) ->
throw err if err?
test.deepEqual user,
name: 'PI:NAME:<NAME>END_PI'
id: 3
billing_address:
street: 'foo street'
zip_code: 12345
user_id: 3
test.done()
'belongsTo': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "address" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'bar street', zip_code: 12345, user_id: 10}
]}
when 2
test.equal sql, 'SELECT * FROM "user" WHERE id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 10}
{name: 'baz', id: 4}
]}
userTable = mesa
.connection(-> test.fail())
.table('user')
addressTable = mesa
.connection(connection)
.table('address')
.belongsTo('person', userTable)
addressTable.hookBeforeIncludes = ->
console.log 'before includes'
addressTable.hookBeforeGetIncludesForFirst = ->
console.log 'before get includes for first'
addressTable
.includes(person: true)
.where(id: 3)
.first (err, address) ->
throw err if err?
test.deepEqual address,
street: 'foo street'
zip_code: 12345
user_id: 3
person:
name: 'PI:NAME:<NAME>END_PI'
id: 3
test.done()
'hasMany': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'foo', id: 3}
{name: 'bar', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "task" WHERE user_id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'do laundry', user_id: 3}
{name: 'buy groceries', user_id: 4}
{name: 'buy the iphone 5', user_id: 3}
{name: 'learn clojure', user_id: 3}
]}
taskTable = mesa
.connection(-> test.fail())
.table('task')
userTable = mesa
.connection(connection)
.table('user')
.hasMany('tasks', taskTable)
userTable
.includes(tasks: true)
.where(id: 3)
.first (err, user) ->
throw err if err?
test.deepEqual user,
name: 'PI:NAME:<NAME>END_PI'
id: 3
tasks: [
{name: 'do laundry', user_id: 3}
{name: 'buy the iphone 5', user_id: 3}
{name: 'learn clojure', user_id: 3}
]
test.done()
'hasManyThrough': (test) ->
test.expect 7
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user" WHERE id = $1'
test.deepEqual params, [3]
cb null, {rows: [
{name: 'PI:NAME:<NAME>END_PI', id: 3}
{name: 'PI:NAME:<NAME>END_PI', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "user_role" WHERE user_id IN ($1)'
test.deepEqual params, [3]
cb null, {rows: [
{user_id: 3, role_id: 30}
{user_id: 3, role_id: 40}
{user_id: 3, role_id: 60}
]}
when 3
test.equal sql, 'SELECT * FROM "role" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [30, 40, 60]
cb null, {rows: [
{id: 30, name: 'PI:NAME:<NAME>END_PI'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
{id: 50, name: 'bad bad role'}
]}
roleTable = mesa
.connection(connection)
.table('role')
joinTable = mesa
.connection(connection)
.table('user_role')
userTable = mesa
.connection(connection)
.table('user')
.hasManyThrough('roles', roleTable, joinTable)
userTable
.includes(roles: true)
.where(id: 3)
.first (err, user) ->
throw err if err?
test.deepEqual user,
name: 'PI:NAME:<NAME>END_PI'
id: 3
roles: [
{id: 30, name: 'PI:NAME:<NAME>END_PI'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
]
test.done()
'find':
'hasOne': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: 'PI:NAME:<NAME>END_PI', id: 3}
{name: 'PI:NAME:<NAME>END_PI', id: 10}
]}
when 2
test.equal sql, 'SELECT * FROM "address" WHERE user_id IN ($1, $2)'
test.deepEqual params, [3, 10]
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'djfslkfj', zip_code: 12345, user_id: 4}
{street: 'bar street', zip_code: 12345, user_id: 10}
]}
addressTable = mesa
.connection(-> test.fail())
.table('address')
userTable = mesa
.connection(connection)
.table('user')
.hasOne('billing_address', addressTable)
userTable
.includes(billing_address: true)
.find (err, users) ->
test.deepEqual users, [
{
name: 'PI:NAME:<NAME>END_PI'
id: 3
billing_address:
street: 'foo street'
zip_code: 12345
user_id: 3
}
{
name: 'PI:NAME:<NAME>END_PI'
id: 10
billing_address:
street: 'bar street'
zip_code: 12345
user_id: 10
}
]
test.done()
'belongsTo': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "address"'
test.deepEqual params, []
cb null, {rows: [
{street: 'foo street', zip_code: 12345, user_id: 3}
{street: 'bar street', zip_code: 12345, user_id: 10}
]}
when 2
test.equal sql, 'SELECT * FROM "user" WHERE id IN ($1, $2)'
test.deepEqual params, [3, 10]
cb null, {rows: [
{name: 'PI:NAME:<NAME>END_PI', id: 3}
{name: 'bar', id: 10}
{name: 'baz', id: 4}
]}
userTable = mesa
.connection(-> test.fail())
.table('user')
addressTable = mesa
.connection(connection)
.table('address')
.belongsTo('person', userTable)
addressTable
.includes(person: true)
.find (err, addresses) ->
test.deepEqual addresses, [
{
street: 'foo street'
zip_code: 12345
user_id: 3
person:
name: 'PI:NAME:<NAME>END_PI'
id: 3
}
{
street: 'bar street'
zip_code: 12345
user_id: 10
person:
name: 'PI:NAME:<NAME>END_PI'
id: 10
}
]
test.done()
'hasMany': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: 'PI:NAME:<NAME>END_PI', id: 3}
{name: 'bar', id: 4}
]}
when 2
test.equal sql, 'SELECT * FROM "task" WHERE user_id IN ($1, $2)'
test.deepEqual params, [3, 4]
cb null, {rows: [
{name: 'do laundry', user_id: 3}
{name: 'buy groceries', user_id: 4}
{name: 'foo', user_id: 3}
{name: 'bar', user_id: 3}
{name: 'buy the iphone 5', user_id: 5}
{name: 'learn clojure', user_id: 4}
]}
taskTable = mesa
.connection(-> test.fail())
.table('task')
userTable = mesa
.connection(connection)
.table('user')
.hasMany('tasks', taskTable)
userTable
.includes(tasks: true)
.find (err, users) ->
test.deepEqual users, [
{
name: 'PI:NAME:<NAME>END_PI'
id: 3
tasks: [
{name: 'do laundry', user_id: 3}
{name: 'foo', user_id: 3}
{name: 'bar', user_id: 3}
]
}
{
name: 'PI:NAME:<NAME>END_PI'
id: 4
tasks: [
{name: 'buy groceries', user_id: 4}
{name: 'learn clojure', user_id: 4}
]
}
]
test.done()
'hasManyThrough': (test) ->
test.expect 7
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: 'PI:NAME:<NAME>END_PI', id: 3}
{name: 'bar', id: 4}
{name: 'baz', id: 5}
]}
when 2
test.equal sql, 'SELECT * FROM "user_role" WHERE user_id IN ($1, $2, $3)'
test.deepEqual params, [3, 4, 5]
cb null, {rows: [
{user_id: 5, role_id: 40}
{user_id: 5, role_id: 60}
{user_id: 3, role_id: 30}
{user_id: 4, role_id: 60}
{user_id: 3, role_id: 40}
{user_id: 3, role_id: 60}
{user_id: 5, role_id: 50}
]}
when 3
test.equal sql, 'SELECT * FROM "role" WHERE id IN ($1, $2, $3, $4)'
test.deepEqual params, [40, 60, 30, 50]
cb null, {rows: [
{id: 30, name: 'PI:NAME:<NAME>END_PI'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
{id: 50, name: 'bad bad role'}
]}
roleTable = mesa
.connection(connection)
.table('role')
joinTable = mesa
.connection(connection)
.table('user_role')
userTable = mesa
.connection(connection)
.table('user')
.hasManyThrough('roles', roleTable, joinTable)
userTable
.includes(roles: true)
.find (err, users) ->
test.deepEqual users, [
{
name: 'PI:NAME:<NAME>END_PI'
id: 3
roles: [
{id: 30, name: 'PI:NAME:<NAME>END_PI'}
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
]
}
{
name: 'PI:NAME:<NAME>END_PI'
id: 4
roles: [
{id: 60, name: 'master of the universe'}
]
}
{
name: 'PI:NAME:<NAME>END_PI'
id: 5
roles: [
{id: 40, name: 'administrator'}
{id: 60, name: 'master of the universe'}
{id: 50, name: 'bad bad role'}
]
}
]
test.done()
'self associations with custom keys and nested includes': (test) ->
test.expect 15
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: 'PI:NAME:<NAME>END_PI', id: 1, shipping_id: 11, billing_id: 101}
{name: 'PI:NAME:<NAME>END_PI', id: 2, shipping_id: 12, billing_id: 102}
{name: 'PI:NAME:<NAME>END_PI', id: 3, shipping_id: 13, billing_id: 103}
]}
when 2
test.equal sql, 'SELECT * FROM "friend" WHERE user_id1 IN ($1, $2, $3)'
test.deepEqual params, [1, 2, 3]
cb null, {rows: [
{user_id1: 1, user_id2: 2}
{user_id1: 2, user_id2: 3}
{user_id1: 3, user_id2: 1}
{user_id1: 3, user_id2: 2}
]}
when 3
test.equal sql, 'SELECT * FROM "user" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [2, 3, 1]
cb null, {rows: [
{name: 'PI:NAME:<NAME>END_PI', id: 2, shipping_id: 12, billing_id: 102}
{name: 'PI:NAME:<NAME>END_PI', id: 3, shipping_id: 13, billing_id: 103}
{name: 'PI:NAME:<NAME>END_PI', id: 1, shipping_id: 11, billing_id: 101}
]}
when 4
test.equal sql, 'SELECT * FROM "address" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [12, 13, 11]
cb null, {rows: [
{street: 'bar shipping street', id: 12}
{street: 'baz shipping street', id: 13}
{street: 'foo shipping street', id: 11}
]}
when 5
test.equal sql, 'SELECT * FROM "address" WHERE id IN ($1, $2, $3)'
test.deepEqual params, [101, 102, 103]
cb null, {rows: [
{street: 'foo billing street', id: 101}
{street: 'bar billing street', id: 102}
{street: 'baz billing street', id: 103}
]}
when 6
test.equal sql, 'SELECT * FROM "user" WHERE billing_id IN ($1, $2, $3)'
test.deepEqual params, [101, 102, 103]
cb null, {rows: [
{name: 'PI:NAME:<NAME>END_PI', id: 2, shipping_id: 12, billing_id: 102}
{name: 'PI:NAME:<NAME>END_PI', id: 1, shipping_id: 11, billing_id: 101}
{name: 'PI:NAME:<NAME>END_PI', id: 3, shipping_id: 13, billing_id: 103}
]}
table = {}
table.address = mesa
.connection(connection)
.table('address')
.hasOne('user', (-> table.user),
foreignKey: 'billing_id'
)
table.friend = mesa
.connection(connection)
.table('friend')
table.user = mesa
.connection(connection)
.table('user')
.belongsTo('billing_address', (-> table.address),
foreignKey: 'billing_id'
)
.belongsTo('shipping_address', (-> table.address),
foreignKey: 'shipping_id'
)
.hasManyThrough('friends', (-> table.user), (-> table.friend),
foreignKey: 'user_id1'
otherForeignKey: 'user_id2'
)
# include the billing address and all the friends with their
# shipping adresses
table.user
.includes(
friends: {shipping_address: true}
billing_address: {user: true}
)
.find (err, users) ->
test.deepEqual users[0],
name: 'PI:NAME:<NAME>END_PI'
id: 1
shipping_id: 11
billing_id: 101
friends: [
{
name: 'PI:NAME:<NAME>END_PI'
id: 2
shipping_id: 12
billing_id: 102
shipping_address: {
street: 'bar shipping street'
id: 12
}
}
]
billing_address: {
street: 'foo billing street'
id: 101
user: {
name: 'PI:NAME:<NAME>END_PI'
id: 1
shipping_id: 11
billing_id: 101
}
}
test.deepEqual users[1],
name: 'PI:NAME:<NAME>END_PI'
id: 2
shipping_id: 12
billing_id: 102
friends: [
{
name: 'PI:NAME:<NAME>END_PI'
id: 3
shipping_id: 13
billing_id: 103
shipping_address: {
street: 'baz shipping street'
id: 13
}
}
]
billing_address: {
street: 'bar billing street'
id: 102
user: {
name: 'PI:NAME:<NAME>END_PI'
id: 2
shipping_id: 12
billing_id: 102
}
}
test.deepEqual users[2],
name: 'PI:NAME:<NAME>END_PI'
id: 3
shipping_id: 13
billing_id: 103
friends: [
{
name: 'PI:NAME:<NAME>END_PI'
id: 2
shipping_id: 12
billing_id: 102
shipping_address: {
street: 'bar shipping street'
id: 12
}
}
{
name: 'PI:NAME:<NAME>END_PI'
id: 1
shipping_id: 11
billing_id: 101
shipping_address: {
street: 'foo shipping street'
id: 11
}
}
]
billing_address: {
street: 'baz billing street'
id: 103
user: {
name: 'PI:NAME:<NAME>END_PI'
id: 3
shipping_id: 13
billing_id: 103
}
}
test.done()
'hasManyThrough works if there are no associated': (test) ->
test.expect 5
call = 1
connection =
query: (sql, params, cb) ->
switch call++
when 1
test.equal sql, 'SELECT * FROM "user"'
test.deepEqual params, []
cb null, {rows: [
{name: 'PI:NAME:<NAME>END_PI', id: 3}
{name: 'PI:NAME:<NAME>END_PI', id: 4}
{name: 'PI:NAME:<NAME>END_PI', id: 5}
]}
when 2
test.equal sql, 'SELECT * FROM "user_role" WHERE user_id IN ($1, $2, $3)'
test.deepEqual params, [3, 4, 5]
cb null, {rows: []}
roleTable = mesa
.connection(connection)
.table('role')
userRoleTable = mesa
.connection(connection)
.table('user_role')
userTable = mesa
.connection(connection)
.table('user')
.hasManyThrough('roles', roleTable, userRoleTable)
userTable
.includes(roles: true)
.find (err, users) ->
test.deepEqual users, [
{
name: 'PI:NAME:<NAME>END_PI'
id: 3
roles: []
}
{
name: 'PI:NAME:<NAME>END_PI'
id: 4
roles: []
}
{
name: 'PI:NAME:<NAME>END_PI'
id: 5
roles: []
}
]
test.done()
|
[
{
"context": "ocess')\n\nNPM = '/usr/local/bin/npm'\ndep_keys = [ 'devDependencies', 'dependencies' ]\n\nextend = ( dest, items... ) ->\n",
"end": 122,
"score": 0.7674651145935059,
"start": 105,
"tag": "KEY",
"value": "devDependencies',"
},
{
"context": "r/local/bin/npm'\ndep_keys = ... | lib/util/require-install.coffee | venkatperi/jsgradle | 0 | {spawnSync, execFile, execFileSync} = require('child_process')
NPM = '/usr/local/bin/npm'
dep_keys = [ 'devDependencies', 'dependencies' ]
extend = ( dest, items... ) ->
for item in items
for own k,v of item
dest[ k ] = v
dest
exec = ( cmd, args = [], opts = {}, cb ) ->
execFile cmd, args, opts, ( e, stdout, stderr ) ->
if e
return cb switch e.code
when 'ENOENT' then new Error "#{cmd}: command not found"
else
e
cb null, stdout : stdout, stderr : stderr
npmInstall = ( pkg, args, cb ) ->
args.unshift pkg
exec NPM, args, {}, cb
npmInstallSync = ( pkg, args ) ->
args.unshift pkg
args.unshift 'install'
spawnSync NPM, args
_require = ( pkg, opts = { save : true, dev : true } ) ->
try
require pkg
catch e
console.log "Installing #{pkg}"
args = []
if opts.save and opts.dev
args.push '--save-dev'
else if opts.save
args.push '--save'
npmInstallSync pkg, args
return require pkg
module.exports = _require
| 113616 | {spawnSync, execFile, execFileSync} = require('child_process')
NPM = '/usr/local/bin/npm'
dep_keys = [ '<KEY> '<KEY>' ]
extend = ( dest, items... ) ->
for item in items
for own k,v of item
dest[ k ] = v
dest
exec = ( cmd, args = [], opts = {}, cb ) ->
execFile cmd, args, opts, ( e, stdout, stderr ) ->
if e
return cb switch e.code
when 'ENOENT' then new Error "#{cmd}: command not found"
else
e
cb null, stdout : stdout, stderr : stderr
npmInstall = ( pkg, args, cb ) ->
args.unshift pkg
exec NPM, args, {}, cb
npmInstallSync = ( pkg, args ) ->
args.unshift pkg
args.unshift 'install'
spawnSync NPM, args
_require = ( pkg, opts = { save : true, dev : true } ) ->
try
require pkg
catch e
console.log "Installing #{pkg}"
args = []
if opts.save and opts.dev
args.push '--save-dev'
else if opts.save
args.push '--save'
npmInstallSync pkg, args
return require pkg
module.exports = _require
| true | {spawnSync, execFile, execFileSync} = require('child_process')
NPM = '/usr/local/bin/npm'
dep_keys = [ 'PI:KEY:<KEY>END_PI 'PI:KEY:<KEY>END_PI' ]
extend = ( dest, items... ) ->
for item in items
for own k,v of item
dest[ k ] = v
dest
exec = ( cmd, args = [], opts = {}, cb ) ->
execFile cmd, args, opts, ( e, stdout, stderr ) ->
if e
return cb switch e.code
when 'ENOENT' then new Error "#{cmd}: command not found"
else
e
cb null, stdout : stdout, stderr : stderr
npmInstall = ( pkg, args, cb ) ->
args.unshift pkg
exec NPM, args, {}, cb
npmInstallSync = ( pkg, args ) ->
args.unshift pkg
args.unshift 'install'
spawnSync NPM, args
_require = ( pkg, opts = { save : true, dev : true } ) ->
try
require pkg
catch e
console.log "Installing #{pkg}"
args = []
if opts.save and opts.dev
args.push '--save-dev'
else if opts.save
args.push '--save'
npmInstallSync pkg, args
return require pkg
module.exports = _require
|
[
{
"context": "\"negate\", ->\n expect(_.negate(_.always(true))(\"timanttikobra\")).to.be.false\n describe \"empty\", ->\n expect(",
"end": 1074,
"score": 0.9942218661308289,
"start": 1061,
"tag": "USERNAME",
"value": "timanttikobra"
},
{
"context": " it \"for circular refs\", ... | out/vendor/bacon/spec/BaconSpec.coffee | beer-challenge/beer-challenge-html5 | 1 | expect = require("chai").expect
Bacon = require("../src/Bacon").Bacon
Mocks = require( "./Mock")
TickScheduler = require("./TickScheduler").TickScheduler
mock = Mocks.mock
mockFunction = Mocks.mockFunction
EventEmitter = require("events").EventEmitter
th = require("./SpecHelper")
t = th.t
expectStreamEvents = th.expectStreamEvents
expectPropertyEvents = th.expectPropertyEvents
verifyCleanup = th.verifyCleanup
error = th.error
soon = th.soon
series = th.series
repeat = th.repeat
toValues = th.toValues
sc = TickScheduler()
Bacon.scheduler = sc
# Some streams are unstable when testing with verifySwitching2.
# Generally, all flatMap-based streams are unstable because flatMap discards
# child streams on unsubscribe.
unstable = {unstable:true}
describe "Bacon._", ->
_ = Bacon._
describe "head", ->
expect(_.head([5,2,9])).to.equal(5)
expect(_.head([])).to.equal(undefined)
expect(_.head(5)).to.equal(undefined)
describe "always", -> expect(_.always(5)("francis")).to.equal(5)
describe "negate", ->
expect(_.negate(_.always(true))("timanttikobra")).to.be.false
describe "empty", ->
expect(_.empty([])).to.be.true
expect(_.empty("")).to.be.true
expect(_.empty([1])).to.be.false
expect(_.empty("1")).to.be.false
describe "tail", ->
expect(_.tail([1,2,3])).to.deep.equal([2,3])
expect(_.tail([1])).to.deep.equal([])
expect(_.tail([])).to.deep.equal([])
describe "filter", ->
expect(_.filter(_.empty, ["","1",[],[2]])).to.deep.equal(["",[]])
describe "map", ->
expect(_.map(_.head, [
[], [1], [2,2], [3,3,3]
])).to.deep.equal([
undefined, 1, 2, 3
])
describe "flatMap", ->
expect(_.flatMap(((x) -> [x, x]), [1,2,3])).to.deep.equal([1,1,2,2,3,3])
describe "each", ->
it "provides key and value to iterator", ->
expectKeyVals = (x, expectedKeys, expectedValues) ->
keys = []
values = []
_.each(x, (key, value) ->
keys.push(key)
values.push(value)
)
expect([keys, values]).to.deep.equal([expectedKeys, expectedValues])
expectKeyVals(
{cat:"furry",bird:"feathery"}, ["cat","bird"], ["furry","feathery"]
)
expectKeyVals([1,2,3], ["0","1","2"], [1,2,3])
describe "toArray", ->
expect(_.toArray(2)).to.deep.equal([2])
it "ignores rest of arguments", ->
expect(_.toArray(1,1,2)).to.deep.equal([1])
it "should, when given an array, return it back (not a copy)", ->
arr = []
expect(_.toArray(arr)).to.equal(arr)
describe "indexOf", ->
expect(_.indexOf([1,2], 1)).to.equal(0)
expect(_.indexOf([1,2], 2)).to.equal(1)
expect(_.indexOf([1,2], 3)).to.equal(-1)
describe "contains", ->
expect(_.contains("abc", "c")).to.be.true
expect(_.contains("abc", "x")).to.be.false
expect(_.contains([2,4,6], 4)).to.be.true
expect(_.contains([2,4,6], 3)).to.be.false
describe "id", ->
obj = {}
expect(_.id(obj)).to.equal(obj)
describe "last", ->
expect(_.last([2,4])).to.equal(4)
expect(_.last("last")).to.equal("t")
describe "all", ->
expect(_.all([ [false,true], [true,true] ], _.head)).to.be.false
expect(_.all([ [true,false], [true,true] ], _.head)).to.be.true
it "should test truthiness if no function given", ->
expect(_.all([true, false, true])).to.be.false
expect(_.all([true, true, true])).to.be.true
expect(_.all([1, true, 1])).to.be.true
describe "any", ->
expect(_.any([ [false,true], [true,true] ], _.head)).to.be.true
expect(_.any([ [false,false], [false,true] ], _.head)).to.be.false
it "should test truthiness if no function given", ->
expect(_.any([false, false, false])).to.be.false
expect(_.any([true, false, true])).to.be.true
describe "without", ->
expect(_.without("apple", ["bacon","apple","apple","omelette"]))
.to.deep.equal(["bacon","omelette"])
describe "remove", ->
expect(_.remove("apple", ["bacon","apple","apple","omelette"]))
.to.deep.equal(["apple"])
expect(_.remove("raisin", ["bacon","apple","apple","omelette"]))
.to.deep.equal(undefined)
describe "fold", ->
expect(_.fold([1,2,3,4,5], 0, (s, n) -> s + n)).to.equal(15)
describe "toString", ->
it "for booleans", ->
expect(_.toString(true)).to.equal("true")
it "for numbers", ->
expect(_.toString(1)).to.equal("1")
expect(_.toString(1.1)).to.equal("1.1")
it "for undefined and null", ->
expect(_.toString(undefined)).to.equal("undefined")
expect(_.toString(null)).to.equal("undefined")
it "for strings", ->
expect(_.toString("lol")).to.equal("lol")
it "for dates", ->
expect(_.toString(new Date(0))).to.contain("1970")
it "for arrays", ->
expect(_.toString([1,2,3])).to.equal("[1,2,3]")
it "for objects", ->
expect(_.toString({a: "b"})).to.equal("{a:b}")
expect(_.toString({a: "b", c: "d"})).to.equal("{a:b,c:d}")
it "for circular refs", ->
obj = { name : "nasty" }
obj.self = obj
expect(_.toString(obj).length).to.be.below(100)
it "works even when enumerable properties throw errors on access", ->
obj = { "name": "madcow" }
Object.defineProperty obj, "prop",
enumerable: true
get: ->
throw new Error "an error"
expect(_.toString(obj)).to.equal("{name:madcow,prop:Error: an error}")
describe "Bacon.later", ->
describe "should send single event and end", ->
expectStreamEvents(
-> Bacon.later(t(1), "lol")
["lol"])
describe "supports sending an Error event as well", ->
expectStreamEvents(
-> Bacon.later(t(1), new Bacon.Error("oops"))
[error()])
it "toString", ->
expect(Bacon.later(1, "wat").toString()).to.equal("Bacon.later(1,wat)")
it "inspect", ->
expect(Bacon.later(1, "wat").inspect()).to.equal("Bacon.later(1,wat)")
describe "Bacon.sequentially", ->
describe "should send given events and end", ->
expectStreamEvents(
-> Bacon.sequentially(t(1), ["lol", "wut"])
["lol", "wut"])
describe "include error events", ->
expectStreamEvents(
-> Bacon.sequentially(t(1), [error(), "lol"])
[error(), "lol"])
describe "will stop properly even when exception thrown by subscriber", ->
expectStreamEvents(
->
s = Bacon.sequentially(t(1), ["lol", "wut"])
s.onValue (value) ->
throw "testing"
s
[])
it "toString", ->
expect(Bacon.sequentially(1, [2]).toString()).to.equal("Bacon.sequentially(1,[2])")
describe "Bacon.repeatedly", ->
describe "repeats given sequence forever", ->
expectStreamEvents(
-> Bacon.repeatedly(1, [1,2]).take(5)
[1,2,1,2,1])
it "toString", ->
expect(Bacon.repeatedly(1, [1]).toString()).to.equal("Bacon.repeatedly(1,[1])")
describe "Bacon.interval", ->
describe "repeats single element indefinitely", ->
expectStreamEvents(
-> Bacon.interval(t(1), "x").take(3)
["x", "x", "x"])
it "toString", ->
expect(Bacon.interval(1, 2).toString()).to.equal("Bacon.interval(1,2)")
describe "Bacon.fromPoll", ->
describe "repeatedly polls given function for values", ->
expectStreamEvents(
-> Bacon.fromPoll(1, (-> "lol")).take(2)
["lol", "lol"])
it "toString", ->
expect(Bacon.fromPoll(1, (->)).toString()).to.equal("Bacon.fromPoll(1,function)")
testLiftedCallback = (src, liftedCallback) ->
input = [
Bacon.constant('a')
'x'
Bacon.constant('b').toProperty()
'y'
]
output = ['a', 'x', 'b', 'y']
expectStreamEvents(
-> liftedCallback(src, input...)
[output]
)
describe "Bacon.fromCallback", ->
describe "makes an EventStream from function that takes a callback", ->
expectStreamEvents(
->
src = (callback) -> callback("lol")
stream = Bacon.fromCallback(src)
["lol"])
describe "supports partial application", ->
expectStreamEvents(
->
src = (param, callback) -> callback(param)
stream = Bacon.fromCallback(src, "lol")
["lol"])
describe "supports partial application with Observable arguments", ->
testLiftedCallback(
(values..., callback) -> callback(values)
Bacon.fromCallback
)
describe "supports object, methodName, partial application", ->
expectStreamEvents(
->
src = {
"go": (param, callback) -> callback(param + " " + this.name)
"name": "bob"
}
stream = Bacon.fromCallback(src, "go", "hello")
["hello bob"])
it "toString", ->
expect(Bacon.fromCallback((->), "lol").toString()).to.equal("Bacon.fromCallback(function,lol)")
describe "Bacon.fromNodeCallback", ->
describe "makes an EventStream from function that takes a node-style callback", ->
expectStreamEvents(
->
src = (callback) -> callback(null, "lol")
stream = Bacon.fromNodeCallback(src)
["lol"])
describe "handles error parameter correctly", ->
expectStreamEvents(
->
src = (callback) -> callback('errortxt', null)
stream = Bacon.fromNodeCallback(src)
[error()])
describe "supports partial application", ->
expectStreamEvents(
->
src = (param, callback) -> callback(null, param)
stream = Bacon.fromNodeCallback(src, "lol")
["lol"])
describe "supports partial application with Observable arguments", ->
testLiftedCallback(
(values..., callback) -> callback(null, values)
Bacon.fromNodeCallback
)
describe "supports object, methodName, partial application", ->
expectStreamEvents(
->
src = {
"go": (param, callback) -> callback(null, param + " " + this.name)
"name": "bob"
}
stream = Bacon.fromNodeCallback(src, "go", "hello")
["hello bob"])
it "toString", ->
expect(Bacon.fromNodeCallback((->), "lol").toString()).to.equal("Bacon.fromNodeCallback(function,lol)")
# Wrap EventEmitter as EventTarget
toEventTarget = (emitter) ->
addEventListener: (event, handler) ->
emitter.addListener(event, handler)
removeEventListener: (event, handler) -> emitter.removeListener(event, handler)
describe "Bacon.fromEventTarget", ->
soon = (f) -> setTimeout f, 0
describe "should create EventStream from DOM object", ->
expectStreamEvents(
->
emitter = new EventEmitter()
emitter.on "newListener", ->
soon -> emitter.emit "click", "x"
element = toEventTarget emitter
Bacon.fromEventTarget(element, "click").take(1)
["x"]
)
describe "should create EventStream from EventEmitter", ->
expectStreamEvents(
->
emitter = new EventEmitter()
emitter.on "newListener", ->
soon -> emitter.emit "data", "x"
Bacon.fromEventTarget(emitter, "data").take(1)
["x"]
)
describe "should allow a custom map function for EventStream from EventEmitter", ->
expectStreamEvents(
->
emitter = new EventEmitter()
emitter.on "newListener", ->
soon -> emitter.emit "data", "x", "y"
Bacon.fromEventTarget(emitter, "data", (x, y) => [x, y]).take(1)
[["x", "y"]]
)
it "should clean up event listeners from EventEmitter", ->
emitter = new EventEmitter()
Bacon.fromEventTarget(emitter, "data").take(1).subscribe ->
emitter.emit "data", "x"
expect(emitter.listeners("data").length).to.deep.equal(0)
it "should clean up event listeners from DOM object", ->
emitter = new EventEmitter()
element = toEventTarget emitter
dispose = Bacon.fromEventTarget(element, "click").subscribe ->
dispose()
expect(emitter.listeners("click").length).to.deep.equal(0)
it "toString", ->
expect(Bacon.fromEventTarget({}, "click").toString()).to.equal("Bacon.fromEventTarget({},click)")
describe "Observable.log", ->
preservingLog = (f) ->
originalConsole = console
originalLog = console.log
try
f()
finally
global.console = originalConsole
console.log = originalLog
it "does not crash", ->
preservingLog ->
console.log = ->
Bacon.constant(1).log()
it "does not crash in case console.log is not defined", ->
preservingLog ->
console.log = undefined
Bacon.constant(1).log()
it "toString", ->
expect(Bacon.never().log().toString()).to.equal("Bacon.never()")
describe "Observable.slidingWindow", ->
describe "slides the window for EventStreams", ->
expectPropertyEvents(
-> series(1, [1,2,3]).slidingWindow(2)
[[], [1], [1,2], [2,3]])
describe "slides the window for Properties", ->
expectPropertyEvents(
-> series(1, [1,2,3]).toProperty().slidingWindow(2)
[[], [1], [1,2], [2,3]])
describe "accepts second parameter for minimum amount of values", ->
expectPropertyEvents(
-> series(1, [1,2,3,4]).slidingWindow(3, 2)
[[1,2], [1,2,3], [2,3,4]])
expectPropertyEvents(
-> series(1, [1,2,3,4]).toProperty(0).slidingWindow(3, 2)
[[0,1], [0, 1, 2], [1,2,3], [2,3,4]])
it "toString", ->
expect(Bacon.never().slidingWindow(2).toString()).to.equal("Bacon.never().slidingWindow(2,0)")
describe "EventStream.filter", ->
describe "should filter values", ->
expectStreamEvents(
-> series(1, [1, 2, error(), 3]).filter(lessThan(3))
[1, 2, error()])
describe "extracts field values", ->
expectStreamEvents(
-> series(1, [{good:true, value:"yes"}, {good:false, value:"no"}]).filter(".good").map(".value")
["yes"])
describe "can filter by Property value", ->
expectStreamEvents(
->
src = series(1, [1,1,2,3,4,4,8,7])
odd = src.map((x) -> x % 2).toProperty()
src.filter(odd)
[1,1,3,7])
it "toString", ->
expect(Bacon.never().filter(false).toString()).to.equal("Bacon.never().filter(function)")
describe "EventStream.map", ->
describe "should map with given function", ->
expectStreamEvents(
-> series(1, [1, 2, 3]).map(times, 2)
[2, 4, 6])
describe "also accepts a constant value", ->
expectStreamEvents(
-> series(1, [1, 2, 3,]).map("lol")
["lol", "lol", "lol"])
describe "extracts property from value object", ->
o = { lol : "wut" }
expectStreamEvents(
-> repeat(1, [o]).take(3).map(".lol")
["wut", "wut", "wut"])
describe "extracts a nested property too", ->
o = { lol : { wut : "wat" } }
expectStreamEvents(
-> Bacon.once(o).map(".lol.wut")
["wat"])
describe "in case of a function property, calls the function with no args", ->
expectStreamEvents(
-> Bacon.once([1,2,3]).map(".length")
[3])
describe "allows arguments for methods", ->
thing = { square: (x) -> x * x }
expectStreamEvents(
-> Bacon.once(thing).map(".square", 2)
[4])
describe "works with method call on given object, with partial application", ->
multiplier = { multiply: (x, y) -> x * y }
expectStreamEvents(
-> series(1, [1,2,3]).map(multiplier, "multiply", 2)
[2,4,6])
describe "can map to a Property value", ->
expectStreamEvents(
-> series(1, [1,2,3]).map(Bacon.constant(2))
[2,2,2])
it "preserves laziness", ->
calls = 0
id = (x) ->
calls++
x
Bacon.fromArray([1,2,3,4,5]).map(id).skip(4).onValue()
expect(calls).to.equal(1)
it "toString", ->
expect(Bacon.once(1).map(true).toString()).to.equal("Bacon.once(1).map(function)")
describe "EventStream.mapError", ->
describe "should map error events with given function", ->
expectStreamEvents(
-> repeat(1, [1, error("OOPS")]).mapError(id).take(2)
[1, "OOPS"])
describe "also accepts a constant value", ->
expectStreamEvents(
-> repeat(1, [1, error()]).mapError("ERR").take(2)
[1, "ERR"])
it "toString", ->
expect(Bacon.never().mapError(true).toString()).to.equal("Bacon.never().mapError(function)")
describe "EventStream.doAction", ->
it "calls function before sending value to listeners", ->
called = []
bus = new Bacon.Bus()
s = bus.doAction((x) -> called.push(x))
s.onValue(->)
s.onValue(->)
bus.push(1)
expect(called).to.deep.equal([1])
describe "does not alter the stream", ->
expectStreamEvents(
-> series(1, [1, 2]).doAction(->)
[1, 2])
it "toString", ->
expect(Bacon.never().doAction((->)).toString()).to.equal("Bacon.never().doAction(function)")
describe "EventStream.mapEnd", ->
describe "produces an extra element on stream end", ->
expectStreamEvents(
-> series(1, ["1", error()]).mapEnd("the end")
["1", error(), "the end"])
describe "accepts either a function or a constant value", ->
expectStreamEvents(
-> series(1, ["1", error()]).mapEnd(-> "the end")
["1", error(), "the end"])
describe "works with undefined value as well", ->
expectStreamEvents(
-> series(1, ["1", error()]).mapEnd()
["1", error(), undefined])
it "toString", ->
expect(Bacon.never().mapEnd(true).toString()).to.equal("Bacon.never().mapEnd(function)")
describe "EventStream.take", ->
describe "takes N first elements", ->
expectStreamEvents(
-> series(1, [1,2,3,4]).take(2)
[1,2])
describe "works with N=0", ->
expectStreamEvents(
-> series(1, [1,2,3,4]).take(0)
[])
describe "will stop properly even when exception thrown by subscriber", ->
expectStreamEvents(
->
s = Bacon.repeatedly(t(1), ["lol", "wut"]).take(2)
s.onValue (value) ->
throw "testing"
s
[])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1,2,3,4]).take(2)
[1,2])
it "toString", ->
expect(Bacon.never().take(1).toString()).to.equal("Bacon.never().take(1)")
describe "EventStream.takeWhile", ->
describe "takes while predicate is true", ->
expectStreamEvents(
-> repeat(1, [1, error("wat"), 2, 3]).takeWhile(lessThan(3))
[1, error("wat"), 2])
describe "extracts field values", ->
expectStreamEvents(
->
series(1, [{good:true, value:"yes"}, {good:false, value:"no"}])
.takeWhile(".good").map(".value")
["yes"])
describe "can filter by Property value", ->
expectStreamEvents(
->
src = series(1, [1,1,2,3,4,4,8,7])
odd = src.map((x) -> x % 2).toProperty()
src.takeWhile(odd)
[1,1])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3]).takeWhile(lessThan(3))
[1, 2])
it "toString", ->
expect(Bacon.never().takeWhile(true).toString()).to.equal("Bacon.never().takeWhile(function)")
describe "EventStream.skip", ->
describe "should skip first N items", ->
expectStreamEvents(
-> series(1, [1, error(), 2, error(), 3]).skip(1)
[error(), 2, error(), 3])
describe "accepts N <= 0", ->
expectStreamEvents(
-> series(1, [1, 2]).skip(-1)
[1, 2])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3]).skip(1)
[2, 3])
it "toString", ->
expect(Bacon.never().skip(1).toString()).to.equal("Bacon.never().skip(1)")
describe "EventStream.skipWhile", ->
describe "skips filter predicate holds true", ->
expectStreamEvents(
-> series(1, [1, error(), 2, error(), 3, 2]).skipWhile(lessThan(3))
[error(), error(), 3, 2])
describe "extracts field values", ->
expectStreamEvents(
->
series(1, [{good:true, value:"yes"}, {good:false, value:"no"}])
.skipWhile(".good").map(".value")
["no"])
describe "can filter by Property value", ->
expectStreamEvents(
->
src = series(1, [1,1,2,3,4,4,8,7])
odd = src.map((x) -> x % 2).toProperty()
src.skipWhile(odd)
[2,3,4,4,8,7])
describe "for synchronous sources", ->
describe "skips filter predicate holds true", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3, 2]).skipWhile(lessThan(3))
[3, 2])
it "toString", ->
expect(Bacon.never().skipWhile(1).toString()).to.equal("Bacon.never().skipWhile(function)")
describe "EventStream.skipUntil", ->
describe "skips events until one appears in given starter stream", ->
expectStreamEvents(
->
src = series(3, [1,2,3])
src.onValue(->) # to start "time" immediately instead of on subscribe
starter = series(4, ["start"])
src.skipUntil(starter)
[2,3])
describe "works with self-derived starter", ->
expectStreamEvents(
->
src = series(3, [1,2,3])
starter = src.filter((x) -> x == 3)
src.skipUntil(starter)
[3])
describe "works with self-derived starter with an evil twist", ->
expectStreamEvents(
->
src = series(3, [1,2,3])
data = src.map((x) -> x)
data.onValue(->)
starter = src.filter((x) -> x == 3)
data.skipUntil(starter)
[3])
it "toString", ->
expect(Bacon.never().skipUntil(Bacon.once(1)).toString()).to.equal("Bacon.never().skipUntil(Bacon.once(1))")
describe "EventStream.skipDuplicates", ->
it "Drops duplicates with subscribers with non-overlapping subscription time (#211)", ->
b = new Bacon.Bus()
noDups = b.skipDuplicates()
round = (expected) ->
values = []
noDups.take(1).onValue (x) -> values.push(x)
b.push 1
expect(values).to.deep.equal(expected)
round([1])
round([])
round([])
describe "drops duplicates", ->
expectStreamEvents(
-> series(1, [1, 2, error(), 2, 3, 1]).skipDuplicates()
[1, 2, error(), 3, 1])
describe "allows undefined as initial value", ->
expectStreamEvents(
-> series(1, [undefined, undefined, 1, 2]).skipDuplicates()
[undefined, 1, 2])
describe "works with custom isEqual function", ->
a = {x: 1}; b = {x: 2}; c = {x: 2}; d = {x: 3}; e = {x: 1}
isEqual = (a, b) -> a?.x == b?.x
expectStreamEvents(
-> series(1, [a, b, error(), c, d, e]).skipDuplicates(isEqual)
[a, b, error(), d, e])
describe "works with synchrounous sources", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 2, 3, 1]).skipDuplicates()
[1, 2, 3, 1], unstable)
it "toString", ->
expect(Bacon.never().skipDuplicates().toString()).to.equal("Bacon.never().skipDuplicates()")
# Tests for EventStream.flatMap: spawns a new observable for each source value
# and merges all spawned results into one stream. Also covers the non-function
# argument forms (constant observable, plain value, Error, ".field" extractor).
describe "EventStream.flatMap", ->
  describe "should spawn new stream for each value and collect results into a single stream", ->
    expectStreamEvents(
      -> series(1, [1, 2]).flatMap (value) ->
        Bacon.sequentially(t(2), [value, error(), value])
      [1, 2, error(), error(), 1, 2], unstable)
  describe "should pass source errors through to the result", ->
    expectStreamEvents(
      -> series(1, [error(), 1]).flatMap (value) ->
        Bacon.later(t(1), value)
      [error(), 1])
  describe "should work with a spawned stream responding synchronously", ->
    expectStreamEvents(
      -> series(1, [1, 2]).flatMap (value) ->
        Bacon.never().concat(Bacon.once(value))
      [1, 2], unstable)
    expectStreamEvents(
      -> series(1, [1,2]).flatMap (value) ->
        Bacon.never().concat(Bacon.once(value)).concat(Bacon.once("lol"))
      [1, "lol", 2, "lol"], unstable)
  describe "should work with a source stream responding synchronously", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2]).flatMap (value) ->
        Bacon.once(value)
      [1, 2])
    expectStreamEvents(
      -> Bacon.fromArray([1, 2]).flatMap (value) ->
        Bacon.fromArray([value, value*10])
      [1, 10, 2, 20])
    expectStreamEvents(
      -> Bacon.once(1).flatMap (value) ->
        Bacon.later(0, value)
      [1])
  describe "Works also when f returns a Property instead of an EventStream", ->
    expectStreamEvents(
      -> series(1, [1,2]).flatMap(Bacon.constant)
      [1,2], unstable)
  describe "Works also when f returns a constant value instead of an EventStream", ->
    expectStreamEvents(
      -> series(1, [1,2]).flatMap((x) -> x)
      [1,2], unstable)
  describe "Works also when f returns an Error instead of an EventStream", ->
    expectStreamEvents(
      -> series(1, [1,2]).flatMap((x) -> new Bacon.Error(x))
      [new Bacon.Error(1), new Bacon.Error(2)], unstable)
  describe "Accepts a constant EventStream/Property as an alternative to a function", ->
    expectStreamEvents(
      -> Bacon.once("asdf").flatMap(Bacon.constant("bacon"))
      ["bacon"])
    expectStreamEvents(
      -> Bacon.once("asdf").flatMap(Bacon.once("bacon"))
      ["bacon"])
  describe "Respects function construction rules", ->
    expectStreamEvents(
      -> Bacon.once({ bacon: Bacon.once("sir francis")}).flatMap(".bacon")
      ["sir francis"])
    expectStreamEvents(
      -> Bacon.once({ bacon: "sir francis"}).flatMap(".bacon")
      ["sir francis"])
    expectStreamEvents(
      ->
        # Extra args after the function are partially applied from the left.
        glorify = (x, y) -> Bacon.fromArray([x, y])
        Bacon.once("francis").flatMap(glorify, "sir")
      ["sir", "francis"])
  it "toString", ->
    expect(Bacon.never().flatMap(->).toString()).to.equal("Bacon.never().flatMap(function)")
# Tests for Property.flatMap: like EventStream.flatMap, but also spawns
# a stream for the Initial (current value) event.
describe "Property.flatMap", ->
  describe "should spawn new stream for all events including Init", ->
    expectStreamEvents(
      ->
        once = (x) -> Bacon.once(x)
        series(1, [1, 2]).toProperty(0).flatMap(once)
      [0, 1, 2], unstable)
  describe "Works also when f returns a Property instead of an EventStream", ->
    expectStreamEvents(
      -> series(1, [1,2]).toProperty().flatMap(Bacon.constant)
      [1,2], unstable)
    expectPropertyEvents(
      -> series(1, [1,2]).toProperty().flatMap(Bacon.constant).toProperty()
      [1,2], unstable)
  describe "works for synchronous source", ->
    expectStreamEvents(
      ->
        once = (x) -> Bacon.once(x)
        Bacon.fromArray([1, 2]).toProperty(0).flatMap(once)
      [0, 1, 2], unstable)
  it "toString", ->
    expect(Bacon.constant(1).flatMap(->).toString()).to.equal("Bacon.constant(1).flatMap(function)")
# Tests for EventStream.flatMapLatest: each new source value unsubscribes the
# previously spawned stream, so only the latest spawned stream's events pass.
describe "EventStream.flatMapLatest", ->
  describe "spawns new streams but collects values from the latest spawned stream only", ->
    expectStreamEvents(
      -> series(3, [1, 2]).flatMapLatest (value) ->
        Bacon.sequentially(t(2), [value, error(), value])
      [1, 2, error(), 2], unstable)
  describe "Accepts a constant EventStream/Property as an alternative to a function", ->
    expectStreamEvents(
      -> Bacon.once("asdf").flatMapLatest(Bacon.constant("bacon"))
      ["bacon"], unstable)
  describe "Accepts a field extractor string instead of function", ->
    expectStreamEvents(
      -> Bacon.once({ bacon: Bacon.once("sir francis")}).flatMapLatest(".bacon")
      ["sir francis"])
    expectStreamEvents(
      -> Bacon.once({ bacon: "sir francis"}).flatMapLatest(".bacon")
      ["sir francis"])
  it "toString", ->
    expect(Bacon.never().flatMapLatest(->).toString()).to.equal("Bacon.never().flatMapLatest(function)")
# Tests for Property.flatMapLatest: the Initial value also spawns a stream.
describe "Property.flatMapLatest", ->
  describe "spawns new streams but collects values from the latest spawned stream only", ->
    expectStreamEvents(
      -> series(3, [1, 2]).toProperty(0).flatMapLatest (value) ->
        Bacon.sequentially(t(2), [value, value])
      [0, 1, 2, 2], unstable)
  describe "Accepts a constant EventStream/Property as an alternative to a function", ->
    expectStreamEvents(
      -> Bacon.constant("asdf").flatMapLatest(Bacon.constant("bacon"))
      ["bacon"], unstable)
  it "toString", ->
    expect(Bacon.constant(1).flatMapLatest(->).toString()).to.equal("Bacon.constant(1).flatMapLatest(function)")
# Tests for EventStream.flatMapFirst: source values arriving while a spawned
# stream is still active are dropped (values 4 and 8 below are skipped).
describe "EventStream.flatMapFirst", ->
  describe "spawns new streams and ignores source events until current spawned stream has ended", ->
    expectStreamEvents(
      -> series(2, [2, 4, 6, 8]).flatMapFirst (value) ->
        series(1, ["a" + value, "b" + value, "c" + value])
      ["a2", "b2", "c2", "a6", "b6", "c6"], unstable)
  describe "Accepts a field extractor string instead of function", ->
    expectStreamEvents(
      -> Bacon.once({ bacon: Bacon.once("sir francis")}).flatMapFirst(".bacon")
      ["sir francis"])
    expectStreamEvents(
      -> Bacon.once({ bacon: "sir francis"}).flatMapFirst(".bacon")
      ["sir francis"])
  it "toString", ->
    expect(Bacon.never().flatMapFirst(->).toString()).to.equal("Bacon.never().flatMapFirst(function)")
# Tests for EventStream.merge: interleaves two streams; ends when both end.
describe "EventStream.merge", ->
  describe "merges two streams and ends when both are exhausted", ->
    expectStreamEvents(
      ->
        left = series(1, [1, error(), 2, 3])
        right = series(1, [4, 5, 6]).delay(t(4))
        left.merge(right)
      [1, error(), 2, 3, 4, 5, 6], unstable)
  describe "respects subscriber return value", ->
    expectStreamEvents(
      ->
        left = repeat(2, [1, 3]).take(3)
        right = repeat(3, [2]).take(3)
        left.merge(right).takeWhile(lessThan(2))
      [1])
  describe "does not duplicate same error from two streams", ->
    expectStreamEvents(
      ->
        # Both branches derive from the same source, so each error should be
        # delivered once, not once per branch.
        src = series(1, [1, error(), 2, error(), 3])
        left = src.map((x) -> x)
        right = src.map((x) -> x * 2)
        left.merge(right)
      [1, 2, error(), 2, 4, error(), 3, 6])
  describe "works with synchronous sources", ->
    expectStreamEvents(
      -> Bacon.fromArray([1,2]).merge(Bacon.fromArray([3,4]))
      [1,2,3,4])
  it "toString", ->
    expect(Bacon.once(1).merge(Bacon.once(2)).toString()).to.equal("Bacon.once(1).merge(Bacon.once(2))")
# Tests for EventStream.delay: values are shifted in time; errors pass
# through immediately (note error() arriving first in the expected output).
describe "EventStream.delay", ->
  describe "delays all events (except errors) by given delay in milliseconds", ->
    expectStreamEvents(
      ->
        left = series(2, [1, 2, 3])
        right = series(1, [error(), 4, 5, 6]).delay(t(6))
        left.merge(right)
      [error(), 1, 2, 3, 4, 5, 6], unstable)
  describe "works with synchronous streams", ->
    expectStreamEvents(
      ->
        left = Bacon.fromArray([1, 2, 3])
        right = Bacon.fromArray([4, 5, 6]).delay(t(6))
        left.merge(right)
      [1, 2, 3, 4, 5, 6], unstable)
  it "toString", ->
    expect(Bacon.never().delay(1).toString()).to.equal("Bacon.never().delay(1)")
# Tests for EventStream.debounce: emits a value only after the source has been
# quiet for the given period; errors are passed through immediately.
describe "EventStream.debounce", ->
  describe "throttles input by given delay, passing-through errors", ->
    expectStreamEvents(
      -> series(2, [1, error(), 2]).debounce(t(7))
      [error(), 2])
  describe "waits for a quiet period before outputing anything", ->
    th.expectStreamTimings(
      -> series(2, [1, 2, 3, 4]).debounce(t(3))
      [[11, 4]])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, 3, 4]).debounce(t(3))
      [4])
  describe "works in combination with scan", ->
    # `count` is shared between the expectation and the follow-up `it` below,
    # verifying the accumulator runs exactly once per delivered value.
    count = 0
    expectPropertyEvents(
      -> series(2, [1,2,3]).debounce(1).scan(0, (x,y) -> count++; x + y)
      [0, 1, 3, 6]
    )
    it "calls accumulator once per value", ->
      expect(count).to.equal(3)
  it "toString", ->
    expect(Bacon.never().debounce(1).toString()).to.equal("Bacon.never().debounce(1)")
# Tests for EventStream.debounceImmediate: leading-edge debounce — the first
# event fires at once, subsequent events are ignored for the delay window.
describe "EventStream.debounceImmediate(delay)", ->
  describe "outputs first event immediately, then ignores events for given amount of milliseconds", ->
    th.expectStreamTimings(
      -> series(2, [1, 2, 3, 4]).debounceImmediate(t(3))
      [[2, 1], [6, 3]], unstable)
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, 3, 4]).debounceImmediate(t(3))
      [1])
  it "toString", ->
    expect(Bacon.never().debounceImmediate(1).toString()).to.equal("Bacon.never().debounceImmediate(1)")
# Tests for EventStream.throttle: rate-limits output to steady intervals,
# unlike debounce it does not wait for a quiet period.
describe "EventStream.throttle(delay)", ->
  describe "outputs at steady intervals, without waiting for quiet period", ->
    th.expectStreamTimings(
      -> series(2, [1, 2, 3]).throttle(t(3))
      [[5, 2], [8, 3]])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, 3]).throttle(t(3))
      [3])
  it "toString", ->
    expect(Bacon.never().throttle(1).toString()).to.equal("Bacon.never().throttle(1)")
# Tests for EventStream.bufferWithTime: collects values into arrays flushed on
# a timer. The delay argument may also be a custom defer-function.
describe "EventStream.bufferWithTime", ->
  describe "returns events in bursts, passing through errors", ->
    expectStreamEvents(
      -> series(2, [error(), 1, 2, 3, 4, 5, 6, 7]).bufferWithTime(t(7))
      [error(), [1, 2, 3, 4], [5, 6, 7]])
  describe "keeps constant output rate even when input is sporadical", ->
    th.expectStreamTimings(
      -> th.atGivenTimes([[0, "a"], [3, "b"], [5, "c"]]).bufferWithTime(t(2))
      [[2, ["a"]], [4, ["b"]], [6, ["c"]]]
      unstable
    )
  describe "works with empty stream", ->
    expectStreamEvents(
      -> Bacon.never().bufferWithTime(t(1))
      [])
  describe "allows custom defer-function", ->
    fast = (f) -> sc.setTimeout(f, 0)
    th.expectStreamTimings(
      -> th.atGivenTimes([[0, "a"], [2, "b"]]).bufferWithTime(fast)
      [[0, ["a"]], [2, ["b"]]])
  describe "works with synchronous defer-function", ->
    sync = (f) -> f()
    th.expectStreamTimings(
      -> th.atGivenTimes([[0, "a"], [2, "b"]]).bufferWithTime(sync)
      [[0, ["a"]], [2, ["b"]]])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> series(2, [1,2,3]).bufferWithTime(t(7))
      [[1,2,3]])
  it "toString", ->
    expect(Bacon.never().bufferWithTime(1).toString()).to.equal("Bacon.never().bufferWithTime(1)")
# Tests for EventStream.bufferWithCount: fixed-size chunks; a trailing partial
# chunk is flushed on stream end; errors pass through un-buffered.
describe "EventStream.bufferWithCount", ->
  describe "returns events in chunks of fixed size, passing through errors", ->
    expectStreamEvents(
      -> series(1, [1, 2, 3, error(), 4, 5]).bufferWithCount(2)
      [[1, 2], error(), [3, 4], [5]])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1,2,3,4,5]).bufferWithCount(2)
      [[1, 2], [3, 4], [5]])
  it "toString", ->
    expect(Bacon.never().bufferWithCount(1).toString()).to.equal("Bacon.never().bufferWithCount(1)")
# Tests for EventStream.bufferWithTimeOrCount: flushes on whichever of the
# time window or the count limit is reached first.
describe "EventStream.bufferWithTimeOrCount", ->
  describe "flushes on count", ->
    expectStreamEvents(
      -> series(1, [1, 2, 3, error(), 4, 5]).bufferWithTimeOrCount(t(10), 2)
      [[1, 2], error(), [3, 4], [5]])
  describe "flushes on timeout", ->
    expectStreamEvents(
      -> series(2, [error(), 1, 2, 3, 4, 5, 6, 7]).bufferWithTimeOrCount(t(7), 10)
      [error(), [1, 2, 3, 4], [5, 6, 7]])
  it "toString", ->
    expect(Bacon.never().bufferWithTimeOrCount(1, 2).toString()).to.equal("Bacon.never().bufferWithTimeOrCount(1,2)")
# Tests for EventStream.takeUntil: takes from the source until the stopper
# emits a value. Covers self-derived stoppers, Property stoppers, synchronous
# edge cases, and the exact tick at which subscriptions are released (via the
# `sc` scheduler and onUnsub hooks).
describe "EventStream.takeUntil", ->
  describe "takes elements from source until an event appears in the other stream", ->
    expectStreamEvents(
      ->
        src = repeat(3, [1, 2, 3])
        stopper = repeat(7, ["stop!"])
        src.takeUntil(stopper)
      [1, 2], unstable)
  describe "works on self-derived stopper", ->
    expectStreamEvents(
      ->
        src = repeat(3, [3, 2, 1])
        stopper = src.filter(lessThan(3))
        src.takeUntil(stopper)
      [3])
  describe "works on self-derived stopper with an evil twist", ->
    expectStreamEvents(
      ->
        src = repeat(3, [3, 2, 1])
        data = src.map((x) -> x)
        # Extra subscriber keeps `data` hot while the stopper fires.
        data.take(3).onValue(->)
        stopper = src.filter(lessThan(3))
        data.takeUntil(stopper)
      [3])
  describe "includes source errors, ignores stopper errors", ->
    expectStreamEvents(
      ->
        src = repeat(2, [1, error(), 2, 3])
        stopper = repeat(7, ["stop!"]).merge(repeat(1, [error()]))
        src.takeUntil(stopper)
      [1, error(), 2], unstable)
  describe "works with Property as stopper", ->
    expectStreamEvents(
      ->
        src = repeat(3, [1, 2, 3])
        stopper = repeat(7, ["stop!"]).toProperty()
        src.takeUntil(stopper)
      [1, 2], unstable)
  describe "considers Property init value as stopper", ->
    expectStreamEvents(
      ->
        src = repeat(3, [1, 2, 3])
        stopper = Bacon.constant("stop")
        src.takeUntil(stopper)
      [])
  describe "ends immediately with synchronous stopper", ->
    expectStreamEvents(
      ->
        src = repeat(3, [1, 2, 3])
        stopper = Bacon.once("stop")
        src.takeUntil(stopper)
      [])
  describe "ends properly with a never-ending stopper", ->
    expectStreamEvents(
      ->
        src = series(1, [1,2,3])
        stopper = new Bacon.Bus()
        src.takeUntil(stopper)
      [1,2,3])
  describe "ends properly with a never-ending stopper and synchronous source", ->
    expectStreamEvents(
      ->
        src = Bacon.fromArray([1,2,3]).mapEnd("finito")
        stopper = new Bacon.Bus()
        src.takeUntil(stopper)
      [1,2,3, "finito"])
  describe "unsubscribes its source as soon as possible", ->
    expectStreamEvents(
      ->
        startTick = sc.now()
        Bacon.later(20)
        .onUnsub(->
          expect(sc.now()).to.equal(startTick + 1))
        .takeUntil Bacon.later(1)
      [])
  describe "it should unsubscribe its stopper on end", ->
    expectStreamEvents(
      ->
        startTick = sc.now()
        Bacon.later(1,'x').takeUntil(Bacon.later(20).onUnsub(->
          expect(sc.now()).to.equal(startTick + 1)))
      ['x'])
  describe "it should unsubscribe its stopper on no more", ->
    expectStreamEvents(
      ->
        startTick = sc.now()
        Bacon.later(1,'x').takeUntil(Bacon.later(20).onUnsub(->
          expect(sc.now()).to.equal(startTick + 1)))
      ['x'])
  ### TODO does not pass
  describe "works with synchronous self-derived sources", ->
    expectStreamEvents(
      ->
        a = Bacon.fromArray [1,2]
        b = a.filter((x) -> x >= 2)
        a.takeUntil b
      [1])
  ###
  it "toString", ->
    expect(Bacon.later(1, "a").takeUntil(Bacon.later(2, "b")).toString()).to.equal("Bacon.later(1,a).takeUntil(Bacon.later(2,b))")
# Re-entrancy tests: pushing to a Bus from within one of its own subscribers
# must not lose, duplicate, or reorder events incorrectly.
describe "When an Event triggers another one in the same stream, while dispatching", ->
  it "Delivers triggered events correctly", ->
    bus = new Bacon.Bus
    values = []
    bus.take(2).onValue (v) ->
      bus.push "A"
      bus.push "B"
    bus.onValue (v) ->
      values.push(v)
    bus.push "a"
    bus.push "b"
    expect(values).to.deep.equal(["a", "A", "B", "A", "B", "b"])
  it "EventStream.take(1) works correctly (bug fix)", ->
    bus = new Bacon.Bus
    values = []
    bus.take(1).onValue (v) ->
      # Re-entrant push must not cause take(1) to deliver a second value.
      bus.push("onValue triggers a side-effect here")
      values.push(v)
    bus.push("foo")
    expect(values).to.deep.equal(["foo"])
# Tests for EventStream.awaiting(other): a Property that is true while this
# stream has produced output more recently than `other`.
# Fix: corrected misspelled test description ("simultaneouts" -> "simultaneous").
describe "EventStream.awaiting(other)", ->
  describe "indicates whether s1 has produced output after s2 (or only the former has output so far)", ->
    expectPropertyEvents(
      -> series(2, [1, 1]).awaiting(series(3, [2]))
      [false, true, false, true])
  describe "supports Properties", ->
    expectPropertyEvents(
      -> series(2, [1, 1]).awaiting(series(3, [2]).toProperty())
      [false, true, false, true])
  describe "supports simultaneous events", ->
    expectPropertyEvents(
      ->
        src = Bacon.later(1, 1)
        src.awaiting(src.map(->))
      [false])
    expectPropertyEvents(
      ->
        src = Bacon.later(1, 1)
        src.map(->).awaiting(src)
      [false])
  it "toString", ->
    expect(Bacon.never().awaiting(Bacon.once(1)).toString()).to.equal("Bacon.never().awaiting(Bacon.once(1))")
# Tests for EventStream.endOnError: ends the stream on an Error event,
# optionally only when a predicate (function or ".field" extractor) matches.
describe "EventStream.endOnError", ->
  describe "terminates on error", ->
    expectStreamEvents(
      -> repeat(1, [1, 2, error(), 3]).endOnError()
      [1, 2, error()])
  describe "accepts predicate function", ->
    expectStreamEvents(
      -> series(1, [1, 2, error(), 3, new Bacon.Error({serious:true}), 4]).endOnError((e) -> e?.serious)
      [1,2,error(),3,error()])
  describe "accepts extractor string", ->
    expectStreamEvents(
      -> series(1, [1, 2, error(), 3, new Bacon.Error({serious:true}), 4]).endOnError(".serious")
      [1,2,error(),3,error()])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, error(), 3]).endOnError()
      [1, 2, error()])
  it "toString", ->
    expect(Bacon.never().endOnError().toString()).to.equal("Bacon.never().endOnError()")
# Tests for Bacon.constant: a Property holding a single fixed value,
# delivered identically to every subscriber (even late ones).
describe "Bacon.constant", ->
  describe "creates a constant property", ->
    expectPropertyEvents(
      -> Bacon.constant("lol")
      ["lol"])
  it "ignores unsubscribe", ->
    # Calling the returned unsubscriber must be a safe no-op.
    Bacon.constant("lol").onValue(=>)()
  describe "provides same value to all listeners", ->
    c = Bacon.constant("lol")
    expectPropertyEvents((-> c), ["lol"])
    # Second subscription to the same (already consumed) constant.
    it "check check", ->
      f = mockFunction()
      c.onValue(f)
      f.verify("lol")
  it "provides same value to all listeners, when mapped (bug fix)", ->
    c = Bacon.constant("lol").map(id)
    f = mockFunction()
    c.onValue(f)
    f.verify("lol")
    c.onValue(f)
    f.verify("lol")
  it "toString", ->
    expect(Bacon.constant(1).toString()).to.equal("Bacon.constant(1)")
# Tests for Bacon.never: a stream that emits only the End event.
describe "Bacon.never", ->
  describe "should send just end", ->
    expectStreamEvents(
      -> Bacon.never()
      [])
# Tests for Bacon.once: a single event followed by End; a wrapped Bacon.Error
# is delivered as an Error event rather than as a value.
describe "Bacon.once", ->
  describe "should send single event and end", ->
    expectStreamEvents(
      -> Bacon.once("pow")
      ["pow"])
  describe "accepts an Error event as parameter", ->
    expectStreamEvents(
      -> Bacon.once(new Bacon.Error("oop"))
      [error()])
  describe "Allows wrapped events, for instance, Bacon.Error", ->
    expectStreamEvents(
      -> Bacon.once(error())
      [error()])
# Tests for Bacon.fromArray: turns an array into a synchronous EventStream;
# wrapped events (e.g. Bacon.Error) in the array are delivered as-is.
describe "Bacon.fromArray", ->
  describe "Turns an empty array into an EventStream", ->
    expectStreamEvents(
      -> Bacon.fromArray([])
      [])
  describe "Turns a single-element array into an EventStream", ->
    expectStreamEvents(
      -> Bacon.fromArray([1])
      [1])
  describe "Turns a longer array into an EventStream", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, 3])
      [1, 2, 3])
  describe "Allows wrapped events, for instance, Bacon.Error", ->
    expectStreamEvents(
      -> Bacon.fromArray([error(), 1])
      [error(), 1])
# Tests for EventStream.concat: delivers all events from the left stream, then
# subscribes to the right. Covers synchronous sources and subscriber return
# values on both sides.
describe "EventStream.concat", ->
  describe "provides values from streams in given order and ends when both are exhausted", ->
    expectStreamEvents(
      ->
        left = series(2, [1, error(), 2, 3])
        right = series(1, [4, 5, 6])
        left.concat(right)
      [1, error(), 2, 3, 4, 5, 6], unstable)
  describe "respects subscriber return value when providing events from left stream", ->
    expectStreamEvents(
      ->
        left = repeat(3, [1, 3]).take(3)
        right = repeat(2, [1]).take(3)
        left.concat(right).takeWhile(lessThan(2))
      [1])
  describe "respects subscriber return value when providing events from right stream", ->
    expectStreamEvents(
      ->
        left = series(3, [1, 2])
        right = series(2, [2, 4, 6])
        left.concat(right).takeWhile(lessThan(4))
      [1, 2, 2])
  describe "works with Bacon.never()", ->
    expectStreamEvents(
      -> Bacon.never().concat(Bacon.never())
      [])
  describe "works with Bacon.once()", ->
    expectStreamEvents(
      -> Bacon.once(2).concat(Bacon.once(1))
      [2, 1])
  describe "works with Bacon.once() and Bacon.never()", ->
    expectStreamEvents(
      -> Bacon.once(1).concat(Bacon.never())
      [1])
  describe "works with Bacon.never() and Bacon.once()", ->
    expectStreamEvents(
      -> Bacon.never().concat(Bacon.once(1))
      [1])
  describe "works with Bacon.once() and async source", ->
    expectStreamEvents(
      -> Bacon.once(1).concat(series(1, [2, 3]))
      [1, 2, 3])
  describe "works with Bacon.once() and Bacon.fromArray()", ->
    expectStreamEvents(
      -> Bacon.once(1).concat(Bacon.fromArray([2, 3]))
      [1, 2, 3], unstable)
  describe "Works with synchronized left stream and doAction", ->
    expectStreamEvents(
      ->
        bus = new Bacon.Bus()
        stream = Bacon.fromArray([1,2]).flatMapLatest (x) ->
          Bacon.once(x).concat(Bacon.later(10, x).doAction((x) -> bus.push(x); bus.end()))
        stream.onValue ->
        bus
      [2])
  it "toString", ->
    expect(Bacon.once(1).concat(Bacon.once(2)).toString()).to.equal("Bacon.once(1).concat(Bacon.once(2))")
# Tests for EventStream.startWith: prepends a seed value to the stream.
describe "EventStream.startWith", ->
  describe "provides seed value, then the rest", ->
    expectStreamEvents(
      ->
        left = series(1, [1, 2, 3])
        left.startWith('pow')
      ['pow', 1, 2, 3], unstable)
  describe "works with synchronous source", ->
    expectStreamEvents(
      ->
        left = Bacon.fromArray([1, 2, 3])
        left.startWith('pow')
      ['pow', 1, 2, 3], unstable)
  it "toString", ->
    expect(Bacon.never().startWith(0).toString()).to.equal("Bacon.never().startWith(0)")
# Tests for Property.startWith: the seed is used only when the Property has no
# initial value of its own.
# Fix: the "works with combineAsArray" check previously ran its assertion at
# suite-definition time (outside any `it`), so a failure would surface as a
# suite-loading error instead of a test failure; it also reused one `result`
# variable for both the property and the captured value. Wrapped in an `it`
# with distinct variable names; the asserted behavior is unchanged.
describe "Property.startWith", ->
  describe "starts with given value if the Property doesn't have an initial value", ->
    expectPropertyEvents(
      ->
        left = series(1, [1, 2, 3]).toProperty()
        left.startWith('pow')
      ['pow', 1, 2, 3], unstable)
  describe "works with synchronous source", ->
    expectPropertyEvents(
      ->
        left = Bacon.fromArray([1, 2, 3]).toProperty()
        left.startWith('pow')
      ['pow', 1, 2, 3], unstable)
  describe "starts with the initial value of the Property if any", ->
    expectPropertyEvents(
      ->
        left = series(1, [1, 2, 3]).toProperty(0)
        left.startWith('pow')
      [0, 1, 2, 3], unstable)
  describe "works with combineAsArray", ->
    it "delivers the combined value instead of the seed", ->
      result = null
      a = Bacon.constant("lolbal")
      prop = Bacon.combineAsArray([a.map(true), a.map(true)]).map("right").startWith("wrong")
      prop.onValue((x) -> result = x)
      expect(result).to.equal("right")
  it "toString", ->
    expect(Bacon.constant(2).startWith(1).toString()).to.equal("Bacon.constant(2).startWith(1)")
# Tests for EventStream.toProperty: converts a stream into a Property with an
# optional initial value; also checks laziness of upstream map calls.
describe "EventStream.toProperty", ->
  describe "delivers current value and changes to subscribers", ->
    expectPropertyEvents(
      ->
        s = new Bacon.Bus()
        p = s.toProperty("a")
        soon ->
          s.push "b"
          s.end()
        p
      ["a", "b"])
  describe "passes through also Errors", ->
    expectPropertyEvents(
      -> series(1, [1, error(), 2]).toProperty()
      [1, error(), 2])
  describe "supports null as value", ->
    expectPropertyEvents(
      -> series(1, [null, 1, null]).toProperty(null)
      [null, null, 1, null])
  describe "does not get messed-up by a transient subscriber (bug fix)", ->
    expectPropertyEvents(
      ->
        prop = series(1, [1,2,3]).toProperty(0)
        # Transient subscriber unsubscribes immediately via Bacon.noMore.
        prop.subscribe (event) =>
          Bacon.noMore
        prop
      [0, 1, 2, 3])
  describe "works with synchronous source", ->
    expectPropertyEvents(
      -> Bacon.fromArray([1,2,3]).toProperty()
      [1,2,3])
    expectPropertyEvents(
      -> Bacon.fromArray([1,2,3]).toProperty(0)
      [0,1,2,3])
  it "preserves laziness", ->
    # skip(4) means only the last value needs mapping; `id` must run once.
    calls = 0
    id = (x) ->
      calls++
      x
    Bacon.fromArray([1,2,3,4,5]).map(id).toProperty().skip(4).onValue()
    expect(calls).to.equal(1)
# Tests for Property.toEventStream: the resulting stream starts with the
# Property's current value, then its changes.
describe "Property.toEventStream", ->
  describe "creates a stream that starts with current property value", ->
    expectStreamEvents(
      -> series(1, [1, 2]).toProperty(0).toEventStream()
      [0, 1, 2], unstable)
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2]).toProperty(0).toEventStream()
      [0, 1, 2], unstable)
# Tests for Property.toProperty: calling it on a Property is an identity
# operation and must reject an initial-value argument.
# Fix: the old `try / fail() / catch e` pattern caught the exception thrown by
# fail() itself, so the test could never fail even if toProperty(0) stopped
# throwing. Assert the throw explicitly with chai instead.
describe "Property.toProperty", ->
  describe "returns the same Property", ->
    expectPropertyEvents(
      -> Bacon.constant(1).toProperty()
      [1])
  it "rejects arguments", ->
    expect(-> Bacon.constant(1).toProperty(0)).to.throw()
# Tests for Property.map: maps the initial value and changes; errors pass
# through. `times` is partially applied with the extra argument 2.
describe "Property.map", ->
  describe "maps property values", ->
    expectPropertyEvents(
      ->
        s = new Bacon.Bus()
        p = s.toProperty(1).map(times, 2)
        soon ->
          s.push 2
          s.error()
          s.end()
        p
      [2, 4, error()])
# Tests for Property.filter: non-matching updates are dropped, but the last
# matching value remains the current value; also supports a Property as the
# filter condition.
describe "Property.filter", ->
  describe "should filter values", ->
    expectPropertyEvents(
      -> series(1, [1, error(), 2, 3]).toProperty().filter(lessThan(3))
      [1, error(), 2])
  it "preserves old current value if the updated value is non-matching", ->
    s = new Bacon.Bus()
    p = s.toProperty().filter(lessThan(2))
    p.onValue(=>) # to ensure that property is actually updated
    s.push(1)
    s.push(2)
    values = []
    p.onValue((v) => values.push(v))
    expect(values).to.deep.equal([1])
  describe "can filter by Property value", ->
    expectPropertyEvents(
      ->
        src = series(2, [1, 2, 3, 4]).delay(t(1)).toProperty()
        ok = series(2, [false, true, true, false]).toProperty()
        src.filter(ok)
      [2, 3])
# Tests for Property.take(1): takes the Initial event when present, otherwise
# the first Next; must also end never-ending source Properties.
describe "Property.take(1)", ->
  describe "takes the Initial event", ->
    expectPropertyEvents(
      -> series(1, [1,2,3]).toProperty(0).take(1)
      [0])
  describe "takes the first Next event, if no Initial value", ->
    expectPropertyEvents(
      -> series(1, [1,2,3]).toProperty().take(1)
      [1])
  describe "works for constants", ->
    expectPropertyEvents(
      -> Bacon.constant(1)
      [1])
  describe "works for never-ending Property", ->
    expectPropertyEvents(
      -> repeat(1, [1,2,3]).toProperty(0).take(1)
      [0])
    expectPropertyEvents(
      -> repeat(1, [1,2,3]).toProperty().take(1)
      [1])
# Sanity check: take(1) on a single-event synchronous stream.
describe "Bacon.once().take(1)", ->
  describe "works", ->
    expectStreamEvents(
      -> Bacon.once(1).take(1)
      [1])
# Tests for Property.takeWhile: takes values while the predicate (function,
# ".field" extractor, or another Property) holds, then ends.
describe "Property.takeWhile", ->
  describe "takes while predicate is true", ->
    expectPropertyEvents(
      ->
        series(1, [1, error("wat"), 2, 3])
          .toProperty().takeWhile(lessThan(3))
      [1, error("wat"), 2])
  describe "extracts field values", ->
    expectPropertyEvents(
      ->
        series(1, [{good:true, value:"yes"}, {good:false, value:"no"}])
          .toProperty().takeWhile(".good").map(".value")
      ["yes"])
  describe "can filter by Property value", ->
    expectPropertyEvents(
      ->
        src = series(1, [1,1,2,3,4,4,8,7]).toProperty()
        odd = src.map((x) -> x % 2)
        src.takeWhile(odd)
      [1,1])
  describe "works with never-ending Property", ->
    expectPropertyEvents(
      ->
        repeat(1, [1, error("wat"), 2, 3])
          .toProperty().takeWhile(lessThan(3))
      [1, error("wat"), 2])
# Tests for Property.takeUntil: ends the Property when the stopper emits.
describe "Property.takeUntil", ->
  describe "takes elements from source until an event appears in the other stream", ->
    expectPropertyEvents(
      -> series(2, [1,2,3]).toProperty().takeUntil(Bacon.later(t(3)))
      [1])
  describe "works with errors", ->
    expectPropertyEvents(
      ->
        src = repeat(2, [1, error(), 3])
        stopper = repeat(5, ["stop!"])
        src.toProperty(0).takeUntil(stopper)
      [0, 1, error()])
  it "toString", ->
    expect(Bacon.constant(1).takeUntil(Bacon.never()).toString()).to.equal("Bacon.constant(1).takeUntil(Bacon.never())")
# Tests for Property.delay: changes are delayed but the initial value is
# delivered immediately.
describe "Property.delay", ->
  describe "delivers initial value and changes", ->
    expectPropertyEvents(
      -> series(1, [1,2,3]).toProperty(0).delay(t(1))
      [0,1,2,3])
  describe "delays changes", ->
    expectStreamEvents(
      ->
        series(2, [1,2,3])
          .toProperty()
          .delay(t(2)).changes().takeUntil(Bacon.later(t(5)))
      [1], unstable)
  describe "does not delay initial value", ->
    expectPropertyEvents(
      -> series(3, [1]).toProperty(0).delay(1).takeUntil(Bacon.later(t(2)))
      [0])
  it "toString", ->
    expect(Bacon.constant(0).delay(1).toString()).to.equal("Bacon.constant(0).delay(1)")
# Tests for Property.debounce: changes are debounced; the initial value is not.
describe "Property.debounce", ->
  describe "delivers initial value and changes", ->
    expectPropertyEvents(
      -> series(2, [1,2,3]).toProperty(0).debounce(t(1))
      [0,1,2,3])
  describe "throttles changes, but not initial value", ->
    expectPropertyEvents(
      -> series(1, [1,2,3]).toProperty(0).debounce(t(4))
      [0,3])
  describe "works without initial value", ->
    expectPropertyEvents(
      -> series(2, [1,2,3]).toProperty().debounce(t(4))
      [3])
  describe "works with Bacon.constant (bug fix)", ->
    expectPropertyEvents(
      -> Bacon.constant(1).debounce(1)
      [1])
  it "toString", ->
    expect(Bacon.constant(0).debounce(1).toString()).to.equal("Bacon.constant(0).debounce(1)")
# Tests for Property.throttle: changes are rate-limited; initial value is not.
describe "Property.throttle", ->
  describe "throttles changes, but not initial value", ->
    expectPropertyEvents(
      -> series(1, [1,2,3]).toProperty(0).throttle(t(4))
      [0,3])
  describe "works with Bacon.once (bug fix)", ->
    expectPropertyEvents(
      -> Bacon.once(1).toProperty().throttle(1)
      [1])
  it "toString", ->
    expect(Bacon.constant(0).throttle(1).toString()).to.equal("Bacon.constant(0).throttle(1)")
# Tests for Property.endOnError: the Property ends at the first Error event.
describe "Property.endOnError", ->
  describe "terminates on Error", ->
    expectPropertyEvents(
      -> series(2, [1, error(), 2]).toProperty().endOnError()
      [1, error()])
# Tests for Property.awaiting(other): same contract as EventStream.awaiting.
describe "Property.awaiting(other)", ->
  describe "indicates whether p1 has produced output after p2 (or only the former has output so far)", ->
    expectPropertyEvents(
      -> series(2, [1, 1]).toProperty().awaiting(series(3, [2]))
      [false, true, false, true])
# Tests for Property.skipDuplicates: drops repeated change values but must
# still deliver the current value to each new subscriber (regression #211).
describe "Property.skipDuplicates", ->
  describe "drops duplicates", ->
    expectPropertyEvents(
      -> series(1, [1, 2, error(), 2, 3, 1]).toProperty(0).skipDuplicates()
      [0, 1, 2, error(), 3, 1])
  describe "Doesn't skip initial value (bug fix #211)", ->
    b = new Bacon.Bus()
    p = b.toProperty()
    p.onValue -> # force property update
    s = p.skipDuplicates()
    b.push 'foo'
    # Three identical rounds: every fresh subscriber must get the current
    # value even though it equals the previously delivered one.
    describe "series 1", ->
      expectPropertyEvents((-> s.take(1)), ["foo"])
    describe "series 2", ->
      expectPropertyEvents((-> s.take(1)), ["foo"])
    describe "series 3", ->
      expectPropertyEvents((-> s.take(1)), ["foo"])
# Tests for Property.changes: a stream of the Property's updates, excluding
# the initial value.
describe "Property.changes", ->
  describe "sends property change events", ->
    expectStreamEvents(
      ->
        s = new Bacon.Bus()
        p = s.toProperty("a").changes()
        soon ->
          s.push "b"
          s.error()
          s.end()
        p
      ["b", error()])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, 3]).toProperty(0).changes()
      [1, 2, 3])
# Tests for Property.combine: combines the latest values of two Properties
# with a combinator function or a ".method" name; errors pass through.
describe "Property.combine", ->
  describe "combines latest values of two properties, with given combinator function, passing through errors", ->
    expectPropertyEvents(
      ->
        left = series(2, [1, error(), 2, 3]).toProperty()
        right = series(2, [4, error(), 5, 6]).delay(t(1)).toProperty()
        left.combine(right, add)
      [5, error(), error(), 6, 7, 8, 9])
  describe "also accepts a field name instead of combinator function", ->
    expectPropertyEvents(
      ->
        left = series(1, [[1]]).toProperty()
        right = series(2, [[2]]).toProperty()
        left.combine(right, ".concat")
      [[1, 2]])
  describe "combines with null values", ->
    expectPropertyEvents(
      ->
        left = series(1, [null]).toProperty()
        right = series(1, [null]).toProperty()
        left.combine(right, (l, r)-> [l, r])
      [[null, null]])
  it "unsubscribes when initial value callback returns Bacon.noMore", ->
    calls = 0
    bus = new Bacon.Bus()
    other = Bacon.constant(["rolfcopter"])
    bus.toProperty(["lollerskates"]).combine(other, ".concat").subscribe (e) ->
      if !e.isInitial()
        calls += 1
      # Unsubscribe right after the initial event; later pushes must not call us.
      Bacon.noMore
    bus.push(["fail whale"])
    expect(calls).to.equal 0
  describe "does not duplicate same error from two streams", ->
    expectPropertyEvents(
      ->
        src = series(1, ["same", error()])
        Bacon.combineAsArray(src, src)
      [["same", "same"], error()])
  it "toString", ->
    expect(Bacon.constant(1).combine(Bacon.constant(2), (->)).toString()).to.equal("Bacon.constant(1).combine(Bacon.constant(2),function)")
  describe "with random methods on Array.prototype", ->
    it "doesn't throw exceptions", ->
      # Guard against enumerable Array.prototype extensions leaking into
      # internal iteration; cleaned up in `finally`.
      try
        Array.prototype.foo = "bar"
        events = []
        Bacon.once("a").combine(Bacon.once("b"), (a,b) -> [a,b]).onValue (v) ->
          events.push(v)
        expect(events).to.deep.equal([["a", "b"]])
      finally
        delete Array.prototype.foo
# Tests for EventStream.combine: the stream is first converted to a Property,
# then combined as in Property.combine.
describe "EventStream.combine", ->
  describe "converts stream to Property, then combines", ->
    expectPropertyEvents(
      ->
        left = series(2, [1, error(), 2, 3])
        right = series(2, [4, error(), 5, 6]).delay(t(1)).toProperty()
        left.combine(right, add)
      [5, error(), error(), 6, 7, 8, 9])
# Tests for Bacon.groupSimultaneous: collects values that occur on the same
# dispatch tick into per-source arrays; accepts varargs or a single array.
describe "Bacon.groupSimultaneous", ->
  describe "groups simultaneous values in to arrays", ->
    expectStreamEvents(
      ->
        src = series(1, [1,2])
        stream = src.merge(src.map((x) -> x * 2))
        Bacon.groupSimultaneous(stream)
      [[[1, 2]], [[2,4]]])
  describe "groups simultaneous values from multiple sources in to arrays", ->
    expectStreamEvents(
      ->
        src = series(1, [1,2])
        stream = src.merge(src.map((x) -> x * 2))
        stream2 = src.map (x) -> x * 4
        Bacon.groupSimultaneous(stream, stream2)
      [[[1, 2], [4]], [[2,4], [8]]])
  describe "accepts an array or multiple args", ->
    expectStreamEvents(
      -> Bacon.groupSimultaneous([Bacon.later(1, 1), Bacon.later(2, 2)])
      [[[1],[]], [[], [2]]])
  describe "returns empty stream for zero sources", ->
    expectStreamEvents(
      -> Bacon.groupSimultaneous()
      [])
    expectStreamEvents(
      -> Bacon.groupSimultaneous([])
      [])
  describe "works with synchronous sources", ->
    expectStreamEvents(
      -> Bacon.groupSimultaneous(Bacon.fromArray([1,2]))
      [[[1]], [[2]]])
    expectStreamEvents(
      -> Bacon.groupSimultaneous(Bacon.fromArray([1,2]).mapEnd(3))
      [[[1]], [[2]], [[3]]])
  it "toString", ->
    expect(Bacon.groupSimultaneous(Bacon.never()).toString()).to.equal("Bacon.groupSimultaneous(Bacon.never())")
# Atomicity tests: when one source value fans out through several derived
# Properties that are then combined, subscribers must see a single consistent
# update (e.g. [2, 4] below, never a transient mixed state like 1+2=3).
describe "Property update is atomic", ->
  describe "in a diamond-shaped combine() network", ->
    expectPropertyEvents(
      ->
        a = series(1, [1, 2]).toProperty()
        b = a.map (x) -> x
        c = a.map (x) -> x
        b.combine(c, (x, y) -> x + y)
      [2, 4])
  describe "in a triangle-shaped combine() network", ->
    expectPropertyEvents(
      ->
        a = series(1, [1, 2]).toProperty()
        b = a.map (x) -> x
        a.combine(b, (x, y) -> x + y)
      [2, 4])
  describe "when filter is involved", ->
    expectPropertyEvents(
      ->
        a = series(1, [1, 2]).toProperty()
        b = a.map((x) -> x).filter(true)
        a.combine(b, (x, y) -> x + y)
      [2, 4])
  describe "when root property is based on combine*", ->
    expectPropertyEvents(
      ->
        a = series(1, [1, 2]).toProperty().combine(Bacon.constant(0), (x, y) -> x)
        b = a.map (x) -> x
        c = a.map (x) -> x
        b.combine(c, (x, y) -> x + y)
      [2, 4])
  describe "when root is not a Property", ->
    expectPropertyEvents(
      ->
        a = series(1, [1, 2])
        b = a.map (x) -> x
        c = a.map (x) -> x
        b.combine(c, (x, y) -> x + y)
      [2, 4])
  it "calls combinator function for valid combos only", ->
    calls = 0
    results = []
    combinator = (x,y) ->
      calls++
      x+y
    src = new Bacon.Bus()
    prop = src.toProperty()
    out = prop.map((x) -> x)
      .combine(prop.map((x) -> x * 2), combinator)
      .doAction(->)
      .combine(prop, (x,y) -> x)
    out.onValue((x) -> results.push(x))
    src.push(1)
    src.push(2)
    # Two pushes -> exactly two combinator calls; no transient combinations.
    expect(results).to.deep.equal([3,6])
    expect(calls).to.equal(2)
  describe "yet respects subscriber return values (bug fix)", ->
    expectStreamEvents(
      -> Bacon.repeatedly(t(1), [1, 2, 3]).toProperty().changes().take(1)
      [1])
  # Observables created inside an onValue handler (i.e. during a dispatch
  # loop) must still deliver their events to subscribers added right away.
  describe "independent observables created within the dispatch loop", ->
    it "combineAsArray", ->
      calls = 0
      Bacon.once(1).onValue ->
        Bacon.combineAsArray([Bacon.constant(1)]).onValue ->
          calls++
      expect(calls).to.equal(1)
    it "combineAsArray.startWith", ->
      result = null
      Bacon.once(1).onValue ->
        a = Bacon.constant("lolbal")
        s = Bacon.combineAsArray([a, a]).map("right").startWith("wrong");
        s.onValue((x) -> result = x)
      expect(result).to.equal("right")
    it "stream.startWith", ->
      result = null
      Bacon.once(1).onValue ->
        s = Bacon.later(1).startWith(0)
        s.onValue((x) -> result = x)
      expect(result).to.equal(0)
    it "combineAsArray.changes.startWith", ->
      result = null
      Bacon.once(1).onValue ->
        a = Bacon.constant("lolbal")
        s = Bacon.combineAsArray([a, a]).changes().startWith("right")
        s.onValue((x) -> result = x)
      expect(result).to.equal("right")
    it "flatMap", ->
      result = null
      Bacon.once(1).onValue ->
        a = Bacon.constant("lolbal")
        s = a.flatMap((x) -> Bacon.once(x))
        s.onValue((x) -> result = x)
      expect(result).to.equal("lolbal")
    it "awaiting", ->
      result = null
      Bacon.once(1).onValue ->
        a = Bacon.constant(1)
        s = a.awaiting(a.map(->))
        s.onValue((x) -> result = x)
      expect(result).to.equal(false)
    it "concat", ->
      result = []
      Bacon.once(1).onValue ->
        s = Bacon.once(1).concat(Bacon.once(2))
        s.onValue((x) -> result.push(x))
      expect(result).to.deep.equal([1,2])
    it "Property.delay", ->
      result = []
      Bacon.once(1).onValue ->
        c = Bacon.constant(1)
        s = Bacon.combineAsArray([c, c]).delay(1).map(".0")
        s.onValue((x) -> result.push(x))
      expect(result).to.deep.equal([1])
    # Subscribing to an already-flowing source from within the dispatch loop:
    # late subscribers must see up-to-date values, not stale ones.
    describe "when subscribing within the dispatch loop", ->
      describe "single subscriber", ->
        describe "up-to-date values are used (skipped bounce)", ->
          expectStreamEvents(
            ->
              src = series(1, [1,2])
              trigger = src.map((x) -> x)
              trigger.onValue ->
                value = src.toProperty()
                value.onValue ->
              trigger.flatMap ->
                value.take(1)
            [1,2])
        describe "delayed bounce (TODO: how to name better)", ->
          expectStreamEvents(
            ->
              src = series(1, [1,2])
              trigger = src.map((x) -> x)
              trigger.onValue ->
                value = src.filter((x) -> x == 1).toProperty(0)
                value.onValue ->
              trigger.flatMap ->
                value.take(1)
            [0, 1])
      describe "multiple subscribers", ->
        describe "up-to-date values are used (skipped bounce)", ->
          expectStreamEvents(
            ->
              src = series(1, [1,2])
              trigger = src.map((x) -> x)
              trigger.onValue ->
                value = src.toProperty()
                value.onValue ->
              trigger.flatMap ->
                # extra subscriber must not change what take(1) sees
                value.onValue(->)
                value.take(1)
            [1,2])
        describe "delayed bounce (TODO: how to name better)", ->
          expectStreamEvents(
            ->
              src = series(1, [1,2])
              trigger = src.map((x) -> x)
              trigger.onValue ->
                value = src.filter((x) -> x == 1).toProperty(0)
                value.onValue ->
              trigger.flatMap ->
                value.onValue(->)
                value.take(1)
            [0, 1])
    # Regression: subscribing to a derivative of an already-ended Property
    # must still replay the current value followed by End.
    describe "delayed bounce in case Property ended (bug fix)", ->
      expectStreamEvents(
        ->
          bus = new Bacon.Bus()
          root = Bacon.once(0).toProperty()
          root.onValue ->
            Bacon.later(1).onValue ->
              root.map(-> 1).subscribe (event) ->
                if event.isEnd()
                  bus.end()
                else
                  bus.push(event.value())
          bus
        [1])
    describe "poking for errors 2", ->
      expectStreamEvents(
        ->
          bus = new Bacon.Bus()
          root = Bacon.sequentially(1, [1,2]).toProperty()
          root.subscribe (event) ->
            outdatedChild = root.filter((x) -> x == 1).map((x) -> x)
            outdatedChild.onValue(->) # sets value but will be outdated at value 2
            Bacon.later(3).onValue ->
              outdatedChild.subscribe (event) ->
                if event.isEnd()
                  bus.end()
                else
                  bus.push(event.value())
          bus
        [1]
      )
  # combineAsArray(sources...) -> Property of arrays holding the latest value
  # of each source. Accepts an array arg or varargs, and plain constants.
  describe "Bacon.combineAsArray", ->
    describe "initial value", ->
      event = null
      before ->
        prop = Bacon.constant(1)
        Bacon.combineAsArray(prop).subscribe (x) ->
          event = x if x.hasValue()
      it "is output as Initial event", ->
        expect(event.isInitial()).to.equal(true)
    describe "combines properties and latest values of streams, into a Property having arrays as values", ->
      expectPropertyEvents(
        ->
          stream = series(1, ["a", "b"])
          Bacon.combineAsArray([Bacon.constant(1), Bacon.constant(2), stream])
        [[1, 2, "a"], [1, 2, "b"]])
    describe "Works with streams provided as a list of arguments as well as with a single array arg", ->
      expectPropertyEvents(
        ->
          stream = series(1, ["a", "b"])
          Bacon.combineAsArray(Bacon.constant(1), Bacon.constant(2), stream)
        [[1, 2, "a"], [1, 2, "b"]])
    describe "works with single property", ->
      expectPropertyEvents(
        ->
          Bacon.combineAsArray([Bacon.constant(1)])
        [[1]])
    describe "works with single stream", ->
      expectPropertyEvents(
        ->
          Bacon.combineAsArray([Bacon.once(1)])
        [[1]])
    describe "works with arrays as values, with first array being empty (bug fix)", ->
      expectPropertyEvents(
        ->
          Bacon.combineAsArray([Bacon.constant([]), Bacon.constant([1])])
        ([[[], [1]]]))
    describe "works with arrays as values, with first array being non-empty (bug fix)", ->
      expectPropertyEvents(
        ->
          Bacon.combineAsArray([Bacon.constant([1]), Bacon.constant([2])])
        ([[[1], [2]]]))
    describe "works with empty array", ->
      expectPropertyEvents(
        -> Bacon.combineAsArray([])
        [[]])
    describe "works with empty args list", ->
      expectPropertyEvents(
        -> Bacon.combineAsArray()
        [[]])
    describe "accepts constant values instead of Observables", ->
      expectPropertyEvents(
        -> Bacon.combineAsArray(Bacon.constant(1), 2, 3)
        [[1,2,3]])
    it "preserves laziness", ->
      calls = 0
      id = (x) ->
        calls++
        x
      # skip(4) means only the 5th value is consumed: map must run once, not 5x
      Bacon.combineAsArray(Bacon.fromArray([1,2,3,4,5]).map(id)).skip(4).onValue()
      expect(calls).to.equal(1)
    it "toString", ->
      expect(Bacon.combineAsArray(Bacon.never()).toString()).to.equal("Bacon.combineAsArray(Bacon.never())")
describe "Bacon.combineWith", ->
describe "combines n properties, streams and constants using an n-ary function", ->
expectPropertyEvents(
->
stream = series(1, [1, 2])
f = (x, y, z) -> x + y + z
Bacon.combineWith(f, stream, Bacon.constant(10), 100)
[111, 112])
describe "works with single input", ->
expectPropertyEvents(
->
stream = series(1, [1, 2])
f = (x) -> x * 2
Bacon.combineWith(f, stream)
[2, 4])
describe "works with 0 inputs (results to a constant)", ->
expectPropertyEvents(
->
Bacon.combineWith(-> 1)
[1])
it "toString", ->
expect(Bacon.combineWith((->), Bacon.never()).toString()).to.equal("Bacon.combineWith(function,Bacon.never())")
describe "Boolean logic", ->
describe "combines Properties with and()", ->
expectPropertyEvents(
-> Bacon.constant(true).and(Bacon.constant(false))
[false])
describe "combines Properties with or()", ->
expectPropertyEvents(
-> Bacon.constant(true).or(Bacon.constant(false))
[true])
describe "inverts property with not()", ->
expectPropertyEvents(
-> Bacon.constant(true).not()
[false])
describe "accepts constants instead of properties", ->
describe "true and false", ->
expectPropertyEvents(
-> Bacon.constant(true).and(false)
[false])
describe "true and true", ->
expectPropertyEvents(
-> Bacon.constant(true).and(true)
[true])
describe "true or false", ->
expectPropertyEvents(
-> Bacon.constant(true).or(false)
[true])
it "toString", ->
expect(Bacon.constant(1).and(Bacon.constant(2).not()).or(Bacon.constant(3)).toString()).to.equal("Bacon.constant(1).and(Bacon.constant(2).not()).or(Bacon.constant(3))")
  # mergeAll interleaves events from all given streams into one stream.
  # Accepts an array arg or varargs; empty input yields an empty stream.
  describe "Bacon.mergeAll", ->
    describe ("merges all given streams"), ->
      expectStreamEvents(
        ->
          Bacon.mergeAll([
            series(3, [1, 2])
            series(3, [3, 4]).delay(t(1))
            series(3, [5, 6]).delay(t(2))])
        # unstable: presumably marks order-sensitive timing — see test harness
        [1, 3, 5, 2, 4, 6], unstable)
    describe ("supports n-ary syntax"), ->
      expectStreamEvents(
        ->
          Bacon.mergeAll(
            series(3, [1, 2])
            series(3, [3, 4]).delay(t(1))
            series(3, [5, 6]).delay(t(2)))
        [1, 3, 5, 2, 4, 6], unstable)
    describe "works with a single stream", ->
      expectStreamEvents(
        -> Bacon.mergeAll([Bacon.once(1)])
        [1])
      expectStreamEvents(
        -> Bacon.mergeAll(Bacon.once(1))
        [1])
    describe "returns empty stream for zero input", ->
      expectStreamEvents(
        -> Bacon.mergeAll([])
        [])
      expectStreamEvents(
        -> Bacon.mergeAll()
        [])
    it "toString", ->
      expect(Bacon.mergeAll(Bacon.never()).toString()).to.equal("Bacon.mergeAll(Bacon.never())")
  # sampledBy(stream) emits the property's current value whenever the sampler
  # stream emits; result is an EventStream that ends with the sampler.
  describe "Property.sampledBy(stream)", ->
    describe "samples property at events, resulting to EventStream", ->
      expectStreamEvents(
        ->
          prop = series(2, [1, 2]).toProperty()
          stream = repeat(3, ["troll"]).take(4)
          prop.sampledBy(stream)
        [1, 2, 2, 2])
    describe "includes errors from both Property and EventStream", ->
      expectStreamEvents(
        ->
          prop = series(2, [error(), 2]).toProperty()
          stream = series(3, [error(), "troll"])
          prop.sampledBy(stream)
        [error(), error(), 2])
    describe "ends when sampling stream ends", ->
      expectStreamEvents(
        ->
          prop = repeat(2, [1, 2]).toProperty()
          stream = repeat(2, [""]).delay(t(1)).take(4)
          prop.sampledBy(stream)
        [1, 2, 1, 2])
    describe "accepts optional combinator function f(Vp, Vs)", ->
      expectStreamEvents(
        ->
          prop = series(2, ["a", "b"]).toProperty()
          stream = series(2, ["1", "2", "1", "2"]).delay(t(1))
          prop.sampledBy(stream, add)
        ["a1", "b2", "b1", "b2"])
    describe "allows method name instead of function too", ->
      expectStreamEvents(
        ->
          Bacon.constant([1]).sampledBy(Bacon.once([2]), ".concat")
        [[1, 2]])
    describe "works with same origin", ->
      expectStreamEvents(
        ->
          src = series(2, [1, 2])
          src.toProperty().sampledBy(src)
        [1, 2])
      expectStreamEvents(
        ->
          src = series(2, [1, 2])
          src.toProperty().sampledBy(src.map(times, 2))
        [1, 2])
    # After a combine, sampling must observe the *updated* property value.
    describe "uses updated property after combine", ->
      latter = (a, b) -> b
      expectPropertyEvents(
        ->
          src = series(2, ["b", "c"]).toProperty("a")
          combined = Bacon.constant().combine(src, latter)
          src.sampledBy(combined, add)
        ["aa", "bb", "cc"])
    describe "uses updated property after combine with subscriber", ->
      latter = (a, b) -> b
      expectPropertyEvents(
        ->
          src = series(2, ["b", "c"]).toProperty("a")
          combined = Bacon.constant().combine(src, latter)
          combined.onValue(->)
          src.sampledBy(combined, add)
        ["aa", "bb", "cc"])
    describe "skips samplings that occur before the property gets its first value", ->
      expectStreamEvents(
        ->
          p = series(5, [1]).toProperty()
          p.sampledBy(series(3, [0]))
        [])
      expectStreamEvents(
        ->
          p = series(5, [1, 2]).toProperty()
          p.sampledBy(series(3, [0, 0, 0, 0]))
        [1, 1, 2], unstable)
      expectPropertyEvents(
        ->
          p = series(5, [1, 2]).toProperty()
          p.sampledBy(series(3, [0, 0, 0, 0]).toProperty())
        [1, 1, 2], unstable)
    describe "works with stream of functions", ->
      f = ->
      expectStreamEvents(
        ->
          p = series(1, [f]).toProperty()
          p.sampledBy(series(1, [1, 2, 3]))
        [f, f, f])
    describe "works with synchronous sampler stream", ->
      expectStreamEvents(
        -> Bacon.constant(1).sampledBy(Bacon.fromArray([1,2,3]))
        [1,1,1], unstable)
      expectStreamEvents(
        -> Bacon.later(1, 1).toProperty().sampledBy(Bacon.fromArray([1,2,3]))
        [])
    describe "laziness", ->
      calls = 0
      before (done) ->
        id = (x) ->
          calls++
          x
        sampler = Bacon.later(5).map(id)
        property = repeat(1, [1]).toProperty().map(id)
        sampled = property.sampledBy sampler
        sampled.onValue()
        sampled.onEnd(done)
      # the property's map must be evaluated only for the one sampled value
      it "preserves laziness", ->
        expect(calls).to.equal(1)
    it "toString", ->
      expect(Bacon.constant(0).sampledBy(Bacon.never()).toString()).to.equal("Bacon.constant(0).sampledBy(Bacon.never(),function)")
  # Sampling by a Property (instead of a stream) yields a Property result;
  # a plain EventStream receiver is converted to a Property automatically.
  describe "Property.sampledBy(property)", ->
    describe "samples property at events, resulting to a Property", ->
      expectPropertyEvents(
        ->
          prop = series(2, [1, 2]).toProperty()
          sampler = repeat(3, ["troll"]).take(4).toProperty()
          prop.sampledBy(sampler)
        [1, 2, 2, 2])
    describe "works on an event stream by automatically converting to property", ->
      expectPropertyEvents(
        ->
          stream = series(2, [1, 2])
          sampler = repeat(3, ["troll"]).take(4).toProperty()
          stream.sampledBy(sampler)
        [1, 2, 2, 2])
    describe "accepts optional combinator function f(Vp, Vs)", ->
      expectPropertyEvents(
        ->
          prop = series(2, ["a", "b"]).toProperty()
          sampler = series(2, ["1", "2", "1", "2"]).delay(t(1)).toProperty()
          prop.sampledBy(sampler, add)
        ["a1", "b2", "b1", "b2"])
  # sample(interval) emits the property's current value on a fixed timer.
  describe "Property.sample", ->
    describe "samples property by given interval", ->
      expectStreamEvents(
        ->
          prop = series(2, [1, 2]).toProperty()
          prop.sample(t(3)).take(4)
        [1, 2, 2, 2])
    describe "includes all errors", ->
      expectStreamEvents(
        ->
          prop = series(2, [1, error(), 2]).toProperty()
          prop.sample(t(5)).take(2)
        [error(), 1, 2], unstable)
    describe "works with synchronous source", ->
      expectStreamEvents(
        ->
          prop = Bacon.constant(1)
          prop.sample(t(3)).take(4)
        [1, 1, 1, 1])
    it "toString", ->
      expect(Bacon.constant(0).sample(1).toString()).to.equal("Bacon.constant(0).sample(1)")
describe "EventStream.errors", ->
describe "Includes errors only", ->
expectStreamEvents(
-> series(1, [1, error(), 2]).errors()
[error()])
it "toString", ->
expect(Bacon.never().errors().toString()).to.equal("Bacon.never().errors()")
  # scan(seed, f) emits the seed immediately, then each accumulated value;
  # Error events pass through without touching the accumulator.
  describe "EventStream.scan", ->
    describe "accumulates values with given seed and accumulator function, passing through errors", ->
      expectPropertyEvents(
        -> series(1, [1, 2, error(), 3]).scan(0, add)
        [0, 1, 3, error(), 6])
    describe "also works with method name", ->
      expectPropertyEvents(
        -> series(1, [[1], [2]]).scan([], ".concat")
        [[], [1], [1, 2]])
    it "yields the seed value immediately", ->
      outputs = []
      bus = new Bacon.Bus()
      bus.scan(0, -> 1).onValue((value) -> outputs.push(value))
      expect(outputs).to.deep.equal([0])
    describe "yields null seed value", ->
      expectPropertyEvents(
        -> series(1, [1]).scan(null, ->1)
        [null, 1])
    describe "works with synchronous streams", ->
      expectPropertyEvents(
        -> Bacon.fromArray([1,2,3]).scan(0, ((x,y)->x+y))
        [0,1,3,6])
    describe "calls accumulator function once per value", ->
      # count is shared between the spec run and the follow-up assertion below
      count = 0
      expectPropertyEvents(
        -> series(2, [1,2,3]).scan(0, (x,y) -> count++; x + y)
        [0, 1, 3, 6]
      )
      it "calls accumulator once per value", ->
        expect(count).to.equal(3)
describe "EventStream.fold", ->
describe "folds stream into a single-valued Property, passes through errors", ->
expectPropertyEvents(
-> series(1, [1, 2, error(), 3]).fold(0, add)
[error(), 6])
describe "has reduce as synonym", ->
expectPropertyEvents(
-> series(1, [1, 2, error(), 3]).fold(0, add)
[error(), 6])
describe "works with synchronous source", ->
expectPropertyEvents(
-> Bacon.fromArray([1, 2, error(), 3]).fold(0, add)
[error(), 6])
  # Property.scan: when the source has an Initial value, the output starts
  # at f(seed, init); otherwise it starts at the seed itself.
  describe "Property.scan", ->
    describe "with Init value, starts with f(seed, init)", ->
      expectPropertyEvents(
        -> series(1, [2,3]).toProperty(1).scan(0, add)
        [1, 3, 6])
    describe "without Init value, starts with seed", ->
      expectPropertyEvents(
        -> series(1, [2,3]).toProperty().scan(0, add)
        [0, 2, 5])
    describe "treats null seed value like any other value", ->
      expectPropertyEvents(
        -> series(1, [1]).toProperty().scan(null, add)
        [null, 1])
      expectPropertyEvents(
        -> series(1, [2]).toProperty(1).scan(null, add)
        [1, 3])
    describe "for synchronous source", ->
      describe "with Init value, starts with f(seed, init)", ->
        expectPropertyEvents(
          -> Bacon.fromArray([2,3]).toProperty(1).scan(0, add)
          [1, 3, 6])
      describe "without Init value, starts with seed", ->
        expectPropertyEvents(
          -> Bacon.fromArray([2,3]).toProperty().scan(0, add)
          [0, 2, 5])
      describe "works with synchronously responding empty source", ->
        expectPropertyEvents(
          -> Bacon.never().toProperty(1).scan(0, add)
          [1])
  # withStateMachine(initState, f): f(state, event) returns [newState, events
  # to emit]. This machine sums values and emits the total only on End.
  describe "EventStream.withStateMachine", ->
    f = (sum, event) ->
      if event.hasValue()
        [sum + event.value(), []]
      else if event.isEnd()
        [sum, [new Bacon.Next(-> sum), event]]
      else
        [sum, [event]]
    describe "runs state machine on the stream", ->
      expectStreamEvents(
        -> Bacon.fromArray([1,2,3]).withStateMachine(0, f)
        [6])
  describe "Property.withStateMachine", ->
    describe "runs state machine on the stream", ->
      expectPropertyEvents(
        -> Bacon.fromArray([1,2,3]).toProperty().withStateMachine(0, (sum, event) ->
          if event.hasValue()
            [sum + event.value(), []]
          else if event.isEnd()
            [sum, [new Bacon.Next(-> sum), event]]
          else
            [sum, [event]])
        [6])
describe "Property.fold", ->
describe "Folds Property into a single-valued one", ->
expectPropertyEvents(
-> series(1, [2,3]).toProperty(1).fold(0, add)
[6])
  # diff(start, f) emits f(previous, current) for each consecutive pair,
  # starting from the given start value; Error events pass through.
  describe "EventStream.diff", ->
    describe "apply diff function to previous and current values, passing through errors", ->
      expectPropertyEvents(
        -> series(1, [1, 2, error(), 3]).diff(0, add)
        [1, 3, error(), 5])
    describe "also works with method name", ->
      expectPropertyEvents(
        -> series(1, [[1], [2]]).diff([0], ".concat")
        [[0, 1], [1, 2]])
    it "does not yields the start value immediately", ->
      outputs = []
      bus = new Bacon.Bus()
      bus.diff(0, -> 1).onValue((value) -> outputs.push(value))
      expect(outputs).to.deep.equal([])
    it "toString", ->
      expect(Bacon.once(1).diff(0, (->)).toString()).to.equal("Bacon.once(1).diff(0,function)")
  describe "Property.diff", ->
    describe "with Init value, starts with f(start, init)", ->
      expectPropertyEvents(
        -> series(1, [2,3]).toProperty(1).diff(0, add)
        [1, 3, 5])
    describe "without Init value, waits for the first value", ->
      expectPropertyEvents(
        -> series(1, [2,3]).toProperty().diff(0, add)
        [2, 5])
    describe "treats null start value like any other value", ->
      expectPropertyEvents(
        -> series(1, [1]).toProperty().diff(null, add)
        [1])
      expectPropertyEvents(
        -> series(1, [2]).toProperty(1).diff(null, add)
        [1, 3])
  # zip pairs the nth event of one stream with the nth event of the other;
  # it ends as soon as either side can no longer produce a pair.
  describe "EventStream.zip", ->
    describe "pairwise combines values from two streams", ->
      expectStreamEvents(
        -> series(1, [1, 2, 3]).zip(series(1, ['a', 'b', 'c']))
        [[1, 'a'], [2, 'b'], [3, 'c']])
    describe "passes through errors", ->
      expectStreamEvents(
        -> series(2, [1, error(), 2]).zip(series(2, ['a', 'b']).delay(1))
        [[1, 'a'], error(), [2, 'b']])
    describe "completes as soon as possible", ->
      expectStreamEvents(
        -> series(1, [1]).zip(series(1, ['a', 'b', 'c']))
        [[1, 'a']])
    describe "can zip an observable with itself", ->
      expectStreamEvents(
        ->
          obs = series(1, ['a', 'b', 'c'])
          obs.zip(obs.skip(1))
        [['a', 'b'], ['b', 'c']])
    it "toString", ->
      expect(Bacon.never().zip(Bacon.once(1)).toString()).to.equal("Bacon.never().zip(Bacon.once(1))")
  describe "Property.zip", ->
    describe "pairwise combines values from two properties", ->
      expectStreamEvents(
        -> series(1, [1, 2, 3]).toProperty().zip(series(1, ['a', 'b', 'c']).toProperty())
        [[1, 'a'], [2, 'b'], [3, 'c']], { unstable })
  # zipAsArray zips n observables index-by-index into arrays; accepts an
  # array arg or varargs, and Properties alongside EventStreams.
  describe "Bacon.zipAsArray", ->
    describe "zips an array of streams into a stream of arrays", ->
      expectStreamEvents(
        ->
          obs = series(1, [1, 2, 3, 4])
          Bacon.zipAsArray([obs, obs.skip(1), obs.skip(2)])
        [[1 , 2 , 3], [2 , 3 , 4]])
    describe "supports n-ary syntax", ->
      expectStreamEvents(
        ->
          obs = series(1, [1, 2, 3, 4])
          Bacon.zipAsArray(obs, obs.skip(1))
        [[1 , 2], [2 , 3], [3, 4]])
    describe "accepts Properties as well as EventStreams", ->
      expectStreamEvents(
        ->
          obs = series(1, [1, 2, 3, 4])
          Bacon.zipAsArray(obs, obs.skip(1), Bacon.constant(5))
        [[1 , 2, 5]])
    describe "works with single stream", ->
      expectStreamEvents(
        ->
          obs = series(1, [1, 2])
          Bacon.zipAsArray([obs])
        [[1], [2]])
      expectStreamEvents(
        ->
          obs = series(1, [1, 2])
          Bacon.zipAsArray(obs)
        [[1], [2]])
    describe "works with 0 streams (=Bacon.never())", ->
      expectStreamEvents(
        -> Bacon.zipAsArray([])
        [])
      expectStreamEvents(
        -> Bacon.zipAsArray()
        [])
    it "toString", ->
      expect(Bacon.zipAsArray(Bacon.never(), Bacon.never()).toString()).to.equal("Bacon.zipAsArray(Bacon.never(),Bacon.never())")
  # zipWith zips streams through a combining function. Note both argument
  # orders are exercised: (streams, f) and (f, streams...).
  describe "Bacon.zipWith", ->
    describe "zips an array of streams with given function", ->
      expectStreamEvents(
        ->
          obs = series(1, [1, 2, 3, 4])
          Bacon.zipWith([obs, obs.skip(1), obs.skip(2)], ((x,y,z) -> (x + y + z)))
        [1 + 2 + 3, 2 + 3 + 4])
    describe "supports n-ary syntax", ->
      expectStreamEvents(
        ->
          obs = series(1, [1, 2, 3, 4])
          f = ((x,y,z) -> (x + y + z))
          Bacon.zipWith(f, obs, obs.skip(1), obs.skip(2))
        [1 + 2 + 3, 2 + 3 + 4])
    describe "works with single stream", ->
      expectStreamEvents(
        ->
          obs = series(1, [1,2])
          f = (x) -> x * 2
          Bacon.zipWith(f, obs)
        [1 * 2, 2 * 2])
    describe "works with 0 streams (=Bacon.never())", ->
      expectStreamEvents(
        ->
          Bacon.zipWith([], ->)
        [])
      expectStreamEvents(
        ->
          Bacon.zipWith(->)
        [])
    it "toString", ->
      expect(Bacon.zipWith((->), Bacon.never()).toString()).to.equal("Bacon.zipWith(function,Bacon.never())")
  # Bacon.when(pattern, f, pattern, f, ...) fires f when every stream in a
  # pattern has an unconsumed event; patterns are tried top to bottom, and
  # Properties in a pattern supply values without being waited on.
  describe "Bacon.when", ->
    describe "synchronizes on join patterns", ->
      expectStreamEvents(
        ->
          # '_' slots keep the two series time-aligned; filter removes them
          [a,b,_] = ['a','b','_']
          as = series(1, [a, _, a, a, _, a, _, _, a, a]).filter((x) -> x == a)
          bs = series(1, [_, b, _, _, b, _, b, b, _, _]).filter((x) -> x == b)
          Bacon.when(
            [as, bs], (a,b) -> a + b,
            [as], (a) -> a)
        ['a', 'ab', 'a', 'ab', 'ab', 'ab'], unstable)
    describe "consider the join patterns from top to bottom", ->
      expectStreamEvents(
        ->
          [a,b,_] = ['a','b','_']
          as = series(1, [a, _, a, a, _, a, _, _, a, a]).filter((x) -> x == a)
          bs = series(1, [_, b, _, _, b, _, b, b, _, _]).filter((x) -> x == b)
          Bacon.when(
            [as], (a) -> a,
            [as, bs], (a,b) -> a + b)
        ['a', 'a', 'a', 'a', 'a', 'a'])
    describe "handles any number of join patterns", ->
      expectStreamEvents(
        ->
          [a,b,c,_] = ['a','b','c','_']
          as = series(1, [a, _, a, _, a, _, a, _, _, _, a, a]).filter((x) -> x == a)
          bs = series(1, [_, b, _, _, _, b, _, b, _, b, _, _]).filter((x) -> x == b)
          cs = series(1, [_, _, _, c, _, _, _, _, c, _, _, _]).filter((x) -> x == c)
          Bacon.when(
            [as, bs, cs], (a,b,c) -> a + b + c,
            [as, bs], (a,b) -> a + b,
            [as], (a) -> a)
        ['a', 'ab', 'a', 'abc', 'abc', 'ab'], unstable)
    describe "does'nt synchronize on properties", ->
      expectStreamEvents(
        ->
          p = repeat(1, ["p"]).take(100).toProperty()
          s = series(3, ["1", "2", "3"])
          Bacon.when(
            [p,s], (p, s) -> p + s)
        ["p1", "p2", "p3"])
      expectStreamEvents(
        ->
          p = series(3, ["p"]).toProperty()
          s = series(1, ["1"])
          Bacon.when(
            [p,s], (p, s) -> p + s)
        [])
      expectStreamEvents(
        ->
          # a pattern of properties only never fires: no stream to trigger it
          p = repeat(1, ["p"]).take(100).toProperty()
          s = series(3, ["1", "2", "3"]).toProperty()
          Bacon.when(
            [p,s], (p, s) -> p + s)
        [])
      expectStreamEvents(
        ->
          [a,b,c,_] = ['a','b','c','_']
          as = series(1, [a, _, a, _, a, _, a, _, _, _, a, _, a]).filter((x) -> x == a)
          bs = series(1, [_, b, _, _, _, b, _, b, _, b, _, _, _]).filter((x) -> x == b)
          cs = series(1, [_, _, _, c, _, _, _, _, c, _, _, c, _]).filter((x) -> x == c).map(1).scan 0, ((x,y) -> x + y)
          Bacon.when(
            [as, bs, cs], (a,b,c) -> a + b + c,
            [as], (a) -> a)
        ['a', 'ab0', 'a', 'ab1', 'ab2', 'ab3'], unstable)
    describe "doesn't output before properties have values", ->
      expectStreamEvents(
        ->
          p = series(2, ["p"])
          s = series(1, ["s"])
          Bacon.when(
            [s, p], (s, p) -> s + p)
        ["sp"])
    describe "returns Bacon.never() on the empty list of patterns", ->
      expectStreamEvents(
        ->
          Bacon.when()
        [])
    describe "returns Bacon.never() when all patterns are zero-length", ->
      expectStreamEvents(
        ->
          Bacon.when([], ->)
        [])
    describe "works with empty patterns", ->
      expectStreamEvents(
        -> Bacon.when(
          [Bacon.once(1)], (x) -> x,
          [], ->)
        [1])
    describe "works with empty patterns (2)", ->
      expectStreamEvents(
        -> Bacon.when(
          [], ->,
          [Bacon.once(1)], (x) -> x)
        [1])
    describe "works with single stream", ->
      expectStreamEvents(
        -> Bacon.when([Bacon.once(1)], (x) -> x)
        [1])
    describe "works with multiples of streams", ->
      expectStreamEvents(
        ->
          # [hs, hs, os] requires TWO h events per o event (H2O-style join)
          [h,o,c,_] = ['h','o','c','_']
          hs = series(1, [h, _, h, _, h, _, h, _, _, _, h, _, h]).filter((x) -> x == h)
          os = series(1, [_, o, _, _, _, o, _, o, _, o, _, _, _]).filter((x) -> x == o)
          cs = series(1, [_, _, _, c, _, _, _, _, c, _, _, c, _]).filter((x) -> x == c)
          Bacon.when(
            [hs, hs, os], (h1,h2,o) -> [h1,h2,o],
            [cs, os], (c,o) -> [c,o])
        [['h', 'h', 'o'], ['c', 'o'], ['h', 'h', 'o'], ['c', 'o']], unstable)
    describe "works with multiples of properties", ->
      expectStreamEvents(
        ->
          c = Bacon.constant("c")
          Bacon.when(
            [c, c, Bacon.once(1)], (c1, c2, _) -> c1 + c2)
        ["cc"])
    describe "accepts constants instead of functions too", ->
      expectStreamEvents(
        -> Bacon.when(Bacon.once(1), 2)
        [2])
    describe "works with synchronous sources", ->
      expectStreamEvents(
        ->
          xs = Bacon.once "x"
          ys = Bacon.once "y"
          Bacon.when(
            [xs, ys], (x, y) -> x + y
          )
        ["xy"])
    it "toString", ->
      expect(Bacon.when([Bacon.never()], (->)).toString()).to.equal("Bacon.when([Bacon.never()],function)")
  # update(initial, pattern, f, ...) is like Bacon.when but yields a Property;
  # each handler receives the current value as its first argument.
  describe "Bacon.update", ->
    describe "works like Bacon.when, but produces a property, and can be defined in terms of a current value", ->
      expectPropertyEvents(
        ->
          [r,i,_] = ['r','i',0]
          incr = series(1, [1, _, 1, _, 2, _, 1, _, _, _, 2, _, 1]).filter((x) -> x != _)
          reset = series(1, [_, r, _, _, _, r, _, r, _, r, _, _, _]).filter((x) -> x == r)
          Bacon.update(
            0,
            [reset], 0,
            [incr], (i,c) -> i+c)
        [0, 1, 0, 1, 3, 0, 1, 0, 0, 2, 3])
    describe "Correctly handles multiple arguments in parameter list, and synchronous sources", ->
      expectPropertyEvents(
        ->
          one = Bacon.once(1)
          two = Bacon.once(2)
          Bacon.update(
            0,
            [one, two], (i, a, b) -> [i,a,b])
        [0, [0,1,2]])
    it "toString", ->
      expect(Bacon.update(0, [Bacon.never()], (->)).toString()).to.equal("Bacon.update(0,[Bacon.never()],function)")
  # combineTemplate walks an object/array template, replacing embedded
  # observables with their latest values while passing plain values through.
  describe "combineTemplate", ->
    describe "combines streams according to a template object", ->
      expectPropertyEvents(
        ->
          firstName = Bacon.constant("juha")
          lastName = Bacon.constant("paananen")
          userName = Bacon.constant("mr.bacon")
          Bacon.combineTemplate({ userName: userName, password: "*****", fullName: { firstName: firstName, lastName: lastName }})
        [{ userName: "mr.bacon", password: "*****", fullName: { firstName: "juha", lastName: "paananen" } }])
    describe "works with a single-stream template", ->
      expectPropertyEvents(
        ->
          bacon = Bacon.constant("bacon")
          Bacon.combineTemplate({ favoriteFood: bacon })
        [{ favoriteFood: "bacon" }])
    describe "works when dynamic part is not the last part (bug fix)", ->
      expectPropertyEvents(
        ->
          username = Bacon.constant("raimohanska")
          password = Bacon.constant("easy")
          Bacon.combineTemplate({url: "/user/login",
            data: { username: username, password: password }, type: "post"})
        [url: "/user/login", data: {username: "raimohanska", password: "easy"}, type: "post"])
    describe "works with arrays as data (bug fix)", ->
      expectPropertyEvents(
        -> Bacon.combineTemplate( { x : Bacon.constant([]), y : Bacon.constant([[]]), z : Bacon.constant(["z"])})
        [{ x : [], y : [[]], z : ["z"]}])
    describe "supports empty object", ->
      expectPropertyEvents(
        -> Bacon.combineTemplate({})
        [{}])
    it "supports arrays", ->
      value = {key: [{ x: 1 }, { x: 2 }]}
      Bacon.combineTemplate(value).onValue (x) ->
        expect(x).to.deep.equal(value)
        expect(x.key instanceof Array).to.deep.equal(true) # seems that the former passes even if x is not an array
      value = [{ x: 1 }, { x: 2 }]
      Bacon.combineTemplate(value).onValue (x) ->
        expect(x).to.deep.equal(value)
        expect(x instanceof Array).to.deep.equal(true)
      value = {key: [{ x: 1 }, { x: 2 }], key2: {}}
      Bacon.combineTemplate(value).onValue (x) ->
        expect(x).to.deep.equal(value)
        expect(x.key instanceof Array).to.deep.equal(true)
      value = {key: [{ x: 1 }, { x: Bacon.constant(2) }]}
      Bacon.combineTemplate(value).onValue (x) ->
        expect(x).to.deep.equal({key: [{ x: 1 }, { x: 2 }]})
        expect(x.key instanceof Array).to.deep.equal(true) # seems that the former passes even if x is not an array
    # Non-observable leaf values of every type must survive untouched:
    it "supports nulls", ->
      value = {key: null}
      Bacon.combineTemplate(value).onValue (x) ->
        expect(x).to.deep.equal(value)
    it "supports NaNs", ->
      value = {key: NaN}
      Bacon.combineTemplate(value).onValue (x) ->
        expect(isNaN(x.key)).to.deep.equal(true)
    it "supports dates", ->
      value = {key: new Date()}
      Bacon.combineTemplate(value).onValue (x) ->
        expect(x).to.deep.equal(value)
    it "supports regexps", ->
      value = {key: /[0-0]/i}
      Bacon.combineTemplate(value).onValue (x) ->
        expect(x).to.deep.equal(value)
    it "supports functions", ->
      value = {key: ->}
      Bacon.combineTemplate(value).onValue (x) ->
        expect(x).to.deep.equal(value)
    it "toString", ->
      expect(Bacon.combineTemplate({ thing: Bacon.never(), const: "a" }).toString()).to.equal("Bacon.combineTemplate({thing:Bacon.never(),const:a})")
  # decode(mapping) switches the output to the Property mapped from the
  # source's current value (a value-to-observable lookup table).
  describe "Property.decode", ->
    describe "switches between source Properties based on property value", ->
      expectPropertyEvents(
        ->
          a = Bacon.constant("a")
          b = Bacon.constant("b")
          c = Bacon.constant("c")
          series(1, [1,2,3]).toProperty().decode({1: a, 2: b, 3: c})
        ["a", "b", "c"])
    it "toString", ->
      expect(Bacon.constant(1).decode({1: "lol"}).toString()).to.equal("Bacon.constant(1).decode({1:lol})")
  describe "EventStream.decode", ->
    describe "switches between source Properties based on property value", ->
      expectPropertyEvents(
        ->
          a = Bacon.constant("a")
          b = Bacon.constant("b")
          c = Bacon.constant("c")
          series(1, [1,2,3]).decode({1: a, 2: b, 3: c})
        ["a", "b", "c"])
describe "Observable.onValues", ->
it "splits value array to callback arguments", ->
f = mockFunction()
Bacon.constant([1,2,3]).onValues(f)
f.verify(1,2,3)
describe "Bacon.onValues", ->
it "is a shorthand for combineAsArray.onValues", ->
f = mockFunction()
Bacon.onValues(1, 2, 3, f)
f.verify(1,2,3)
describe "Observable.subscribe and onValue", ->
it "returns a dispose() for unsubscribing", ->
s = new Bacon.Bus()
values = []
dispose = s.onValue (value) -> values.push value
s.push "lol"
dispose()
s.push "wut"
expect(values).to.deep.equal(["lol"])
describe "Observable.onEnd", ->
it "is called on stream end", ->
s = new Bacon.Bus()
ended = false
s.onEnd(-> ended = true)
s.push("LOL")
expect(ended).to.deep.equal(false)
s.end()
expect(ended).to.deep.equal(true)
  # ".field" strings in map() extract (possibly nested) object fields; if the
  # extracted field is a function, it is invoked as a method on its owner.
  describe "Field value extraction", ->
    describe "extracts field value", ->
      expectStreamEvents(
        -> Bacon.once({lol:"wut"}).map(".lol")
        ["wut"])
    describe "extracts nested field value", ->
      expectStreamEvents(
        -> Bacon.once({lol:{wut: "wat"}}).map(".lol.wut")
        ["wat"])
    describe "yields 'undefined' if any value on the path is 'undefined'", ->
      expectStreamEvents(
        -> Bacon.once({}).map(".lol.wut")
        [undefined])
    it "if field value is method, it does a method call", ->
      context = null
      result = null
      object = {
        method: ->
          context = this
          "result"
      }
      Bacon.once(object).map(".method").onValue((x) -> result = x)
      expect(result).to.deep.equal("result")
      # 'this' inside the method must be the owning object
      expect(context).to.deep.equal(object)
  # Shared spec factory for the side-effect methods (onValue / assign):
  # `wrapper` builds a single-value observable, `method` is the name of the
  # side-effect method under test. Returns a suite body for describe().
  testSideEffects = (wrapper, method) ->
    ->
      it "(f) calls function with property value", ->
        f = mockFunction()
        wrapper("kaboom")[method](f)
        f.verify("kaboom")
      it "(f, param) calls function, partially applied with param", ->
        f = mockFunction()
        wrapper("kaboom")[method](f, "pow")
        f.verify("pow", "kaboom")
      it "('.method') calls event value object method", ->
        value = mock("get")
        value.when().get().thenReturn("pow")
        wrapper(value)[method](".get")
        value.verify().get()
      it "('.method', param) calls event value object method with param", ->
        value = mock("get")
        value.when().get("value").thenReturn("pow")
        wrapper(value)[method](".get", "value")
        value.verify().get("value")
      it "(object, method) calls object method with property value", ->
        target = mock("pow")
        wrapper("kaboom")[method](target, "pow")
        target.verify().pow("kaboom")
      it "(object, method, param) partially applies object method with param", ->
        target = mock("pow")
        wrapper("kaboom")[method](target, "pow", "smack")
        target.verify().pow("smack", "kaboom")
      it "(object, method, param1, param2) partially applies with 2 args", ->
        target = mock("pow")
        wrapper("kaboom")[method](target, "pow", "smack", "whack")
        target.verify().pow("smack", "whack", "kaboom")
  # Run the shared suite for each (wrapper, method) combination:
  describe "Property.onValue", testSideEffects(Bacon.constant, "onValue")
  describe "Property.assign", testSideEffects(Bacon.constant, "assign")
  describe "EventStream.onValue", testSideEffects(Bacon.once, "onValue")
describe "Property.assign", ->
it "calls given objects given method with property values", ->
target = mock("pow")
Bacon.constant("kaboom").assign(target, "pow")
target.verify().pow("kaboom")
it "allows partial application of method (i.e. adding fixed args)", ->
target = mock("pow")
Bacon.constant("kaboom").assign(target, "pow", "smack")
target.verify().pow("smack", "kaboom")
it "allows partial application of method with 2 args (i.e. adding fixed args)", ->
target = mock("pow")
Bacon.constant("kaboom").assign(target, "pow", "smack", "whack")
target.verify().pow("smack", "whack", "kaboom")
  # Bus: a manually-driven stream supporting push/error/end plus plug()ing
  # other streams in. Covers looped plugs, disposal, and post-end behavior.
  describe "Bacon.Bus", ->
    it "merges plugged-in streams", ->
      bus = new Bacon.Bus()
      values = []
      dispose = bus.onValue (value) -> values.push value
      push = new Bacon.Bus()
      bus.plug(push)
      push.push("lol")
      expect(values).to.deep.equal(["lol"])
      dispose()
      verifyCleanup()
    # A bus may be plugged with a stream derived from itself (feedback loop)
    describe "works with looped streams", ->
      expectStreamEvents(
        ->
          bus = new Bacon.Bus()
          bus.plug(Bacon.later(t(2), "lol"))
          bus.plug(bus.filter((value) => "lol" == value).map(=> "wut"))
          Bacon.later(t(4)).onValue(=> bus.end())
          bus
        ["lol", "wut"])
    it "dispose works with looped streams", ->
      bus = new Bacon.Bus()
      bus.plug(Bacon.later(t(2), "lol"))
      bus.plug(bus.filter((value) => "lol" == value).map(=> "wut"))
      dispose = bus.onValue(=>)
      dispose()
    it "Removes input from input list on End event", ->
      subscribed = 0
      bus = new Bacon.Bus()
      input = new Bacon.Bus()
      # override subscribe to increase the subscribed-count
      inputSubscribe = input.subscribe
      input.subscribe = (sink) ->
        subscribed++
        inputSubscribe(sink)
      bus.plug(input)
      dispose = bus.onValue(=>)
      input.end()
      dispose()
      bus.onValue(=>) # this latter subscription should not go to the ended source anymore
      expect(subscribed).to.deep.equal(1)
    it "unsubscribes inputs on end() call", ->
      bus = new Bacon.Bus()
      input = new Bacon.Bus()
      events = []
      bus.plug(input)
      bus.subscribe((e) => events.push(e))
      input.push("a")
      bus.end()
      # pushed after end: must not be delivered
      input.push("b")
      expect(toValues(events)).to.deep.equal(["a", "<end>"])
    it "handles cold single-event streams correctly (bug fix)", ->
      values = []
      bus = new Bacon.Bus()
      bus.plug(Bacon.once("x"))
      bus.plug(Bacon.once("y"))
      bus.onValue((x) -> values.push(x))
      expect(values).to.deep.equal(["x", "y"])
    it "handles end() calls even when there are no subscribers", ->
      bus = new Bacon.Bus()
      bus.end()
    describe "delivers pushed events and errors", ->
      expectStreamEvents(
        ->
          s = new Bacon.Bus()
          # pushed before any subscriber: dropped, hence absent from expectation
          s.push "pullMe"
          soon ->
            s.push "pushMe"
            # test that it works regardless of "this"
            s.push.call(null, "pushSomeMore")
            s.error()
            s.end()
          s
        ["pushMe", "pushSomeMore", error()])
    it "does not deliver pushed events after end() call", ->
      called = false
      bus = new Bacon.Bus()
      bus.onValue(-> called = true)
      bus.end()
      bus.push("LOL")
      expect(called).to.deep.equal(false)
    it "does not plug after end() call", ->
      plugged = false
      bus = new Bacon.Bus()
      bus.end()
      bus.plug(new Bacon.EventStream((sink) -> plugged = true; (->)))
      bus.onValue(->)
      expect(plugged).to.deep.equal(false)
    it "returns unplug function from plug", ->
      values = []
      bus = new Bacon.Bus()
      src = new Bacon.Bus()
      unplug = bus.plug(src)
      bus.onValue((x) -> values.push(x))
      src.push("x")
      unplug()
      src.push("y")
      expect(values).to.deep.equal(["x"])
    it "allows consumers to re-subscribe after other consumers have unsubscribed (bug fix)", ->
      bus = new Bacon.Bus
      otherBus = new Bacon.Bus
      otherBus.plug(bus)
      unsub = otherBus.onValue ->
      unsub()
      o = []
      otherBus.onValue (v) -> o.push(v)
      bus.push("foo")
      expect(o).to.deep.equal(["foo"])
    it "toString", ->
      expect(new Bacon.Bus().toString()).to.equal("Bacon.Bus()")
# General EventStream behavior not tied to a single combinator.
describe "EventStream", ->
# Functions carried as stream *values* must not be invoked by the library.
describe "works with functions as values (bug fix)", ->
expectStreamEvents(
-> Bacon.once(-> "hello").map((f) -> f())
["hello"])
expectStreamEvents(
-> Bacon.once(-> "hello").flatMap(Bacon.once).map((f) -> f())
["hello"])
expectPropertyEvents(
-> Bacon.constant(-> "hello").map((f) -> f())
["hello"])
expectStreamEvents(
-> Bacon.constant(-> "hello").flatMap(Bacon.once).map((f) -> f())
["hello"])
it "handles one subscriber added twice just like two separate subscribers (case Bacon.noMore)", ->
values = []
bus = new Bacon.Bus()
f = (v) ->
if v.hasValue()
values.push(v.value())
# returning noMore unsubscribes only this one subscription
return Bacon.noMore
bus.subscribe(f)
bus.subscribe(f)
bus.push("bacon")
expect(values).to.deep.equal(["bacon", "bacon"])
it "handles one subscriber added twice just like two separate subscribers (case unsub)", ->
values = []
bus = new Bacon.Bus()
f = (v) ->
if v.hasValue()
values.push(v.value())
bus.subscribe(f)
unsub = bus.subscribe(f)
# unsubscribing the second subscription must leave the first intact
unsub()
bus.push("bacon")
expect(values).to.deep.equal(["bacon"])
# fromBinder: low-level stream construction; the sink accepts events,
# plain values, or arrays of either. The binder returns an unsubscribe fn.
describe "Bacon.fromBinder", ->
describe "Provides an easier alternative to the EventStream constructor, allowing sending multiple events at a time", ->
expectStreamEvents(
->
Bacon.fromBinder (sink) ->
sink([new Bacon.Next(1), new Bacon.End()])
(->)
[1])
describe "Allows sending unwrapped values as well as events", ->
expectStreamEvents(
->
Bacon.fromBinder (sink) ->
sink([1, new Bacon.End()])
(->)
[1])
describe "Allows sending single value without wrapping array", ->
expectStreamEvents(
->
Bacon.fromBinder (sink) ->
sink(1)
sink(new Bacon.End())
(->)
[1])
it "toString", ->
expect(Bacon.fromBinder(->).toString()).to.equal("Bacon.fromBinder(function,function)")
# Verifies the human-readable renderings of the four event types and that
# inspect() matches toString().
describe "String presentations", ->
describe "Initial(1).toString", ->
it "is 1", ->
expect(new Bacon.Initial(1).toString()).to.equal("1")
# Suite title fixed: was "Next({a:1i})" (stray "i"), contradicting the
# asserted value and the nested test name.
describe "Next({a:1}).toString", ->
it "is {a:1}", ->
expect(new Bacon.Next({a:1}).toString()).to.equal("{a:1}")
describe "Error({a:1}).toString", ->
it "is <error> {a:1}", ->
expect(new Bacon.Error({a:1}).toString()).to.equal("<error> {a:1}")
describe "End.toString", ->
it "is <end>", ->
expect(new Bacon.End().toString()).to.equal("<end>")
describe "inspect", ->
it "is the same as toString", ->
expect(new Bacon.Initial(1).inspect()).to.equal("1")
# name() overrides toString/inspect output for an observable, in place.
describe "Observable.name", ->
it "sets return value of toString and inspect", ->
expect(Bacon.once(1).name("one").toString()).to.equal("one")
expect(Bacon.once(1).name("one").inspect()).to.equal("one")
it "modifies the stream in place", ->
obs = Bacon.once(1)
obs.name("one")
expect(obs.toString()).to.equal("one")
it "supports composition", ->
expect(Bacon.once("raimohanska").name("raimo").take(1).inspect()).to.equal("raimo.take(1)")
# Bacon.spy registers a callback invoked for every Observable created.
describe "Bacon.spy", ->
testSpy = (expectedCount, f) ->
calls = 0
spy = (obs) -> calls++
Bacon.spy spy
f()
expect(calls).to.equal(expectedCount)
describe "calls spy function for all created Observables", ->
it "EventStream", ->
testSpy 1, -> Bacon.once(1)
it "Property", ->
testSpy 1, -> Bacon.constant(1)
it "map", ->
testSpy 2, -> Bacon.once(1).map(->)
it "combineTemplate (also called for the intermediate combineAsArray property)", ->
testSpy 4, -> Bacon.combineTemplate(Bacon.once(1), Bacon.constant(2))
# Synchronous generators producing unbounded sequences must terminate when
# bounded with take(n), even through concat/flatMap.
describe "Infinite synchronous sequences", ->
describe "Limiting length with take(n)", ->
expectStreamEvents(
-> endlessly(1,2,3).take(4)
[1,2,3,1], unstable)
expectStreamEvents(
-> endlessly(1,2,3).take(4).concat(Bacon.once(5))
[1,2,3,1,5], unstable)
expectStreamEvents(
-> endlessly(1,2,3).take(4).concat(endlessly(5, 6).take(2))
[1,2,3,1,5,6], unstable)
describe "With flatMap", ->
expectStreamEvents(
-> Bacon.fromArray([1,2]).flatMap((x) -> endlessly(x)).take(2)
[1,1])
expectStreamEvents(
-> endlessly(1,2).flatMap((x) -> endlessly(x)).take(2)
[1,1])
# Repeats the given values in a cycle as a synchronous, infinite stream.
endlessly = (values...) ->
index = 0
Bacon.fromSynchronousGenerator -> new Bacon.Next(-> values[index++ % values.length])
# Test-only extension: builds a stream from a generator function that is
# handed a push(events) callback. push delivers events one by one, stopping
# on unsubscribe, End, or a Bacon.noMore reply, then re-invokes the generator.
Bacon.fromGenerator = (generator) ->
Bacon.fromBinder (sink) ->
unsubd = false
push = (events) ->
events = Bacon._.toArray(events)
for event in events
return if unsubd
reply = sink event
return if event.isEnd() or reply == Bacon.noMore
generator(push)
# kick off with an empty batch so the generator runs immediately
push []
-> unsubd = true
# Variant where the generator simply returns the next event(s).
Bacon.fromSynchronousGenerator = (generator) ->
Bacon.fromGenerator (push) ->
push generator()
# Small combinator helpers shared by the specs below.
lessThan = (limit) ->
(x) -> x < limit
times = (x, y) -> x * y
add = (x, y) -> x + y
id = (x) -> x
expect = require("chai").expect
Bacon = require("../src/Bacon").Bacon
Mocks = require( "./Mock")
TickScheduler = require("./TickScheduler").TickScheduler
mock = Mocks.mock
mockFunction = Mocks.mockFunction
EventEmitter = require("events").EventEmitter
th = require("./SpecHelper")
t = th.t
expectStreamEvents = th.expectStreamEvents
expectPropertyEvents = th.expectPropertyEvents
verifyCleanup = th.verifyCleanup
error = th.error
soon = th.soon
series = th.series
repeat = th.repeat
toValues = th.toValues
sc = TickScheduler()
Bacon.scheduler = sc
# Some streams are unstable when testing with verifySwitching2.
# Generally, all flatMap-based streams are unstable because flatMap discards
# child streams on unsubscribe.
# Passed as the options argument to expect*Events to relax ordering checks.
unstable = {unstable:true}
# Tests for Bacon's bundled functional-utility belt (Bacon._).
# NOTE(review): many expectations here run at suite-definition time (directly
# inside describe rather than it) — this is the file's established style.
describe "Bacon._", ->
_ = Bacon._
describe "head", ->
expect(_.head([5,2,9])).to.equal(5)
expect(_.head([])).to.equal(undefined)
expect(_.head(5)).to.equal(undefined)
describe "always", -> expect(_.always(5)("francis")).to.equal(5)
describe "negate", ->
expect(_.negate(_.always(true))("timanttikobra")).to.be.false
describe "empty", ->
expect(_.empty([])).to.be.true
expect(_.empty("")).to.be.true
expect(_.empty([1])).to.be.false
expect(_.empty("1")).to.be.false
describe "tail", ->
expect(_.tail([1,2,3])).to.deep.equal([2,3])
expect(_.tail([1])).to.deep.equal([])
expect(_.tail([])).to.deep.equal([])
describe "filter", ->
expect(_.filter(_.empty, ["","1",[],[2]])).to.deep.equal(["",[]])
describe "map", ->
expect(_.map(_.head, [
[], [1], [2,2], [3,3,3]
])).to.deep.equal([
undefined, 1, 2, 3
])
describe "flatMap", ->
expect(_.flatMap(((x) -> [x, x]), [1,2,3])).to.deep.equal([1,1,2,2,3,3])
describe "each", ->
it "provides key and value to iterator", ->
expectKeyVals = (x, expectedKeys, expectedValues) ->
keys = []
values = []
_.each(x, (key, value) ->
keys.push(key)
values.push(value)
)
expect([keys, values]).to.deep.equal([expectedKeys, expectedValues])
expectKeyVals(
{cat:"furry",bird:"feathery"}, ["cat","bird"], ["furry","feathery"]
)
# array indices arrive as string keys, like for...in
expectKeyVals([1,2,3], ["0","1","2"], [1,2,3])
describe "toArray", ->
expect(_.toArray(2)).to.deep.equal([2])
it "ignores rest of arguments", ->
expect(_.toArray(1,1,2)).to.deep.equal([1])
it "should, when given an array, return it back (not a copy)", ->
arr = []
expect(_.toArray(arr)).to.equal(arr)
describe "indexOf", ->
expect(_.indexOf([1,2], 1)).to.equal(0)
expect(_.indexOf([1,2], 2)).to.equal(1)
expect(_.indexOf([1,2], 3)).to.equal(-1)
describe "contains", ->
expect(_.contains("abc", "c")).to.be.true
expect(_.contains("abc", "x")).to.be.false
expect(_.contains([2,4,6], 4)).to.be.true
expect(_.contains([2,4,6], 3)).to.be.false
describe "id", ->
obj = {}
expect(_.id(obj)).to.equal(obj)
describe "last", ->
expect(_.last([2,4])).to.equal(4)
expect(_.last("last")).to.equal("t")
describe "all", ->
expect(_.all([ [false,true], [true,true] ], _.head)).to.be.false
expect(_.all([ [true,false], [true,true] ], _.head)).to.be.true
it "should test truthiness if no function given", ->
expect(_.all([true, false, true])).to.be.false
expect(_.all([true, true, true])).to.be.true
expect(_.all([1, true, 1])).to.be.true
describe "any", ->
expect(_.any([ [false,true], [true,true] ], _.head)).to.be.true
expect(_.any([ [false,false], [false,true] ], _.head)).to.be.false
it "should test truthiness if no function given", ->
expect(_.any([false, false, false])).to.be.false
expect(_.any([true, false, true])).to.be.true
describe "without", ->
expect(_.without("apple", ["bacon","apple","apple","omelette"]))
.to.deep.equal(["bacon","omelette"])
describe "remove", ->
# remove mutates in place; returns the removed element(s), or undefined
expect(_.remove("apple", ["bacon","apple","apple","omelette"]))
.to.deep.equal(["apple"])
expect(_.remove("raisin", ["bacon","apple","apple","omelette"]))
.to.deep.equal(undefined)
describe "fold", ->
expect(_.fold([1,2,3,4,5], 0, (s, n) -> s + n)).to.equal(15)
describe "toString", ->
it "for booleans", ->
expect(_.toString(true)).to.equal("true")
it "for numbers", ->
expect(_.toString(1)).to.equal("1")
expect(_.toString(1.1)).to.equal("1.1")
it "for undefined and null", ->
expect(_.toString(undefined)).to.equal("undefined")
expect(_.toString(null)).to.equal("undefined")
it "for strings", ->
expect(_.toString("lol")).to.equal("lol")
it "for dates", ->
expect(_.toString(new Date(0))).to.contain("1970")
it "for arrays", ->
expect(_.toString([1,2,3])).to.equal("[1,2,3]")
it "for objects", ->
expect(_.toString({a: "b"})).to.equal("{a:b}")
expect(_.toString({a: "b", c: "d"})).to.equal("{a:b,c:d}")
it "for circular refs", ->
obj = { name : "nasty" }
obj.self = obj
# must not recurse forever; output stays bounded
expect(_.toString(obj).length).to.be.below(100)
it "works even when enumerable properties throw errors on access", ->
obj = { "name": "madcow" }
Object.defineProperty obj, "prop",
enumerable: true
get: ->
throw new Error "an error"
expect(_.toString(obj)).to.equal("{name:madcow,prop:Error: an error}")
# Time-based source constructors; t() scales delays to the tick scheduler.
describe "Bacon.later", ->
describe "should send single event and end", ->
expectStreamEvents(
-> Bacon.later(t(1), "lol")
["lol"])
describe "supports sending an Error event as well", ->
expectStreamEvents(
-> Bacon.later(t(1), new Bacon.Error("oops"))
[error()])
it "toString", ->
expect(Bacon.later(1, "wat").toString()).to.equal("Bacon.later(1,wat)")
it "inspect", ->
expect(Bacon.later(1, "wat").inspect()).to.equal("Bacon.later(1,wat)")
describe "Bacon.sequentially", ->
describe "should send given events and end", ->
expectStreamEvents(
-> Bacon.sequentially(t(1), ["lol", "wut"])
["lol", "wut"])
describe "include error events", ->
expectStreamEvents(
-> Bacon.sequentially(t(1), [error(), "lol"])
[error(), "lol"])
describe "will stop properly even when exception thrown by subscriber", ->
expectStreamEvents(
->
s = Bacon.sequentially(t(1), ["lol", "wut"])
# a throwing subscriber must not wedge the scheduler
s.onValue (value) ->
throw "testing"
s
[])
it "toString", ->
expect(Bacon.sequentially(1, [2]).toString()).to.equal("Bacon.sequentially(1,[2])")
describe "Bacon.repeatedly", ->
describe "repeats given sequence forever", ->
expectStreamEvents(
-> Bacon.repeatedly(1, [1,2]).take(5)
[1,2,1,2,1])
it "toString", ->
expect(Bacon.repeatedly(1, [1]).toString()).to.equal("Bacon.repeatedly(1,[1])")
describe "Bacon.interval", ->
describe "repeats single element indefinitely", ->
expectStreamEvents(
-> Bacon.interval(t(1), "x").take(3)
["x", "x", "x"])
it "toString", ->
expect(Bacon.interval(1, 2).toString()).to.equal("Bacon.interval(1,2)")
describe "Bacon.fromPoll", ->
describe "repeatedly polls given function for values", ->
expectStreamEvents(
-> Bacon.fromPoll(1, (-> "lol")).take(2)
["lol", "lol"])
it "toString", ->
expect(Bacon.fromPoll(1, (->)).toString()).to.equal("Bacon.fromPoll(1,function)")
# Shared check that a callback-lifting constructor (fromCallback /
# fromNodeCallback) resolves Observable arguments to their current values
# before invoking src: mixes streams/properties with plain values.
testLiftedCallback = (src, liftedCallback) ->
input = [
Bacon.constant('a')
'x'
Bacon.constant('b').toProperty()
'y'
]
output = ['a', 'x', 'b', 'y']
expectStreamEvents(
-> liftedCallback(src, input...)
[output]
)
# fromCallback wraps a function taking a plain callback; the stream emits the
# callback's argument once, then ends.
describe "Bacon.fromCallback", ->
describe "makes an EventStream from function that takes a callback", ->
expectStreamEvents(
->
src = (callback) -> callback("lol")
stream = Bacon.fromCallback(src)
["lol"])
describe "supports partial application", ->
expectStreamEvents(
->
src = (param, callback) -> callback(param)
stream = Bacon.fromCallback(src, "lol")
["lol"])
describe "supports partial application with Observable arguments", ->
testLiftedCallback(
(values..., callback) -> callback(values)
Bacon.fromCallback
)
describe "supports object, methodName, partial application", ->
expectStreamEvents(
->
src = {
"go": (param, callback) -> callback(param + " " + this.name)
"name": "bob"
}
stream = Bacon.fromCallback(src, "go", "hello")
["hello <NAME>"])
it "toString", ->
expect(Bacon.fromCallback((->), "lol").toString()).to.equal("Bacon.fromCallback(function,lol)")
# fromNodeCallback follows the node convention callback(err, value): an err
# becomes an Error event, a value becomes a Next.
describe "Bacon.fromNodeCallback", ->
describe "makes an EventStream from function that takes a node-style callback", ->
expectStreamEvents(
->
src = (callback) -> callback(null, "lol")
stream = Bacon.fromNodeCallback(src)
["lol"])
describe "handles error parameter correctly", ->
expectStreamEvents(
->
src = (callback) -> callback('errortxt', null)
stream = Bacon.fromNodeCallback(src)
[error()])
describe "supports partial application", ->
expectStreamEvents(
->
src = (param, callback) -> callback(null, param)
stream = Bacon.fromNodeCallback(src, "lol")
["lol"])
describe "supports partial application with Observable arguments", ->
testLiftedCallback(
(values..., callback) -> callback(null, values)
Bacon.fromNodeCallback
)
describe "supports object, methodName, partial application", ->
expectStreamEvents(
->
src = {
"go": (param, callback) -> callback(null, param + " " + this.name)
"name": "<NAME>"
}
stream = Bacon.fromNodeCallback(src, "go", "hello")
["hello <NAME>"])
it "toString", ->
expect(Bacon.fromNodeCallback((->), "lol").toString()).to.equal("Bacon.fromNodeCallback(function,lol)")
# Adapts a Node EventEmitter to the minimal DOM EventTarget surface that
# Bacon.fromEventTarget consumes: addEventListener / removeEventListener only.
toEventTarget = (emitter) ->
{
addEventListener: (event, handler) -> emitter.addListener(event, handler)
removeEventListener: (event, handler) -> emitter.removeListener(event, handler)
}
describe "Bacon.fromEventTarget", ->
# NOTE(review): shadows the SpecHelper `soon` with a real-setTimeout version,
# since these specs use actual EventEmitter timing rather than the tick clock.
soon = (f) -> setTimeout f, 0
describe "should create EventStream from DOM object", ->
expectStreamEvents(
->
emitter = new EventEmitter()
# emit only after a listener is attached, so the event isn't lost
emitter.on "newListener", ->
soon -> emitter.emit "click", "x"
element = toEventTarget emitter
Bacon.fromEventTarget(element, "click").take(1)
["x"]
)
describe "should create EventStream from EventEmitter", ->
expectStreamEvents(
->
emitter = new EventEmitter()
emitter.on "newListener", ->
soon -> emitter.emit "data", "x"
Bacon.fromEventTarget(emitter, "data").take(1)
["x"]
)
describe "should allow a custom map function for EventStream from EventEmitter", ->
expectStreamEvents(
->
emitter = new EventEmitter()
emitter.on "newListener", ->
soon -> emitter.emit "data", "x", "y"
Bacon.fromEventTarget(emitter, "data", (x, y) => [x, y]).take(1)
[["x", "y"]]
)
it "should clean up event listeners from EventEmitter", ->
emitter = new EventEmitter()
Bacon.fromEventTarget(emitter, "data").take(1).subscribe ->
emitter.emit "data", "x"
expect(emitter.listeners("data").length).to.deep.equal(0)
it "should clean up event listeners from DOM object", ->
emitter = new EventEmitter()
element = toEventTarget emitter
dispose = Bacon.fromEventTarget(element, "click").subscribe ->
dispose()
expect(emitter.listeners("click").length).to.deep.equal(0)
it "toString", ->
expect(Bacon.fromEventTarget({}, "click").toString()).to.equal("Bacon.fromEventTarget({},click)")
describe "Observable.log", ->
# Runs f with console.log possibly replaced, restoring the original console
# state afterwards so other tests keep their logging.
preservingLog = (f) ->
originalConsole = console
originalLog = console.log
try
f()
finally
global.console = originalConsole
console.log = originalLog
it "does not crash", ->
preservingLog ->
console.log = ->
Bacon.constant(1).log()
it "does not crash in case console.log is not defined", ->
preservingLog ->
console.log = undefined
Bacon.constant(1).log()
it "toString", ->
expect(Bacon.never().log().toString()).to.equal("Bacon.never()")
# slidingWindow(max, min=0): Property of the last <= max values, emitted once
# at least min values have accumulated.
describe "Observable.slidingWindow", ->
describe "slides the window for EventStreams", ->
expectPropertyEvents(
-> series(1, [1,2,3]).slidingWindow(2)
[[], [1], [1,2], [2,3]])
describe "slides the window for Properties", ->
expectPropertyEvents(
-> series(1, [1,2,3]).toProperty().slidingWindow(2)
[[], [1], [1,2], [2,3]])
describe "accepts second parameter for minimum amount of values", ->
expectPropertyEvents(
-> series(1, [1,2,3,4]).slidingWindow(3, 2)
[[1,2], [1,2,3], [2,3,4]])
expectPropertyEvents(
-> series(1, [1,2,3,4]).toProperty(0).slidingWindow(3, 2)
[[0,1], [0, 1, 2], [1,2,3], [2,3,4]])
it "toString", ->
expect(Bacon.never().slidingWindow(2).toString()).to.equal("Bacon.never().slidingWindow(2,0)")
# filter accepts a predicate, a ".field" extractor, or a Property whose
# current value gates the stream. Errors always pass through.
describe "EventStream.filter", ->
describe "should filter values", ->
expectStreamEvents(
-> series(1, [1, 2, error(), 3]).filter(lessThan(3))
[1, 2, error()])
describe "extracts field values", ->
expectStreamEvents(
-> series(1, [{good:true, value:"yes"}, {good:false, value:"no"}]).filter(".good").map(".value")
["yes"])
describe "can filter by Property value", ->
expectStreamEvents(
->
src = series(1, [1,1,2,3,4,4,8,7])
odd = src.map((x) -> x % 2).toProperty()
src.filter(odd)
[1,1,3,7])
it "toString", ->
expect(Bacon.never().filter(false).toString()).to.equal("Bacon.never().filter(function)")
# map accepts a function (with partial application), a constant, a ".field"
# extractor (including nested fields and method calls), or a Property.
describe "EventStream.map", ->
describe "should map with given function", ->
expectStreamEvents(
-> series(1, [1, 2, 3]).map(times, 2)
[2, 4, 6])
describe "also accepts a constant value", ->
expectStreamEvents(
-> series(1, [1, 2, 3,]).map("lol")
["lol", "lol", "lol"])
describe "extracts property from value object", ->
o = { lol : "wut" }
expectStreamEvents(
-> repeat(1, [o]).take(3).map(".lol")
["wut", "wut", "wut"])
describe "extracts a nested property too", ->
o = { lol : { wut : "wat" } }
expectStreamEvents(
-> Bacon.once(o).map(".lol.wut")
["wat"])
describe "in case of a function property, calls the function with no args", ->
expectStreamEvents(
-> Bacon.once([1,2,3]).map(".length")
[3])
describe "allows arguments for methods", ->
thing = { square: (x) -> x * x }
expectStreamEvents(
-> Bacon.once(thing).map(".square", 2)
[4])
describe "works with method call on given object, with partial application", ->
multiplier = { multiply: (x, y) -> x * y }
expectStreamEvents(
-> series(1, [1,2,3]).map(multiplier, "multiply", 2)
[2,4,6])
describe "can map to a Property value", ->
expectStreamEvents(
-> series(1, [1,2,3]).map(Bacon.constant(2))
[2,2,2])
it "preserves laziness", ->
calls = 0
id = (x) ->
calls++
x
# skip(4) means only the last value's mapping is ever forced
Bacon.fromArray([1,2,3,4,5]).map(id).skip(4).onValue()
expect(calls).to.equal(1)
it "toString", ->
expect(Bacon.once(1).map(true).toString()).to.equal("Bacon.once(1).map(function)")
# mapError converts Error events into values using a function or constant.
describe "EventStream.mapError", ->
describe "should map error events with given function", ->
expectStreamEvents(
-> repeat(1, [1, error("OOPS")]).mapError(id).take(2)
[1, "OOPS"])
describe "also accepts a constant value", ->
expectStreamEvents(
-> repeat(1, [1, error()]).mapError("ERR").take(2)
[1, "ERR"])
it "toString", ->
expect(Bacon.never().mapError(true).toString()).to.equal("Bacon.never().mapError(function)")
# doAction runs a side effect per value without altering the stream.
describe "EventStream.doAction", ->
it "calls function before sending value to listeners", ->
called = []
bus = new Bacon.Bus()
s = bus.doAction((x) -> called.push(x))
s.onValue(->)
s.onValue(->)
bus.push(1)
# the action runs once per event, not once per subscriber
expect(called).to.deep.equal([1])
describe "does not alter the stream", ->
expectStreamEvents(
-> series(1, [1, 2]).doAction(->)
[1, 2])
it "toString", ->
expect(Bacon.never().doAction((->)).toString()).to.equal("Bacon.never().doAction(function)")
# mapEnd appends one extra value (function result, constant, or undefined)
# when the stream ends.
describe "EventStream.mapEnd", ->
describe "produces an extra element on stream end", ->
expectStreamEvents(
-> series(1, ["1", error()]).mapEnd("the end")
["1", error(), "the end"])
describe "accepts either a function or a constant value", ->
expectStreamEvents(
-> series(1, ["1", error()]).mapEnd(-> "the end")
["1", error(), "the end"])
describe "works with undefined value as well", ->
expectStreamEvents(
-> series(1, ["1", error()]).mapEnd()
["1", error(), undefined])
it "toString", ->
expect(Bacon.never().mapEnd(true).toString()).to.equal("Bacon.never().mapEnd(function)")
describe "EventStream.take", ->
describe "takes N first elements", ->
expectStreamEvents(
-> series(1, [1,2,3,4]).take(2)
[1,2])
describe "works with N=0", ->
expectStreamEvents(
-> series(1, [1,2,3,4]).take(0)
[])
describe "will stop properly even when exception thrown by subscriber", ->
expectStreamEvents(
->
s = Bacon.repeatedly(t(1), ["lol", "wut"]).take(2)
s.onValue (value) ->
throw "testing"
s
[])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1,2,3,4]).take(2)
[1,2])
it "toString", ->
expect(Bacon.never().take(1).toString()).to.equal("Bacon.never().take(1)")
# takeWhile accepts a predicate, ".field" extractor, or gating Property;
# errors pass through while taking.
describe "EventStream.takeWhile", ->
describe "takes while predicate is true", ->
expectStreamEvents(
-> repeat(1, [1, error("wat"), 2, 3]).takeWhile(lessThan(3))
[1, error("wat"), 2])
describe "extracts field values", ->
expectStreamEvents(
->
series(1, [{good:true, value:"yes"}, {good:false, value:"no"}])
.takeWhile(".good").map(".value")
["yes"])
describe "can filter by Property value", ->
expectStreamEvents(
->
src = series(1, [1,1,2,3,4,4,8,7])
odd = src.map((x) -> x % 2).toProperty()
src.takeWhile(odd)
[1,1])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3]).takeWhile(lessThan(3))
[1, 2])
it "toString", ->
expect(Bacon.never().takeWhile(true).toString()).to.equal("Bacon.never().takeWhile(function)")
# skip drops the first N values; errors are never counted or dropped.
describe "EventStream.skip", ->
describe "should skip first N items", ->
expectStreamEvents(
-> series(1, [1, error(), 2, error(), 3]).skip(1)
[error(), 2, error(), 3])
describe "accepts N <= 0", ->
expectStreamEvents(
-> series(1, [1, 2]).skip(-1)
[1, 2])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3]).skip(1)
[2, 3])
it "toString", ->
expect(Bacon.never().skip(1).toString()).to.equal("Bacon.never().skip(1)")
# skipWhile drops values while the predicate (function, ".field", or
# Property) holds; errors pass through during the skip.
describe "EventStream.skipWhile", ->
describe "skips filter predicate holds true", ->
expectStreamEvents(
-> series(1, [1, error(), 2, error(), 3, 2]).skipWhile(lessThan(3))
[error(), error(), 3, 2])
describe "extracts field values", ->
expectStreamEvents(
->
series(1, [{good:true, value:"yes"}, {good:false, value:"no"}])
.skipWhile(".good").map(".value")
["no"])
describe "can filter by Property value", ->
expectStreamEvents(
->
src = series(1, [1,1,2,3,4,4,8,7])
odd = src.map((x) -> x % 2).toProperty()
src.skipWhile(odd)
[2,3,4,4,8,7])
describe "for synchronous sources", ->
describe "skips filter predicate holds true", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3, 2]).skipWhile(lessThan(3))
[3, 2])
it "toString", ->
expect(Bacon.never().skipWhile(1).toString()).to.equal("Bacon.never().skipWhile(function)")
# skipUntil ignores values until the starter stream emits; the starter may
# be derived from the source itself.
describe "EventStream.skipUntil", ->
describe "skips events until one appears in given starter stream", ->
expectStreamEvents(
->
src = series(3, [1,2,3])
src.onValue(->) # to start "time" immediately instead of on subscribe
starter = series(4, ["start"])
src.skipUntil(starter)
[2,3])
describe "works with self-derived starter", ->
expectStreamEvents(
->
src = series(3, [1,2,3])
starter = src.filter((x) -> x == 3)
src.skipUntil(starter)
[3])
describe "works with self-derived starter with an evil twist", ->
expectStreamEvents(
->
src = series(3, [1,2,3])
data = src.map((x) -> x)
data.onValue(->)
starter = src.filter((x) -> x == 3)
data.skipUntil(starter)
[3])
it "toString", ->
expect(Bacon.never().skipUntil(Bacon.once(1)).toString()).to.equal("Bacon.never().skipUntil(Bacon.once(1))")
# skipDuplicates drops consecutive equal values, with optional custom
# equality; state must be shared across subscribers (#211).
describe "EventStream.skipDuplicates", ->
it "Drops duplicates with subscribers with non-overlapping subscription time (#211)", ->
b = new Bacon.Bus()
noDups = b.skipDuplicates()
round = (expected) ->
values = []
noDups.take(1).onValue (x) -> values.push(x)
b.push 1
expect(values).to.deep.equal(expected)
round([1])
round([])
round([])
describe "drops duplicates", ->
expectStreamEvents(
-> series(1, [1, 2, error(), 2, 3, 1]).skipDuplicates()
[1, 2, error(), 3, 1])
describe "allows undefined as initial value", ->
expectStreamEvents(
-> series(1, [undefined, undefined, 1, 2]).skipDuplicates()
[undefined, 1, 2])
describe "works with custom isEqual function", ->
a = {x: 1}; b = {x: 2}; c = {x: 2}; d = {x: 3}; e = {x: 1}
isEqual = (a, b) -> a?.x == b?.x
expectStreamEvents(
-> series(1, [a, b, error(), c, d, e]).skipDuplicates(isEqual)
[a, b, error(), d, e])
describe "works with synchrounous sources", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 2, 3, 1]).skipDuplicates()
[1, 2, 3, 1], unstable)
it "toString", ->
expect(Bacon.never().skipDuplicates().toString()).to.equal("Bacon.never().skipDuplicates()")
# flatMap spawns a stream per source value and merges all results. f may
# return a stream, Property, plain value, or Error; a constant Observable or
# ".field" extractor works as well.
describe "EventStream.flatMap", ->
describe "should spawn new stream for each value and collect results into a single stream", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMap (value) ->
Bacon.sequentially(t(2), [value, error(), value])
[1, 2, error(), error(), 1, 2], unstable)
describe "should pass source errors through to the result", ->
expectStreamEvents(
-> series(1, [error(), 1]).flatMap (value) ->
Bacon.later(t(1), value)
[error(), 1])
describe "should work with a spawned stream responding synchronously", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMap (value) ->
Bacon.never().concat(Bacon.once(value))
[1, 2], unstable)
expectStreamEvents(
-> series(1, [1,2]).flatMap (value) ->
Bacon.never().concat(Bacon.once(value)).concat(Bacon.once("lol"))
[1, "lol", 2, "lol"], unstable)
describe "should work with a source stream responding synchronously", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2]).flatMap (value) ->
Bacon.once(value)
[1, 2])
expectStreamEvents(
-> Bacon.fromArray([1, 2]).flatMap (value) ->
Bacon.fromArray([value, value*10])
[1, 10, 2, 20])
expectStreamEvents(
-> Bacon.once(1).flatMap (value) ->
Bacon.later(0, value)
[1])
describe "Works also when f returns a Property instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap(Bacon.constant)
[1,2], unstable)
describe "Works also when f returns a constant value instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap((x) -> x)
[1,2], unstable)
describe "Works also when f returns an Error instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap((x) -> new Bacon.Error(x))
[new Bacon.Error(1), new Bacon.Error(2)], unstable)
describe "Accepts a constant EventStream/Property as an alternative to a function", ->
expectStreamEvents(
-> Bacon.once("asdf").flatMap(Bacon.constant("bacon"))
["bacon"])
expectStreamEvents(
-> Bacon.once("asdf").flatMap(Bacon.once("bacon"))
["bacon"])
describe "Respects function construction rules", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("<NAME>")}).flatMap(".bacon")
["<NAME>"])
expectStreamEvents(
-> Bacon.once({ bacon: "<NAME>"}).flatMap(".bacon")
["<NAME>"])
expectStreamEvents(
->
glorify = (x, y) -> Bacon.fromArray([x, y])
Bacon.once("francis").flatMap(glorify, "sir")
["sir", "fr<NAME>"])
it "toString", ->
expect(Bacon.never().flatMap(->).toString()).to.equal("Bacon.never().flatMap(function)")
# Property.flatMap also spawns for the Initial event.
describe "Property.flatMap", ->
describe "should spawn new stream for all events including Init", ->
expectStreamEvents(
->
once = (x) -> Bacon.once(x)
series(1, [1, 2]).toProperty(0).flatMap(once)
[0, 1, 2], unstable)
describe "Works also when f returns a Property instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).toProperty().flatMap(Bacon.constant)
[1,2], unstable)
expectPropertyEvents(
-> series(1, [1,2]).toProperty().flatMap(Bacon.constant).toProperty()
[1,2], unstable)
describe "works for synchronous source", ->
expectStreamEvents(
->
once = (x) -> Bacon.once(x)
Bacon.fromArray([1, 2]).toProperty(0).flatMap(once)
[0, 1, 2], unstable)
it "toString", ->
expect(Bacon.constant(1).flatMap(->).toString()).to.equal("Bacon.constant(1).flatMap(function)")
# flatMapLatest keeps only the most recently spawned child stream,
# unsubscribing the previous one on each new source value.
describe "EventStream.flatMapLatest", ->
describe "spawns new streams but collects values from the latest spawned stream only", ->
expectStreamEvents(
-> series(3, [1, 2]).flatMapLatest (value) ->
Bacon.sequentially(t(2), [value, error(), value])
[1, 2, error(), 2], unstable)
describe "Accepts a constant EventStream/Property as an alternative to a function", ->
expectStreamEvents(
-> Bacon.once("asdf").flatMapLatest(Bacon.constant("bacon"))
["bacon"], unstable)
describe "Accepts a field extractor string instead of function", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("<NAME>")}).flatMapLatest(".bacon")
["<NAME>"])
expectStreamEvents(
-> Bacon.once({ bacon: "<NAME>"}).flatMapLatest(".bacon")
["<NAME>"])
it "toString", ->
expect(Bacon.never().flatMapLatest(->).toString()).to.equal("Bacon.never().flatMapLatest(function)")
describe "Property.flatMapLatest", ->
describe "spawns new streams but collects values from the latest spawned stream only", ->
expectStreamEvents(
-> series(3, [1, 2]).toProperty(0).flatMapLatest (value) ->
Bacon.sequentially(t(2), [value, value])
[0, 1, 2, 2], unstable)
describe "Accepts a constant EventStream/Property as an alternative to a function", ->
expectStreamEvents(
-> Bacon.constant("asdf").flatMapLatest(Bacon.constant("bacon"))
["bacon"], unstable)
it "toString", ->
expect(Bacon.constant(1).flatMapLatest(->).toString()).to.equal("Bacon.constant(1).flatMapLatest(function)")
# flatMapFirst ignores source values while a spawned child is still running.
describe "EventStream.flatMapFirst", ->
describe "spawns new streams and ignores source events until current spawned stream has ended", ->
expectStreamEvents(
-> series(2, [2, 4, 6, 8]).flatMapFirst (value) ->
series(1, ["a" + value, "b" + value, "c" + value])
["a2", "b2", "c2", "a6", "b6", "c6"], unstable)
describe "Accepts a field extractor string instead of function", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("<NAME>")}).flatMapFirst(".bacon")
["<NAME>"])
expectStreamEvents(
-> Bacon.once({ bacon: "<NAME>"}).flatMapFirst(".bacon")
["<NAME>"])
it "toString", ->
expect(Bacon.never().flatMapFirst(->).toString()).to.equal("Bacon.never().flatMapFirst(function)")
# merge interleaves two streams by arrival time and ends when both end.
describe "EventStream.merge", ->
describe "merges two streams and ends when both are exhausted", ->
expectStreamEvents(
->
left = series(1, [1, error(), 2, 3])
right = series(1, [4, 5, 6]).delay(t(4))
left.merge(right)
[1, error(), 2, 3, 4, 5, 6], unstable)
describe "respects subscriber return value", ->
expectStreamEvents(
->
left = repeat(2, [1, 3]).take(3)
right = repeat(3, [2]).take(3)
left.merge(right).takeWhile(lessThan(2))
[1])
describe "does not duplicate same error from two streams", ->
expectStreamEvents(
->
src = series(1, [1, error(), 2, error(), 3])
left = src.map((x) -> x)
right = src.map((x) -> x * 2)
left.merge(right)
[1, 2, error(), 2, 4, error(), 3, 6])
describe "works with synchronous sources", ->
expectStreamEvents(
-> Bacon.fromArray([1,2]).merge(Bacon.fromArray([3,4]))
[1,2,3,4])
it "toString", ->
expect(Bacon.once(1).merge(Bacon.once(2)).toString()).to.equal("Bacon.once(1).merge(Bacon.once(2))")
# delay shifts values (but not errors) forward in time by a fixed amount.
describe "EventStream.delay", ->
describe "delays all events (except errors) by given delay in milliseconds", ->
expectStreamEvents(
->
left = series(2, [1, 2, 3])
right = series(1, [error(), 4, 5, 6]).delay(t(6))
left.merge(right)
[error(), 1, 2, 3, 4, 5, 6], unstable)
describe "works with synchronous streams", ->
expectStreamEvents(
->
left = Bacon.fromArray([1, 2, 3])
right = Bacon.fromArray([4, 5, 6]).delay(t(6))
left.merge(right)
[1, 2, 3, 4, 5, 6], unstable)
it "toString", ->
expect(Bacon.never().delay(1).toString()).to.equal("Bacon.never().delay(1)")
# EventStream.debounce: emits a value only after the input has been quiet for
# the given delay; errors pass through immediately.
describe "EventStream.debounce", ->
  describe "throttles input by given delay, passing-through errors", ->
    expectStreamEvents(
      -> series(2, [1, error(), 2]).debounce(t(7))
      [error(), 2])
  # expectStreamTimings asserts [time, value] pairs on the test scheduler
  describe "waits for a quiet period before outputing anything", ->
    th.expectStreamTimings(
      -> series(2, [1, 2, 3, 4]).debounce(t(3))
      [[11, 4]])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, 3, 4]).debounce(t(3))
      [4])
  describe "works in combination with scan", ->
    count = 0
    expectPropertyEvents(
      -> series(2, [1,2,3]).debounce(1).scan(0, (x,y) -> count++; x + y)
      [0, 1, 3, 6]
    )
    # scan must not re-invoke the accumulator on resubscription
    it "calls accumulator once per value", ->
      expect(count).to.equal(3)
  it "toString", ->
    expect(Bacon.never().debounce(1).toString()).to.equal("Bacon.never().debounce(1)")
# debounceImmediate: leading-edge variant — emit first, then suppress for the
# delay window.
describe "EventStream.debounceImmediate(delay)", ->
  describe "outputs first event immediately, then ignores events for given amount of milliseconds", ->
    th.expectStreamTimings(
      -> series(2, [1, 2, 3, 4]).debounceImmediate(t(3))
      [[2, 1], [6, 3]], unstable)
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, 3, 4]).debounceImmediate(t(3))
      [1])
  it "toString", ->
    expect(Bacon.never().debounceImmediate(1).toString()).to.equal("Bacon.never().debounceImmediate(1)")
# throttle: rate-limits to steady intervals without requiring a quiet period
# (contrast with debounce above).
describe "EventStream.throttle(delay)", ->
  describe "outputs at steady intervals, without waiting for quiet period", ->
    th.expectStreamTimings(
      -> series(2, [1, 2, 3]).throttle(t(3))
      [[5, 2], [8, 3]])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, 3]).throttle(t(3))
      [3])
  it "toString", ->
    expect(Bacon.never().throttle(1).toString()).to.equal("Bacon.never().throttle(1)")
# bufferWithTime: collects values into arrays flushed on a timer; errors are
# not buffered and pass straight through.
describe "EventStream.bufferWithTime", ->
  describe "returns events in bursts, passing through errors", ->
    expectStreamEvents(
      -> series(2, [error(), 1, 2, 3, 4, 5, 6, 7]).bufferWithTime(t(7))
      [error(), [1, 2, 3, 4], [5, 6, 7]])
  # atGivenTimes emits each value at the listed scheduler tick
  describe "keeps constant output rate even when input is sporadical", ->
    th.expectStreamTimings(
      -> th.atGivenTimes([[0, "a"], [3, "b"], [5, "c"]]).bufferWithTime(t(2))
      [[2, ["a"]], [4, ["b"]], [6, ["c"]]]
      unstable
    )
  describe "works with empty stream", ->
    expectStreamEvents(
      -> Bacon.never().bufferWithTime(t(1))
      [])
  # bufferWithTime also accepts a defer function instead of a delay number
  describe "allows custom defer-function", ->
    fast = (f) -> sc.setTimeout(f, 0)
    th.expectStreamTimings(
      -> th.atGivenTimes([[0, "a"], [2, "b"]]).bufferWithTime(fast)
      [[0, ["a"]], [2, ["b"]]])
  describe "works with synchronous defer-function", ->
    sync = (f) -> f()
    th.expectStreamTimings(
      -> th.atGivenTimes([[0, "a"], [2, "b"]]).bufferWithTime(sync)
      [[0, ["a"]], [2, ["b"]]])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> series(2, [1,2,3]).bufferWithTime(t(7))
      [[1,2,3]])
  it "toString", ->
    expect(Bacon.never().bufferWithTime(1).toString()).to.equal("Bacon.never().bufferWithTime(1)")
# bufferWithCount: fixed-size chunks; a short final chunk is still emitted.
describe "EventStream.bufferWithCount", ->
  describe "returns events in chunks of fixed size, passing through errors", ->
    expectStreamEvents(
      -> series(1, [1, 2, 3, error(), 4, 5]).bufferWithCount(2)
      [[1, 2], error(), [3, 4], [5]])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1,2,3,4,5]).bufferWithCount(2)
      [[1, 2], [3, 4], [5]])
  it "toString", ->
    expect(Bacon.never().bufferWithCount(1).toString()).to.equal("Bacon.never().bufferWithCount(1)")
# bufferWithTimeOrCount: flushes on whichever limit is hit first.
describe "EventStream.bufferWithTimeOrCount", ->
  describe "flushes on count", ->
    expectStreamEvents(
      -> series(1, [1, 2, 3, error(), 4, 5]).bufferWithTimeOrCount(t(10), 2)
      [[1, 2], error(), [3, 4], [5]])
  describe "flushes on timeout", ->
    expectStreamEvents(
      -> series(2, [error(), 1, 2, 3, 4, 5, 6, 7]).bufferWithTimeOrCount(t(7), 10)
      [error(), [1, 2, 3, 4], [5, 6, 7]])
  it "toString", ->
    expect(Bacon.never().bufferWithTimeOrCount(1, 2).toString()).to.equal("Bacon.never().bufferWithTimeOrCount(1,2)")
# takeUntil: passes source events through until the stopper produces any
# value, then ends. Stopper errors are ignored; a Property stopper counts its
# initial value as a stop signal.
describe "EventStream.takeUntil", ->
  describe "takes elements from source until an event appears in the other stream", ->
    expectStreamEvents(
      ->
        src = repeat(3, [1, 2, 3])
        stopper = repeat(7, ["stop!"])
        src.takeUntil(stopper)
      [1, 2], unstable)
  # stopper derived from src itself — must not stop before the triggering
  # value has been delivered
  describe "works on self-derived stopper", ->
    expectStreamEvents(
      ->
        src = repeat(3, [3, 2, 1])
        stopper = src.filter(lessThan(3))
        src.takeUntil(stopper)
      [3])
  # an extra eager subscriber on a sibling branch must not perturb the result
  describe "works on self-derived stopper with an evil twist", ->
    expectStreamEvents(
      ->
        src = repeat(3, [3, 2, 1])
        data = src.map((x) -> x)
        data.take(3).onValue(->)
        stopper = src.filter(lessThan(3))
        data.takeUntil(stopper)
      [3])
  describe "includes source errors, ignores stopper errors", ->
    expectStreamEvents(
      ->
        src = repeat(2, [1, error(), 2, 3])
        stopper = repeat(7, ["stop!"]).merge(repeat(1, [error()]))
        src.takeUntil(stopper)
      [1, error(), 2], unstable)
  describe "works with Property as stopper", ->
    expectStreamEvents(
      ->
        src = repeat(3, [1, 2, 3])
        stopper = repeat(7, ["stop!"]).toProperty()
        src.takeUntil(stopper)
      [1, 2], unstable)
  describe "considers Property init value as stopper", ->
    expectStreamEvents(
      ->
        src = repeat(3, [1, 2, 3])
        stopper = Bacon.constant("stop")
        src.takeUntil(stopper)
      [])
  describe "ends immediately with synchronous stopper", ->
    expectStreamEvents(
      ->
        src = repeat(3, [1, 2, 3])
        stopper = Bacon.once("stop")
        src.takeUntil(stopper)
      [])
  describe "ends properly with a never-ending stopper", ->
    expectStreamEvents(
      ->
        src = series(1, [1,2,3])
        stopper = new Bacon.Bus()
        src.takeUntil(stopper)
      [1,2,3])
  describe "ends properly with a never-ending stopper and synchronous source", ->
    expectStreamEvents(
      ->
        src = Bacon.fromArray([1,2,3]).mapEnd("finito")
        stopper = new Bacon.Bus()
        src.takeUntil(stopper)
      [1,2,3, "finito"])
  # sc.now() is the test scheduler clock; onUnsub asserts the unsubscription
  # happens right when the stopper fires, not when the source would end
  describe "unsubscribes its source as soon as possible", ->
    expectStreamEvents(
      ->
        startTick = sc.now()
        Bacon.later(20)
          .onUnsub(->
            expect(sc.now()).to.equal(startTick + 1))
          .takeUntil Bacon.later(1)
      [])
  describe "it should unsubscribe its stopper on end", ->
    expectStreamEvents(
      ->
        startTick = sc.now()
        Bacon.later(1,'x').takeUntil(Bacon.later(20).onUnsub(->
          expect(sc.now()).to.equal(startTick + 1)))
      ['x'])
  describe "it should unsubscribe its stopper on no more", ->
    expectStreamEvents(
      ->
        startTick = sc.now()
        Bacon.later(1,'x').takeUntil(Bacon.later(20).onUnsub(->
          expect(sc.now()).to.equal(startTick + 1)))
      ['x'])
  ### TODO does not pass
  describe "works with synchronous self-derived sources", ->
    expectStreamEvents(
      ->
        a = Bacon.fromArray [1,2]
        b = a.filter((x) -> x >= 2)
        a.takeUntil b
      [1])
  ###
  it "toString", ->
    expect(Bacon.later(1, "a").takeUntil(Bacon.later(2, "b")).toString()).to.equal("Bacon.later(1,a).takeUntil(Bacon.later(2,b))")
# Re-entrant dispatch: pushing to a Bus from inside one of its own
# subscribers must queue the new events and deliver them in a consistent,
# non-interleaved order to all subscribers.
describe "When an Event triggers another one in the same stream, while dispatching", ->
  it "Delivers triggered events correctly", ->
    bus = new Bacon.Bus
    values = []
    bus.take(2).onValue (v) ->
      bus.push "A"
      bus.push "B"
    bus.onValue (v) ->
      values.push(v)
    bus.push "a"
    bus.push "b"
    # "a" triggers A,B (take(2) fires for "a" and "A"), then "b" is delivered
    expect(values).to.deep.equal(["a", "A", "B", "A", "B", "b"])
  it "EventStream.take(1) works correctly (bug fix)", ->
    bus = new Bacon.Bus
    values = []
    bus.take(1).onValue (v) ->
      # the side-effect push must not be re-delivered to this take(1) subscriber
      bus.push("onValue triggers a side-effect here")
      values.push(v)
    bus.push("foo")
    expect(values).to.deep.equal(["foo"])
# awaiting(other): a boolean Property that is true while this observable has
# produced output more recently than `other` (i.e. we are "awaiting" a
# response from `other`).
describe "EventStream.awaiting(other)", ->
  describe "indicates whether s1 has produced output after s2 (or only the former has output so far)", ->
    expectPropertyEvents(
      -> series(2, [1, 1]).awaiting(series(3, [2]))
      [false, true, false, true])
  describe "supports Properties", ->
    expectPropertyEvents(
      -> series(2, [1, 1]).awaiting(series(3, [2]).toProperty())
      [false, true, false, true])
  # simultaneous events on both sides must not flip the flag to true
  # (fixed typo in description: "simultaneouts" -> "simultaneous")
  describe "supports simultaneous events", ->
    expectPropertyEvents(
      ->
        src = Bacon.later(1, 1)
        src.awaiting(src.map(->))
      [false])
    expectPropertyEvents(
      ->
        src = Bacon.later(1, 1)
        src.map(->).awaiting(src)
      [false])
  it "toString", ->
    expect(Bacon.never().awaiting(Bacon.once(1)).toString()).to.equal("Bacon.never().awaiting(Bacon.once(1))")
# endOnError: terminates the stream at the first (matching) Error event. The
# optional predicate/extractor decides which errors are terminal.
describe "EventStream.endOnError", ->
  describe "terminates on error", ->
    expectStreamEvents(
      -> repeat(1, [1, 2, error(), 3]).endOnError()
      [1, 2, error()])
  # only the error whose payload has serious:true ends the stream; the plain
  # error() earlier passes through
  describe "accepts predicate function", ->
    expectStreamEvents(
      -> series(1, [1, 2, error(), 3, new Bacon.Error({serious:true}), 4]).endOnError((e) -> e?.serious)
      [1,2,error(),3,error()])
  describe "accepts extractor string", ->
    expectStreamEvents(
      -> series(1, [1, 2, error(), 3, new Bacon.Error({serious:true}), 4]).endOnError(".serious")
      [1,2,error(),3,error()])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, error(), 3]).endOnError()
      [1, 2, error()])
  it "toString", ->
    expect(Bacon.never().endOnError().toString()).to.equal("Bacon.never().endOnError()")
# Bacon.constant: a Property with a single, immediately-available value that
# is delivered to every subscriber, past and future.
describe "Bacon.constant", ->
  describe "creates a constant property", ->
    expectPropertyEvents(
      -> Bacon.constant("lol")
      ["lol"])
  # onValue returns an unsubscriber; calling it must be a safe no-op here
  it "ignores unsubscribe", ->
    Bacon.constant("lol").onValue(=>)()
  describe "provides same value to all listeners", ->
    c = Bacon.constant("lol")
    expectPropertyEvents((-> c), ["lol"])
    # a second, later subscriber on the same constant still sees the value
    it "check check", ->
      f = mockFunction()
      c.onValue(f)
      f.verify("lol")
  it "provides same value to all listeners, when mapped (bug fix)", ->
    c = Bacon.constant("lol").map(id)
    f = mockFunction()
    c.onValue(f)
    f.verify("lol")
    c.onValue(f)
    f.verify("lol")
  it "toString", ->
    expect(Bacon.constant(1).toString()).to.equal("Bacon.constant(1)")
# Bacon.never: a stream that emits nothing but End.
describe "Bacon.never", ->
  describe "should send just end", ->
    expectStreamEvents(
      -> Bacon.never()
      [])
# Bacon.once: a single-value stream; a Bacon.Error (or any wrapped event)
# passed as the argument is emitted as that event rather than as a value.
describe "Bacon.once", ->
  describe "should send single event and end", ->
    expectStreamEvents(
      -> Bacon.once("pow")
      ["pow"])
  describe "accepts an Error event as parameter", ->
    expectStreamEvents(
      -> Bacon.once(new Bacon.Error("oop"))
      [error()])
  describe "Allows wrapped events, for instance, Bacon.Error", ->
    expectStreamEvents(
      -> Bacon.once(error())
      [error()])
# Bacon.fromArray: emits array elements in order, then ends; wrapped events
# in the array are emitted as-is.
describe "Bacon.fromArray", ->
  describe "Turns an empty array into an EventStream", ->
    expectStreamEvents(
      -> Bacon.fromArray([])
      [])
  describe "Turns a single-element array into an EventStream", ->
    expectStreamEvents(
      -> Bacon.fromArray([1])
      [1])
  describe "Turns a longer array into an EventStream", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, 3])
      [1, 2, 3])
  describe "Allows wrapped events, for instance, Bacon.Error", ->
    expectStreamEvents(
      -> Bacon.fromArray([error(), 1])
      [error(), 1])
# concat: all events of the left stream first, then subscribes the right
# stream. Contrast with merge, which interleaves.
describe "EventStream.concat", ->
  describe "provides values from streams in given order and ends when both are exhausted", ->
    expectStreamEvents(
      ->
        left = series(2, [1, error(), 2, 3])
        right = series(1, [4, 5, 6])
        left.concat(right)
      [1, error(), 2, 3, 4, 5, 6], unstable)
  # Bacon.noMore from the subscriber must stop the left stream before the
  # right is ever subscribed
  describe "respects subscriber return value when providing events from left stream", ->
    expectStreamEvents(
      ->
        left = repeat(3, [1, 3]).take(3)
        right = repeat(2, [1]).take(3)
        left.concat(right).takeWhile(lessThan(2))
      [1])
  describe "respects subscriber return value when providing events from right stream", ->
    expectStreamEvents(
      ->
        left = series(3, [1, 2])
        right = series(2, [2, 4, 6])
        left.concat(right).takeWhile(lessThan(4))
      [1, 2, 2])
  describe "works with Bacon.never()", ->
    expectStreamEvents(
      -> Bacon.never().concat(Bacon.never())
      [])
  describe "works with Bacon.once()", ->
    expectStreamEvents(
      -> Bacon.once(2).concat(Bacon.once(1))
      [2, 1])
  describe "works with Bacon.once() and Bacon.never()", ->
    expectStreamEvents(
      -> Bacon.once(1).concat(Bacon.never())
      [1])
  describe "works with Bacon.never() and Bacon.once()", ->
    expectStreamEvents(
      -> Bacon.never().concat(Bacon.once(1))
      [1])
  describe "works with Bacon.once() and async source", ->
    expectStreamEvents(
      -> Bacon.once(1).concat(series(1, [2, 3]))
      [1, 2, 3])
  describe "works with Bacon.once() and Bacon.fromArray()", ->
    expectStreamEvents(
      -> Bacon.once(1).concat(Bacon.fromArray([2, 3]))
      [1, 2, 3], unstable)
  # regression-style test: flatMapLatest switches to x=2 before the delayed
  # concat tail of x=1 fires, so only 2 reaches the bus before it ends
  describe "Works with synchronized left stream and doAction", ->
    expectStreamEvents(
      ->
        bus = new Bacon.Bus()
        stream = Bacon.fromArray([1,2]).flatMapLatest (x) ->
          Bacon.once(x).concat(Bacon.later(10, x).doAction((x) -> bus.push(x); bus.end()))
        stream.onValue ->
        bus
      [2])
  it "toString", ->
    expect(Bacon.once(1).concat(Bacon.once(2)).toString()).to.equal("Bacon.once(1).concat(Bacon.once(2))")
# EventStream.startWith: prepends a seed value before the stream's own events.
describe "EventStream.startWith", ->
  describe "provides seed value, then the rest", ->
    expectStreamEvents(
      ->
        left = series(1, [1, 2, 3])
        left.startWith('pow')
      ['pow', 1, 2, 3], unstable)
  describe "works with synchronous source", ->
    expectStreamEvents(
      ->
        left = Bacon.fromArray([1, 2, 3])
        left.startWith('pow')
      ['pow', 1, 2, 3], unstable)
  it "toString", ->
    expect(Bacon.never().startWith(0).toString()).to.equal("Bacon.never().startWith(0)")
# Property.startWith: supplies a seed only when the Property has no initial
# value of its own; an existing initial value wins over the seed.
describe "Property.startWith", ->
  describe "starts with given value if the Property doesn't have an initial value", ->
    expectPropertyEvents(
      ->
        left = series(1, [1, 2, 3]).toProperty()
        left.startWith('pow')
      ['pow', 1, 2, 3], unstable)
  describe "works with synchronous source", ->
    expectPropertyEvents(
      ->
        left = Bacon.fromArray([1, 2, 3]).toProperty()
        left.startWith('pow')
      ['pow', 1, 2, 3], unstable)
  describe "starts with the initial value of the Property if any", ->
    expectPropertyEvents(
      ->
        left = series(1, [1, 2, 3]).toProperty(0)
        left.startWith('pow')
      [0, 1, 2, 3], unstable)
  # Was `describe` with inline assertions, which executed at suite-definition
  # time instead of as a reported test case; converted to `it` to match the
  # sibling "combineAsArray.startWith" test. Also stopped reusing `result`
  # for both the observable and the captured value.
  it "works with combineAsArray", ->
    result = null
    a = Bacon.constant("lolbal")
    # constants deliver synchronously, so onValue fires before the expect
    stream = Bacon.combineAsArray([a.map(true), a.map(true)]).map("right").startWith("wrong")
    stream.onValue((x) -> result = x)
    expect(result).to.equal("right")
  it "toString", ->
    expect(Bacon.constant(2).startWith(1).toString()).to.equal("Bacon.constant(2).startWith(1)")
# toProperty: converts a stream into a Property, optionally seeding an
# initial value; current value is replayed to new subscribers.
describe "EventStream.toProperty", ->
  describe "delivers current value and changes to subscribers", ->
    expectPropertyEvents(
      ->
        s = new Bacon.Bus()
        p = s.toProperty("a")
        # soon defers the pushes so the subscriber is attached first
        soon ->
          s.push "b"
          s.end()
        p
      ["a", "b"])
  describe "passes through also Errors", ->
    expectPropertyEvents(
      -> series(1, [1, error(), 2]).toProperty()
      [1, error(), 2])
  # null must be treated as a real value, not as "no value"
  describe "supports null as value", ->
    expectPropertyEvents(
      -> series(1, [null, 1, null]).toProperty(null)
      [null, null, 1, null])
  # a subscriber that immediately returns Bacon.noMore must not corrupt the
  # event sequence seen by later subscribers
  describe "does not get messed-up by a transient subscriber (bug fix)", ->
    expectPropertyEvents(
      ->
        prop = series(1, [1,2,3]).toProperty(0)
        prop.subscribe (event) =>
          Bacon.noMore
        prop
      [0, 1, 2, 3])
  describe "works with synchronous source", ->
    expectPropertyEvents(
      -> Bacon.fromArray([1,2,3]).toProperty()
      [1,2,3])
    expectPropertyEvents(
      -> Bacon.fromArray([1,2,3]).toProperty(0)
      [0,1,2,3])
  # skip(4) discards the first four values, so the lazy map must only be
  # forced for the one value actually delivered
  it "preserves laziness", ->
    calls = 0
    id = (x) ->
      calls++
      x
    Bacon.fromArray([1,2,3,4,5]).map(id).toProperty().skip(4).onValue()
    expect(calls).to.equal(1)
# toEventStream: the Property's current value becomes the first stream event.
describe "Property.toEventStream", ->
  describe "creates a stream that starts with current property value", ->
    expectStreamEvents(
      -> series(1, [1, 2]).toProperty(0).toEventStream()
      [0, 1, 2], unstable)
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2]).toProperty(0).toEventStream()
      [0, 1, 2], unstable)
# Property.toProperty: identity on Properties; passing an initial value to it
# is an error.
describe "Property.toProperty", ->
  describe "returns the same Property", ->
    expectPropertyEvents(
      -> Bacon.constant(1).toProperty()
      [1])
  # expects toProperty(0) on a Property to throw; the empty catch swallows
  # the expected exception, and fail() (test helper, presumably defined
  # elsewhere — TODO confirm) marks the test failed if nothing was thrown
  it "rejects arguments", ->
    try
      Bacon.constant(1).toProperty(0)
      fail()
    catch e
# Property.map: transforms values; `times` with the extra arg 2 is a partial
# application (map(f, arg) style supported by Bacon).
describe "Property.map", ->
  describe "maps property values", ->
    expectPropertyEvents(
      ->
        s = new Bacon.Bus()
        p = s.toProperty(1).map(times, 2)
        soon ->
          s.push 2
          s.error()
          s.end()
        p
      [2, 4, error()])
# Property.filter: drops non-matching updates while keeping the last matching
# value as the current value.
describe "Property.filter", ->
  describe "should filter values", ->
    expectPropertyEvents(
      -> series(1, [1, error(), 2, 3]).toProperty().filter(lessThan(3))
      [1, error(), 2])
  it "preserves old current value if the updated value is non-matching", ->
    s = new Bacon.Bus()
    p = s.toProperty().filter(lessThan(2))
    p.onValue(=>) # to ensure that property is actualy updated
    s.push(1)
    s.push(2)
    values = []
    # a late subscriber should still see 1, since 2 was filtered out
    p.onValue((v) => values.push(v))
    expect(values).to.deep.equal([1])
  # filter(property): pass values through only while the boolean Property
  # holds true
  describe "can filter by Property value", ->
    expectPropertyEvents(
      ->
        src = series(2, [1, 2, 3, 4]).delay(t(1)).toProperty()
        ok = series(2, [false, true, true, false]).toProperty()
        src.filter(ok)
      [2, 3])
# take(1) on a Property: delivers the Initial event if there is one,
# otherwise the first Next, then ends — even on never-ending sources.
describe "Property.take(1)", ->
  describe "takes the Initial event", ->
    expectPropertyEvents(
      -> series(1, [1,2,3]).toProperty(0).take(1)
      [0])
  describe "takes the first Next event, if no Initial value", ->
    expectPropertyEvents(
      -> series(1, [1,2,3]).toProperty().take(1)
      [1])
  describe "works for constants", ->
    expectPropertyEvents(
      -> Bacon.constant(1)
      [1])
  describe "works for never-ending Property", ->
    expectPropertyEvents(
      -> repeat(1, [1,2,3]).toProperty(0).take(1)
      [0])
    expectPropertyEvents(
      -> repeat(1, [1,2,3]).toProperty().take(1)
      [1])
describe "Bacon.once().take(1)", ->
  describe "works", ->
    expectStreamEvents(
      -> Bacon.once(1).take(1)
      [1])
# takeWhile: pass values while the predicate (function, extractor string, or
# boolean Property) holds; errors pass through.
describe "Property.takeWhile", ->
  describe "takes while predicate is true", ->
    expectPropertyEvents(
      ->
        series(1, [1, error("wat"), 2, 3])
          .toProperty().takeWhile(lessThan(3))
      [1, error("wat"), 2])
  describe "extracts field values", ->
    expectPropertyEvents(
      ->
        series(1, [{good:true, value:"yes"}, {good:false, value:"no"}])
          .toProperty().takeWhile(".good").map(".value")
      ["yes"])
  # takeWhile(property): stop at the first value for which the derived
  # boolean property (x % 2, truthy for odd) is falsy
  describe "can filter by Property value", ->
    expectPropertyEvents(
      ->
        src = series(1, [1,1,2,3,4,4,8,7]).toProperty()
        odd = src.map((x) -> x % 2)
        src.takeWhile(odd)
      [1,1])
  describe "works with never-ending Property", ->
    expectPropertyEvents(
      ->
        repeat(1, [1, error("wat"), 2, 3])
          .toProperty().takeWhile(lessThan(3))
      [1, error("wat"), 2])
# takeUntil on a Property: like the stream version, stops at the stopper's
# first event; source errors before the stop are included.
describe "Property.takeUntil", ->
  describe "takes elements from source until an event appears in the other stream", ->
    expectPropertyEvents(
      -> series(2, [1,2,3]).toProperty().takeUntil(Bacon.later(t(3)))
      [1])
  describe "works with errors", ->
    expectPropertyEvents(
      ->
        src = repeat(2, [1, error(), 3])
        stopper = repeat(5, ["stop!"])
        src.toProperty(0).takeUntil(stopper)
      [0, 1, error()])
  it "toString", ->
    expect(Bacon.constant(1).takeUntil(Bacon.never()).toString()).to.equal("Bacon.constant(1).takeUntil(Bacon.never())")
# Property.delay: delays change events but delivers the initial value
# immediately.
describe "Property.delay", ->
  describe "delivers initial value and changes", ->
    expectPropertyEvents(
      -> series(1, [1,2,3]).toProperty(0).delay(t(1))
      [0,1,2,3])
  describe "delays changes", ->
    expectStreamEvents(
      ->
        series(2, [1,2,3])
          .toProperty()
          .delay(t(2)).changes().takeUntil(Bacon.later(t(5)))
      [1], unstable)
  # the takeUntil at t(2) cuts off before any delayed change could arrive,
  # so only the (undelayed) initial 0 is seen
  describe "does not delay initial value", ->
    expectPropertyEvents(
      -> series(3, [1]).toProperty(0).delay(1).takeUntil(Bacon.later(t(2)))
      [0])
  it "toString", ->
    expect(Bacon.constant(0).delay(1).toString()).to.equal("Bacon.constant(0).delay(1)")
# Property.debounce: debounces changes; the initial value is not debounced.
describe "Property.debounce", ->
  describe "delivers initial value and changes", ->
    expectPropertyEvents(
      -> series(2, [1,2,3]).toProperty(0).debounce(t(1))
      [0,1,2,3])
  describe "throttles changes, but not initial value", ->
    expectPropertyEvents(
      -> series(1, [1,2,3]).toProperty(0).debounce(t(4))
      [0,3])
  describe "works without initial value", ->
    expectPropertyEvents(
      -> series(2, [1,2,3]).toProperty().debounce(t(4))
      [3])
  describe "works with Bacon.constant (bug fix)", ->
    expectPropertyEvents(
      -> Bacon.constant(1).debounce(1)
      [1])
  it "toString", ->
    expect(Bacon.constant(0).debounce(1).toString()).to.equal("Bacon.constant(0).debounce(1)")
# Property.throttle: rate-limits changes; initial value passes immediately.
describe "Property.throttle", ->
  describe "throttles changes, but not initial value", ->
    expectPropertyEvents(
      -> series(1, [1,2,3]).toProperty(0).throttle(t(4))
      [0,3])
  describe "works with Bacon.once (bug fix)", ->
    expectPropertyEvents(
      -> Bacon.once(1).toProperty().throttle(1)
      [1])
  it "toString", ->
    expect(Bacon.constant(0).throttle(1).toString()).to.equal("Bacon.constant(0).throttle(1)")
# Property.endOnError: ends the Property at the first Error.
describe "Property.endOnError", ->
  describe "terminates on Error", ->
    expectPropertyEvents(
      -> series(2, [1, error(), 2]).toProperty().endOnError()
      [1, error()])
# Property.awaiting: same semantics as the stream version above.
describe "Property.awaiting(other)", ->
  describe "indicates whether p1 has produced output after p2 (or only the former has output so far)", ->
    expectPropertyEvents(
      -> series(2, [1, 1]).toProperty().awaiting(series(3, [2]))
      [false, true, false, true])
# skipDuplicates: drops consecutive equal values; errors pass through and do
# not reset the comparison.
describe "Property.skipDuplicates", ->
  describe "drops duplicates", ->
    expectPropertyEvents(
      -> series(1, [1, 2, error(), 2, 3, 1]).toProperty(0).skipDuplicates()
      [0, 1, 2, error(), 3, 1])
  # regression for #211: the value pushed before subscription must still be
  # delivered as the initial value to each fresh take(1) subscription
  describe "Doesn't skip initial value (bug fix #211)", ->
    b = new Bacon.Bus()
    p = b.toProperty()
    p.onValue -> # force property update
    s = p.skipDuplicates()
    b.push 'foo'
    describe "series 1", ->
      expectPropertyEvents((-> s.take(1)), ["foo"])
    describe "series 2", ->
      expectPropertyEvents((-> s.take(1)), ["foo"])
    describe "series 3", ->
      expectPropertyEvents((-> s.take(1)), ["foo"])
# changes: an EventStream of the Property's updates, excluding the initial
# value.
describe "Property.changes", ->
  describe "sends property change events", ->
    expectStreamEvents(
      ->
        s = new Bacon.Bus()
        p = s.toProperty("a").changes()
        soon ->
          s.push "b"
          s.error()
          s.end()
        p
      ["b", error()])
  describe "works with synchronous source", ->
    expectStreamEvents(
      -> Bacon.fromArray([1, 2, 3]).toProperty(0).changes()
      [1, 2, 3])
# Property.combine: combines latest values with a function or a method-name
# string (".concat"); errors from either side pass through.
describe "Property.combine", ->
  describe "combines latest values of two properties, with given combinator function, passing through errors", ->
    expectPropertyEvents(
      ->
        left = series(2, [1, error(), 2, 3]).toProperty()
        right = series(2, [4, error(), 5, 6]).delay(t(1)).toProperty()
        left.combine(right, add)
      [5, error(), error(), 6, 7, 8, 9])
  describe "also accepts a field name instead of combinator function", ->
    expectPropertyEvents(
      ->
        left = series(1, [[1]]).toProperty()
        right = series(2, [[2]]).toProperty()
        left.combine(right, ".concat")
      [[1, 2]])
  describe "combines with null values", ->
    expectPropertyEvents(
      ->
        left = series(1, [null]).toProperty()
        right = series(1, [null]).toProperty()
        left.combine(right, (l, r)-> [l, r])
      [[null, null]])
  # noMore returned from the initial-event callback must unsubscribe before
  # any further values are delivered
  it "unsubscribes when initial value callback returns Bacon.noMore", ->
    calls = 0
    bus = new Bacon.Bus()
    other = Bacon.constant(["rolfcopter"])
    bus.toProperty(["lollerskates"]).combine(other, ".concat").subscribe (e) ->
      if !e.isInitial()
        calls += 1
        Bacon.noMore
    bus.push(["fail whale"])
    expect(calls).to.equal 0
  describe "does not duplicate same error from two streams", ->
    expectPropertyEvents(
      ->
        src = series(1, ["same", error()])
        Bacon.combineAsArray(src, src)
      [["same", "same"], error()])
  it "toString", ->
    expect(Bacon.constant(1).combine(Bacon.constant(2), (->)).toString()).to.equal("Bacon.constant(1).combine(Bacon.constant(2),function)")
  # guards against for..in style iteration picking up inherited Array props
  describe "with random methods on Array.prototype", ->
    it "doesn't throw exceptions", ->
      try
        Array.prototype.foo = "bar"
        events = []
        Bacon.once("a").combine(Bacon.once("b"), (a,b) -> [a,b]).onValue (v) ->
          events.push(v)
        expect(events).to.deep.equal([["a", "b"]])
      finally
        delete Array.prototype.foo
# EventStream.combine: implicitly converts the stream to a Property first.
describe "EventStream.combine", ->
  describe "converts stream to Property, then combines", ->
    expectPropertyEvents(
      ->
        left = series(2, [1, error(), 2, 3])
        right = series(2, [4, error(), 5, 6]).delay(t(1)).toProperty()
        left.combine(right, add)
      [5, error(), error(), 6, 7, 8, 9])
# groupSimultaneous: collects values that occur on the same tick into one
# event, with one array slot per source.
describe "Bacon.groupSimultaneous", ->
  describe "groups simultaneous values in to arrays", ->
    expectStreamEvents(
      ->
        src = series(1, [1,2])
        stream = src.merge(src.map((x) -> x * 2))
        Bacon.groupSimultaneous(stream)
      [[[1, 2]], [[2,4]]])
  describe "groups simultaneous values from multiple sources in to arrays", ->
    expectStreamEvents(
      ->
        src = series(1, [1,2])
        stream = src.merge(src.map((x) -> x * 2))
        stream2 = src.map (x) -> x * 4
        Bacon.groupSimultaneous(stream, stream2)
      [[[1, 2], [4]], [[2,4], [8]]])
  # a source with no value on a tick contributes an empty slot
  describe "accepts an array or multiple args", ->
    expectStreamEvents(
      -> Bacon.groupSimultaneous([Bacon.later(1, 1), Bacon.later(2, 2)])
      [[[1],[]], [[], [2]]])
  describe "returns empty stream for zero sources", ->
    expectStreamEvents(
      -> Bacon.groupSimultaneous()
      [])
    expectStreamEvents(
      -> Bacon.groupSimultaneous([])
      [])
  describe "works with synchronous sources", ->
    expectStreamEvents(
      -> Bacon.groupSimultaneous(Bacon.fromArray([1,2]))
      [[[1]], [[2]]])
    expectStreamEvents(
      -> Bacon.groupSimultaneous(Bacon.fromArray([1,2]).mapEnd(3))
      [[[1]], [[2]], [[3]]])
  it "toString", ->
    expect(Bacon.groupSimultaneous(Bacon.never()).toString()).to.equal("Bacon.groupSimultaneous(Bacon.never())")
# Atomic updates: when one source update fans out to several derived
# observables that are then re-combined, subscribers must see a single
# consistent combined value per source event (e.g. [2, 4], never a torn
# intermediate like [1+2]=3).
describe "Property update is atomic", ->
  describe "in a diamond-shaped combine() network", ->
    expectPropertyEvents(
      ->
        a = series(1, [1, 2]).toProperty()
        b = a.map (x) -> x
        c = a.map (x) -> x
        b.combine(c, (x, y) -> x + y)
      [2, 4])
  describe "in a triangle-shaped combine() network", ->
    expectPropertyEvents(
      ->
        a = series(1, [1, 2]).toProperty()
        b = a.map (x) -> x
        a.combine(b, (x, y) -> x + y)
      [2, 4])
  describe "when filter is involved", ->
    expectPropertyEvents(
      ->
        a = series(1, [1, 2]).toProperty()
        b = a.map((x) -> x).filter(true)
        a.combine(b, (x, y) -> x + y)
      [2, 4])
  describe "when root property is based on combine*", ->
    expectPropertyEvents(
      ->
        a = series(1, [1, 2]).toProperty().combine(Bacon.constant(0), (x, y) -> x)
        b = a.map (x) -> x
        c = a.map (x) -> x
        b.combine(c, (x, y) -> x + y)
      [2, 4])
  describe "when root is not a Property", ->
    expectPropertyEvents(
      ->
        a = series(1, [1, 2])
        b = a.map (x) -> x
        c = a.map (x) -> x
        b.combine(c, (x, y) -> x + y)
      [2, 4])
  # exactly one combinator invocation per consistent combo: two source pushes
  # -> two calls, results 1+2=3 and 2+4=6
  it "calls combinator function for valid combos only", ->
    calls = 0
    results = []
    combinator = (x,y) ->
      calls++
      x+y
    src = new Bacon.Bus()
    prop = src.toProperty()
    out = prop.map((x) -> x)
      .combine(prop.map((x) -> x * 2), combinator)
      .doAction(->)
      .combine(prop, (x,y) -> x)
    out.onValue((x) -> results.push(x))
    src.push(1)
    src.push(2)
    expect(results).to.deep.equal([3,6])
    expect(calls).to.equal(2)
  describe "yet respects subscriber return values (bug fix)", ->
    expectStreamEvents(
      -> Bacon.repeatedly(t(1), [1, 2, 3]).toProperty().changes().take(1)
      [1])
  # Observables created INSIDE a subscriber callback must still behave
  # normally (synchronous initial delivery etc.) even though a dispatch is
  # already in progress.
  describe "independent observables created within the dispatch loop", ->
    it "combineAsArray", ->
      calls = 0
      Bacon.once(1).onValue ->
        Bacon.combineAsArray([Bacon.constant(1)]).onValue ->
          calls++
      expect(calls).to.equal(1)
    it "combineAsArray.startWith", ->
      result = null
      Bacon.once(1).onValue ->
        a = Bacon.constant("lolbal")
        s = Bacon.combineAsArray([a, a]).map("right").startWith("wrong");
        s.onValue((x) -> result = x)
      expect(result).to.equal("right")
    it "stream.startWith", ->
      result = null
      Bacon.once(1).onValue ->
        s = Bacon.later(1).startWith(0)
        s.onValue((x) -> result = x)
      expect(result).to.equal(0)
    it "combineAsArray.changes.startWith", ->
      result = null
      Bacon.once(1).onValue ->
        a = Bacon.constant("lolbal")
        s = Bacon.combineAsArray([a, a]).changes().startWith("right")
        s.onValue((x) -> result = x)
      expect(result).to.equal("right")
    it "flatMap", ->
      result = null
      Bacon.once(1).onValue ->
        a = Bacon.constant("lolbal")
        s = a.flatMap((x) -> Bacon.once(x))
        s.onValue((x) -> result = x)
      expect(result).to.equal("lolbal")
    it "awaiting", ->
      result = null
      Bacon.once(1).onValue ->
        a = Bacon.constant(1)
        s = a.awaiting(a.map(->))
        s.onValue((x) -> result = x)
      expect(result).to.equal(false)
    it "concat", ->
      result = []
      Bacon.once(1).onValue ->
        s = Bacon.once(1).concat(Bacon.once(2))
        s.onValue((x) -> result.push(x))
      expect(result).to.deep.equal([1,2])
    it "Property.delay", ->
      result = []
      Bacon.once(1).onValue ->
        c = Bacon.constant(1)
        s = Bacon.combineAsArray([c, c]).delay(1).map(".0")
        s.onValue((x) -> result.push(x))
      expect(result).to.deep.equal([1])
  # Subscribing to an already-live Property from inside a dispatch must see
  # the up-to-date value, not a stale or bounced one.
  describe "when subscribing within the dispatch loop", ->
    describe "single subscriber", ->
      describe "up-to-date values are used (skipped bounce)", ->
        expectStreamEvents(
          ->
            src = series(1, [1,2])
            trigger = src.map((x) -> x)
            trigger.onValue ->
            value = src.toProperty()
            value.onValue ->
            trigger.flatMap ->
              value.take(1)
          [1,2])
      describe "delayed bounce (TODO: how to name better)", ->
        expectStreamEvents(
          ->
            src = series(1, [1,2])
            trigger = src.map((x) -> x)
            trigger.onValue ->
            value = src.filter((x) -> x == 1).toProperty(0)
            value.onValue ->
            trigger.flatMap ->
              value.take(1)
          [0, 1])
    describe "multiple subscribers", ->
      describe "up-to-date values are used (skipped bounce)", ->
        expectStreamEvents(
          ->
            src = series(1, [1,2])
            trigger = src.map((x) -> x)
            trigger.onValue ->
            value = src.toProperty()
            value.onValue ->
            trigger.flatMap ->
              value.onValue(->)
              value.take(1)
          [1,2])
      describe "delayed bounce (TODO: how to name better)", ->
        expectStreamEvents(
          ->
            src = series(1, [1,2])
            trigger = src.map((x) -> x)
            trigger.onValue ->
            value = src.filter((x) -> x == 1).toProperty(0)
            value.onValue ->
            trigger.flatMap ->
              value.onValue(->)
              value.take(1)
          [0, 1])
    # subscribing to a derived property after its root has ended must still
    # deliver the final value, then End
    describe "delayed bounce in case Property ended (bug fix)", ->
      expectStreamEvents(
        ->
          bus = new Bacon.Bus()
          root = Bacon.once(0).toProperty()
          root.onValue ->
          Bacon.later(1).onValue ->
            root.map(-> 1).subscribe (event) ->
              if event.isEnd()
                bus.end()
              else
                bus.push(event.value())
          bus
        [1])
    describe "poking for errors 2", ->
      expectStreamEvents(
        ->
          bus = new Bacon.Bus()
          root = Bacon.sequentially(1, [1,2]).toProperty()
          root.subscribe (event) ->
          outdatedChild = root.filter((x) -> x == 1).map((x) -> x)
          outdatedChild.onValue(->) # sets value but will be outdated at value 2
          Bacon.later(3).onValue ->
            outdatedChild.subscribe (event) ->
              if event.isEnd()
                bus.end()
              else
                bus.push(event.value())
          bus
        [1]
      )
# combineAsArray: combines latest values of properties/streams/constants into
# a Property of arrays; the combined value is delivered as an Initial event.
describe "Bacon.combineAsArray", ->
  describe "initial value", ->
    event = null
    before ->
      prop = Bacon.constant(1)
      Bacon.combineAsArray(prop).subscribe (x) ->
        event = x if x.hasValue()
    it "is output as Initial event", ->
      expect(event.isInitial()).to.equal(true)
  describe "combines properties and latest values of streams, into a Property having arrays as values", ->
    expectPropertyEvents(
      ->
        stream = series(1, ["a", "b"])
        Bacon.combineAsArray([Bacon.constant(1), Bacon.constant(2), stream])
      [[1, 2, "a"], [1, 2, "b"]])
  describe "Works with streams provided as a list of arguments as well as with a single array arg", ->
    expectPropertyEvents(
      ->
        stream = series(1, ["a", "b"])
        Bacon.combineAsArray(Bacon.constant(1), Bacon.constant(2), stream)
      [[1, 2, "a"], [1, 2, "b"]])
  describe "works with single property", ->
    expectPropertyEvents(
      ->
        Bacon.combineAsArray([Bacon.constant(1)])
      [[1]])
  describe "works with single stream", ->
    expectPropertyEvents(
      ->
        Bacon.combineAsArray([Bacon.once(1)])
      [[1]])
  # array-valued sources must not be flattened into the combined array
  describe "works with arrays as values, with first array being empty (bug fix)", ->
    expectPropertyEvents(
      ->
        Bacon.combineAsArray([Bacon.constant([]), Bacon.constant([1])])
      ([[[], [1]]]))
  describe "works with arrays as values, with first array being non-empty (bug fix)", ->
    expectPropertyEvents(
      ->
        Bacon.combineAsArray([Bacon.constant([1]), Bacon.constant([2])])
      ([[[1], [2]]]))
  describe "works with empty array", ->
    expectPropertyEvents(
      -> Bacon.combineAsArray([])
      [[]])
  describe "works with empty args list", ->
    expectPropertyEvents(
      -> Bacon.combineAsArray()
      [[]])
  describe "accepts constant values instead of Observables", ->
    expectPropertyEvents(
      -> Bacon.combineAsArray(Bacon.constant(1), 2, 3)
      [[1,2,3]])
  # skip(4) drops the first four combined values, so the lazy map must only
  # be forced once
  it "preserves laziness", ->
    calls = 0
    id = (x) ->
      calls++
      x
    Bacon.combineAsArray(Bacon.fromArray([1,2,3,4,5]).map(id)).skip(4).onValue()
    expect(calls).to.equal(1)
  it "toString", ->
    expect(Bacon.combineAsArray(Bacon.never()).toString()).to.equal("Bacon.combineAsArray(Bacon.never())")
# combineWith: n-ary combine with an explicit combinator as the first arg;
# plain values are accepted alongside observables.
describe "Bacon.combineWith", ->
  describe "combines n properties, streams and constants using an n-ary function", ->
    expectPropertyEvents(
      ->
        stream = series(1, [1, 2])
        f = (x, y, z) -> x + y + z
        Bacon.combineWith(f, stream, Bacon.constant(10), 100)
      [111, 112])
  describe "works with single input", ->
    expectPropertyEvents(
      ->
        stream = series(1, [1, 2])
        f = (x) -> x * 2
        Bacon.combineWith(f, stream)
      [2, 4])
  describe "works with 0 inputs (results to a constant)", ->
    expectPropertyEvents(
      ->
        Bacon.combineWith(-> 1)
      [1])
  it "toString", ->
    expect(Bacon.combineWith((->), Bacon.never()).toString()).to.equal("Bacon.combineWith(function,Bacon.never())")
# and/or/not on boolean Properties; and/or also accept plain constants.
describe "Boolean logic", ->
  describe "combines Properties with and()", ->
    expectPropertyEvents(
      -> Bacon.constant(true).and(Bacon.constant(false))
      [false])
  describe "combines Properties with or()", ->
    expectPropertyEvents(
      -> Bacon.constant(true).or(Bacon.constant(false))
      [true])
  describe "inverts property with not()", ->
    expectPropertyEvents(
      -> Bacon.constant(true).not()
      [false])
  describe "accepts constants instead of properties", ->
    describe "true and false", ->
      expectPropertyEvents(
        -> Bacon.constant(true).and(false)
        [false])
    describe "true and true", ->
      expectPropertyEvents(
        -> Bacon.constant(true).and(true)
        [true])
    describe "true or false", ->
      expectPropertyEvents(
        -> Bacon.constant(true).or(false)
        [true])
  it "toString", ->
    expect(Bacon.constant(1).and(Bacon.constant(2).not()).or(Bacon.constant(3)).toString()).to.equal("Bacon.constant(1).and(Bacon.constant(2).not()).or(Bacon.constant(3))")
describe "Bacon.mergeAll", ->
describe ("merges all given streams"), ->
expectStreamEvents(
->
Bacon.mergeAll([
series(3, [1, 2])
series(3, [3, 4]).delay(t(1))
series(3, [5, 6]).delay(t(2))])
[1, 3, 5, 2, 4, 6], unstable)
describe ("supports n-ary syntax"), ->
expectStreamEvents(
->
Bacon.mergeAll(
series(3, [1, 2])
series(3, [3, 4]).delay(t(1))
series(3, [5, 6]).delay(t(2)))
[1, 3, 5, 2, 4, 6], unstable)
describe "works with a single stream", ->
expectStreamEvents(
-> Bacon.mergeAll([Bacon.once(1)])
[1])
expectStreamEvents(
-> Bacon.mergeAll(Bacon.once(1))
[1])
describe "returns empty stream for zero input", ->
expectStreamEvents(
-> Bacon.mergeAll([])
[])
expectStreamEvents(
-> Bacon.mergeAll()
[])
it "toString", ->
expect(Bacon.mergeAll(Bacon.never()).toString()).to.equal("Bacon.mergeAll(Bacon.never())")
describe "Property.sampledBy(stream)", ->
describe "samples property at events, resulting to EventStream", ->
expectStreamEvents(
->
prop = series(2, [1, 2]).toProperty()
stream = repeat(3, ["troll"]).take(4)
prop.sampledBy(stream)
[1, 2, 2, 2])
describe "includes errors from both Property and EventStream", ->
expectStreamEvents(
->
prop = series(2, [error(), 2]).toProperty()
stream = series(3, [error(), "troll"])
prop.sampledBy(stream)
[error(), error(), 2])
describe "ends when sampling stream ends", ->
expectStreamEvents(
->
prop = repeat(2, [1, 2]).toProperty()
stream = repeat(2, [""]).delay(t(1)).take(4)
prop.sampledBy(stream)
[1, 2, 1, 2])
describe "accepts optional combinator function f(Vp, Vs)", ->
expectStreamEvents(
->
prop = series(2, ["a", "b"]).toProperty()
stream = series(2, ["1", "2", "1", "2"]).delay(t(1))
prop.sampledBy(stream, add)
["a1", "b2", "b1", "b2"])
describe "allows method name instead of function too", ->
expectStreamEvents(
->
Bacon.constant([1]).sampledBy(Bacon.once([2]), ".concat")
[[1, 2]])
describe "works with same origin", ->
expectStreamEvents(
->
src = series(2, [1, 2])
src.toProperty().sampledBy(src)
[1, 2])
expectStreamEvents(
->
src = series(2, [1, 2])
src.toProperty().sampledBy(src.map(times, 2))
[1, 2])
describe "uses updated property after combine", ->
latter = (a, b) -> b
expectPropertyEvents(
->
src = series(2, ["b", "c"]).toProperty("a")
combined = Bacon.constant().combine(src, latter)
src.sampledBy(combined, add)
["aa", "bb", "cc"])
describe "uses updated property after combine with subscriber", ->
latter = (a, b) -> b
expectPropertyEvents(
->
src = series(2, ["b", "c"]).toProperty("a")
combined = Bacon.constant().combine(src, latter)
combined.onValue(->)
src.sampledBy(combined, add)
["aa", "bb", "cc"])
describe "skips samplings that occur before the property gets its first value", ->
expectStreamEvents(
->
p = series(5, [1]).toProperty()
p.sampledBy(series(3, [0]))
[])
expectStreamEvents(
->
p = series(5, [1, 2]).toProperty()
p.sampledBy(series(3, [0, 0, 0, 0]))
[1, 1, 2], unstable)
expectPropertyEvents(
->
p = series(5, [1, 2]).toProperty()
p.sampledBy(series(3, [0, 0, 0, 0]).toProperty())
[1, 1, 2], unstable)
describe "works with stream of functions", ->
f = ->
expectStreamEvents(
->
p = series(1, [f]).toProperty()
p.sampledBy(series(1, [1, 2, 3]))
[f, f, f])
describe "works with synchronous sampler stream", ->
expectStreamEvents(
-> Bacon.constant(1).sampledBy(Bacon.fromArray([1,2,3]))
[1,1,1], unstable)
expectStreamEvents(
-> Bacon.later(1, 1).toProperty().sampledBy(Bacon.fromArray([1,2,3]))
[])
describe "laziness", ->
calls = 0
before (done) ->
id = (x) ->
calls++
x
sampler = Bacon.later(5).map(id)
property = repeat(1, [1]).toProperty().map(id)
sampled = property.sampledBy sampler
sampled.onValue()
sampled.onEnd(done)
it "preserves laziness", ->
expect(calls).to.equal(1)
it "toString", ->
expect(Bacon.constant(0).sampledBy(Bacon.never()).toString()).to.equal("Bacon.constant(0).sampledBy(Bacon.never(),function)")
describe "Property.sampledBy(property)", ->
describe "samples property at events, resulting to a Property", ->
expectPropertyEvents(
->
prop = series(2, [1, 2]).toProperty()
sampler = repeat(3, ["troll"]).take(4).toProperty()
prop.sampledBy(sampler)
[1, 2, 2, 2])
describe "works on an event stream by automatically converting to property", ->
expectPropertyEvents(
->
stream = series(2, [1, 2])
sampler = repeat(3, ["troll"]).take(4).toProperty()
stream.sampledBy(sampler)
[1, 2, 2, 2])
describe "accepts optional combinator function f(Vp, Vs)", ->
expectPropertyEvents(
->
prop = series(2, ["a", "b"]).toProperty()
sampler = series(2, ["1", "2", "1", "2"]).delay(t(1)).toProperty()
prop.sampledBy(sampler, add)
["a1", "b2", "b1", "b2"])
describe "Property.sample", ->
describe "samples property by given interval", ->
expectStreamEvents(
->
prop = series(2, [1, 2]).toProperty()
prop.sample(t(3)).take(4)
[1, 2, 2, 2])
describe "includes all errors", ->
expectStreamEvents(
->
prop = series(2, [1, error(), 2]).toProperty()
prop.sample(t(5)).take(2)
[error(), 1, 2], unstable)
describe "works with synchronous source", ->
expectStreamEvents(
->
prop = Bacon.constant(1)
prop.sample(t(3)).take(4)
[1, 1, 1, 1])
it "toString", ->
expect(Bacon.constant(0).sample(1).toString()).to.equal("Bacon.constant(0).sample(1)")
describe "EventStream.errors", ->
describe "Includes errors only", ->
expectStreamEvents(
-> series(1, [1, error(), 2]).errors()
[error()])
it "toString", ->
expect(Bacon.never().errors().toString()).to.equal("Bacon.never().errors()")
describe "EventStream.scan", ->
describe "accumulates values with given seed and accumulator function, passing through errors", ->
expectPropertyEvents(
-> series(1, [1, 2, error(), 3]).scan(0, add)
[0, 1, 3, error(), 6])
describe "also works with method name", ->
expectPropertyEvents(
-> series(1, [[1], [2]]).scan([], ".concat")
[[], [1], [1, 2]])
it "yields the seed value immediately", ->
outputs = []
bus = new Bacon.Bus()
bus.scan(0, -> 1).onValue((value) -> outputs.push(value))
expect(outputs).to.deep.equal([0])
describe "yields null seed value", ->
expectPropertyEvents(
-> series(1, [1]).scan(null, ->1)
[null, 1])
describe "works with synchronous streams", ->
expectPropertyEvents(
-> Bacon.fromArray([1,2,3]).scan(0, ((x,y)->x+y))
[0,1,3,6])
describe "calls accumulator function once per value", ->
count = 0
expectPropertyEvents(
-> series(2, [1,2,3]).scan(0, (x,y) -> count++; x + y)
[0, 1, 3, 6]
)
it "calls accumulator once per value", ->
expect(count).to.equal(3)
describe "EventStream.fold", ->
describe "folds stream into a single-valued Property, passes through errors", ->
expectPropertyEvents(
-> series(1, [1, 2, error(), 3]).fold(0, add)
[error(), 6])
describe "has reduce as synonym", ->
expectPropertyEvents(
-> series(1, [1, 2, error(), 3]).fold(0, add)
[error(), 6])
describe "works with synchronous source", ->
expectPropertyEvents(
-> Bacon.fromArray([1, 2, error(), 3]).fold(0, add)
[error(), 6])
describe "Property.scan", ->
describe "with Init value, starts with f(seed, init)", ->
expectPropertyEvents(
-> series(1, [2,3]).toProperty(1).scan(0, add)
[1, 3, 6])
describe "without Init value, starts with seed", ->
expectPropertyEvents(
-> series(1, [2,3]).toProperty().scan(0, add)
[0, 2, 5])
describe "treats null seed value like any other value", ->
expectPropertyEvents(
-> series(1, [1]).toProperty().scan(null, add)
[null, 1])
expectPropertyEvents(
-> series(1, [2]).toProperty(1).scan(null, add)
[1, 3])
describe "for synchronous source", ->
describe "with Init value, starts with f(seed, init)", ->
expectPropertyEvents(
-> Bacon.fromArray([2,3]).toProperty(1).scan(0, add)
[1, 3, 6])
describe "without Init value, starts with seed", ->
expectPropertyEvents(
-> Bacon.fromArray([2,3]).toProperty().scan(0, add)
[0, 2, 5])
describe "works with synchronously responding empty source", ->
expectPropertyEvents(
-> Bacon.never().toProperty(1).scan(0, add)
[1])
describe "EventStream.withStateMachine", ->
f = (sum, event) ->
if event.hasValue()
[sum + event.value(), []]
else if event.isEnd()
[sum, [new Bacon.Next(-> sum), event]]
else
[sum, [event]]
describe "runs state machine on the stream", ->
expectStreamEvents(
-> Bacon.fromArray([1,2,3]).withStateMachine(0, f)
[6])
describe "Property.withStateMachine", ->
describe "runs state machine on the stream", ->
expectPropertyEvents(
-> Bacon.fromArray([1,2,3]).toProperty().withStateMachine(0, (sum, event) ->
if event.hasValue()
[sum + event.value(), []]
else if event.isEnd()
[sum, [new Bacon.Next(-> sum), event]]
else
[sum, [event]])
[6])
describe "Property.fold", ->
describe "Folds Property into a single-valued one", ->
expectPropertyEvents(
-> series(1, [2,3]).toProperty(1).fold(0, add)
[6])
describe "EventStream.diff", ->
describe "apply diff function to previous and current values, passing through errors", ->
expectPropertyEvents(
-> series(1, [1, 2, error(), 3]).diff(0, add)
[1, 3, error(), 5])
describe "also works with method name", ->
expectPropertyEvents(
-> series(1, [[1], [2]]).diff([0], ".concat")
[[0, 1], [1, 2]])
it "does not yields the start value immediately", ->
outputs = []
bus = new Bacon.Bus()
bus.diff(0, -> 1).onValue((value) -> outputs.push(value))
expect(outputs).to.deep.equal([])
it "toString", ->
expect(Bacon.once(1).diff(0, (->)).toString()).to.equal("Bacon.once(1).diff(0,function)")
describe "Property.diff", ->
describe "with Init value, starts with f(start, init)", ->
expectPropertyEvents(
-> series(1, [2,3]).toProperty(1).diff(0, add)
[1, 3, 5])
describe "without Init value, waits for the first value", ->
expectPropertyEvents(
-> series(1, [2,3]).toProperty().diff(0, add)
[2, 5])
describe "treats null start value like any other value", ->
expectPropertyEvents(
-> series(1, [1]).toProperty().diff(null, add)
[1])
expectPropertyEvents(
-> series(1, [2]).toProperty(1).diff(null, add)
[1, 3])
describe "EventStream.zip", ->
describe "pairwise combines values from two streams", ->
expectStreamEvents(
-> series(1, [1, 2, 3]).zip(series(1, ['a', 'b', 'c']))
[[1, 'a'], [2, 'b'], [3, 'c']])
describe "passes through errors", ->
expectStreamEvents(
-> series(2, [1, error(), 2]).zip(series(2, ['a', 'b']).delay(1))
[[1, 'a'], error(), [2, 'b']])
describe "completes as soon as possible", ->
expectStreamEvents(
-> series(1, [1]).zip(series(1, ['a', 'b', 'c']))
[[1, 'a']])
describe "can zip an observable with itself", ->
expectStreamEvents(
->
obs = series(1, ['a', 'b', 'c'])
obs.zip(obs.skip(1))
[['a', 'b'], ['b', 'c']])
it "toString", ->
expect(Bacon.never().zip(Bacon.once(1)).toString()).to.equal("Bacon.never().zip(Bacon.once(1))")
describe "Property.zip", ->
describe "pairwise combines values from two properties", ->
expectStreamEvents(
-> series(1, [1, 2, 3]).toProperty().zip(series(1, ['a', 'b', 'c']).toProperty())
[[1, 'a'], [2, 'b'], [3, 'c']], { unstable })
describe "Bacon.zipAsArray", ->
describe "zips an array of streams into a stream of arrays", ->
expectStreamEvents(
->
obs = series(1, [1, 2, 3, 4])
Bacon.zipAsArray([obs, obs.skip(1), obs.skip(2)])
[[1 , 2 , 3], [2 , 3 , 4]])
describe "supports n-ary syntax", ->
expectStreamEvents(
->
obs = series(1, [1, 2, 3, 4])
Bacon.zipAsArray(obs, obs.skip(1))
[[1 , 2], [2 , 3], [3, 4]])
describe "accepts Properties as well as EventStreams", ->
expectStreamEvents(
->
obs = series(1, [1, 2, 3, 4])
Bacon.zipAsArray(obs, obs.skip(1), Bacon.constant(5))
[[1 , 2, 5]])
describe "works with single stream", ->
expectStreamEvents(
->
obs = series(1, [1, 2])
Bacon.zipAsArray([obs])
[[1], [2]])
expectStreamEvents(
->
obs = series(1, [1, 2])
Bacon.zipAsArray(obs)
[[1], [2]])
describe "works with 0 streams (=Bacon.never())", ->
expectStreamEvents(
-> Bacon.zipAsArray([])
[])
expectStreamEvents(
-> Bacon.zipAsArray()
[])
it "toString", ->
expect(Bacon.zipAsArray(Bacon.never(), Bacon.never()).toString()).to.equal("Bacon.zipAsArray(Bacon.never(),Bacon.never())")
describe "Bacon.zipWith", ->
describe "zips an array of streams with given function", ->
expectStreamEvents(
->
obs = series(1, [1, 2, 3, 4])
Bacon.zipWith([obs, obs.skip(1), obs.skip(2)], ((x,y,z) -> (x + y + z)))
[1 + 2 + 3, 2 + 3 + 4])
describe "supports n-ary syntax", ->
expectStreamEvents(
->
obs = series(1, [1, 2, 3, 4])
f = ((x,y,z) -> (x + y + z))
Bacon.zipWith(f, obs, obs.skip(1), obs.skip(2))
[1 + 2 + 3, 2 + 3 + 4])
describe "works with single stream", ->
expectStreamEvents(
->
obs = series(1, [1,2])
f = (x) -> x * 2
Bacon.zipWith(f, obs)
[1 * 2, 2 * 2])
describe "works with 0 streams (=Bacon.never())", ->
expectStreamEvents(
->
Bacon.zipWith([], ->)
[])
expectStreamEvents(
->
Bacon.zipWith(->)
[])
it "toString", ->
expect(Bacon.zipWith((->), Bacon.never()).toString()).to.equal("Bacon.zipWith(function,Bacon.never())")
describe "Bacon.when", ->
describe "synchronizes on join patterns", ->
expectStreamEvents(
->
[a,b,_] = ['a','b','_']
as = series(1, [a, _, a, a, _, a, _, _, a, a]).filter((x) -> x == a)
bs = series(1, [_, b, _, _, b, _, b, b, _, _]).filter((x) -> x == b)
Bacon.when(
[as, bs], (a,b) -> a + b,
[as], (a) -> a)
['a', 'ab', 'a', 'ab', 'ab', 'ab'], unstable)
describe "consider the join patterns from top to bottom", ->
expectStreamEvents(
->
[a,b,_] = ['a','b','_']
as = series(1, [a, _, a, a, _, a, _, _, a, a]).filter((x) -> x == a)
bs = series(1, [_, b, _, _, b, _, b, b, _, _]).filter((x) -> x == b)
Bacon.when(
[as], (a) -> a,
[as, bs], (a,b) -> a + b)
['a', 'a', 'a', 'a', 'a', 'a'])
describe "handles any number of join patterns", ->
expectStreamEvents(
->
[a,b,c,_] = ['a','b','c','_']
as = series(1, [a, _, a, _, a, _, a, _, _, _, a, a]).filter((x) -> x == a)
bs = series(1, [_, b, _, _, _, b, _, b, _, b, _, _]).filter((x) -> x == b)
cs = series(1, [_, _, _, c, _, _, _, _, c, _, _, _]).filter((x) -> x == c)
Bacon.when(
[as, bs, cs], (a,b,c) -> a + b + c,
[as, bs], (a,b) -> a + b,
[as], (a) -> a)
['a', 'ab', 'a', 'abc', 'abc', 'ab'], unstable)
describe "does'nt synchronize on properties", ->
expectStreamEvents(
->
p = repeat(1, ["p"]).take(100).toProperty()
s = series(3, ["1", "2", "3"])
Bacon.when(
[p,s], (p, s) -> p + s)
["p1", "p2", "p3"])
expectStreamEvents(
->
p = series(3, ["p"]).toProperty()
s = series(1, ["1"])
Bacon.when(
[p,s], (p, s) -> p + s)
[])
expectStreamEvents(
->
p = repeat(1, ["p"]).take(100).toProperty()
s = series(3, ["1", "2", "3"]).toProperty()
Bacon.when(
[p,s], (p, s) -> p + s)
[])
expectStreamEvents(
->
[a,b,c,_] = ['a','b','c','_']
as = series(1, [a, _, a, _, a, _, a, _, _, _, a, _, a]).filter((x) -> x == a)
bs = series(1, [_, b, _, _, _, b, _, b, _, b, _, _, _]).filter((x) -> x == b)
cs = series(1, [_, _, _, c, _, _, _, _, c, _, _, c, _]).filter((x) -> x == c).map(1).scan 0, ((x,y) -> x + y)
Bacon.when(
[as, bs, cs], (a,b,c) -> a + b + c,
[as], (a) -> a)
['a', 'ab0', 'a', 'ab1', 'ab2', 'ab3'], unstable)
describe "doesn't output before properties have values", ->
expectStreamEvents(
->
p = series(2, ["p"])
s = series(1, ["s"])
Bacon.when(
[s, p], (s, p) -> s + p)
["sp"])
describe "returns Bacon.never() on the empty list of patterns", ->
expectStreamEvents(
->
Bacon.when()
[])
describe "returns Bacon.never() when all patterns are zero-length", ->
expectStreamEvents(
->
Bacon.when([], ->)
[])
describe "works with empty patterns", ->
expectStreamEvents(
-> Bacon.when(
[Bacon.once(1)], (x) -> x,
[], ->)
[1])
describe "works with empty patterns (2)", ->
expectStreamEvents(
-> Bacon.when(
[], ->,
[Bacon.once(1)], (x) -> x)
[1])
describe "works with single stream", ->
expectStreamEvents(
-> Bacon.when([Bacon.once(1)], (x) -> x)
[1])
describe "works with multiples of streams", ->
expectStreamEvents(
->
[h,o,c,_] = ['h','o','c','_']
hs = series(1, [h, _, h, _, h, _, h, _, _, _, h, _, h]).filter((x) -> x == h)
os = series(1, [_, o, _, _, _, o, _, o, _, o, _, _, _]).filter((x) -> x == o)
cs = series(1, [_, _, _, c, _, _, _, _, c, _, _, c, _]).filter((x) -> x == c)
Bacon.when(
[hs, hs, os], (h1,h2,o) -> [h1,h2,o],
[cs, os], (c,o) -> [c,o])
[['h', 'h', 'o'], ['c', 'o'], ['h', 'h', 'o'], ['c', 'o']], unstable)
describe "works with multiples of properties", ->
expectStreamEvents(
->
c = Bacon.constant("c")
Bacon.when(
[c, c, Bacon.once(1)], (c1, c2, _) -> c1 + c2)
["cc"])
describe "accepts constants instead of functions too", ->
expectStreamEvents(
-> Bacon.when(Bacon.once(1), 2)
[2])
describe "works with synchronous sources", ->
expectStreamEvents(
->
xs = Bacon.once "x"
ys = Bacon.once "y"
Bacon.when(
[xs, ys], (x, y) -> x + y
)
["xy"])
it "toString", ->
expect(Bacon.when([Bacon.never()], (->)).toString()).to.equal("Bacon.when([Bacon.never()],function)")
describe "Bacon.update", ->
describe "works like Bacon.when, but produces a property, and can be defined in terms of a current value", ->
expectPropertyEvents(
->
[r,i,_] = ['r','i',0]
incr = series(1, [1, _, 1, _, 2, _, 1, _, _, _, 2, _, 1]).filter((x) -> x != _)
reset = series(1, [_, r, _, _, _, r, _, r, _, r, _, _, _]).filter((x) -> x == r)
Bacon.update(
0,
[reset], 0,
[incr], (i,c) -> i+c)
[0, 1, 0, 1, 3, 0, 1, 0, 0, 2, 3])
describe "Correctly handles multiple arguments in parameter list, and synchronous sources", ->
expectPropertyEvents(
->
one = Bacon.once(1)
two = Bacon.once(2)
Bacon.update(
0,
[one, two], (i, a, b) -> [i,a,b])
[0, [0,1,2]])
it "toString", ->
expect(Bacon.update(0, [Bacon.never()], (->)).toString()).to.equal("Bacon.update(0,[Bacon.never()],function)")
describe "combineTemplate", ->
describe "combines streams according to a template object", ->
expectPropertyEvents(
->
firstName = Bacon.constant("ju<NAME>")
lastName = Bacon.constant("<NAME>")
userName = Bacon.constant("mr.bacon")
Bacon.combineTemplate({ userName: userName, password: "<PASSWORD>*****", fullName: { firstName: firstName, lastName: lastName }})
[{ userName: "mr.bacon", password: "<PASSWORD>*****", fullName: { firstName: "juha", lastName: "pa<NAME>" } }])
describe "works with a single-stream template", ->
expectPropertyEvents(
->
bacon = Bacon.constant("bacon")
Bacon.combineTemplate({ favoriteFood: bacon })
[{ favoriteFood: "bacon" }])
describe "works when dynamic part is not the last part (bug fix)", ->
expectPropertyEvents(
->
username = Bacon.constant("raimohanska")
password = Bacon.constant("<PASSWORD>")
Bacon.combineTemplate({url: "/user/login",
data: { username: username, password: <PASSWORD> }, type: "post"})
[url: "/user/login", data: {username: "raimohanska", password: "<PASSWORD>"}, type: "post"])
describe "works with arrays as data (bug fix)", ->
expectPropertyEvents(
-> Bacon.combineTemplate( { x : Bacon.constant([]), y : Bacon.constant([[]]), z : Bacon.constant(["z"])})
[{ x : [], y : [[]], z : ["z"]}])
describe "supports empty object", ->
expectPropertyEvents(
-> Bacon.combineTemplate({})
[{}])
it "supports arrays", ->
value = {key: [{ x: 1 }, { x: 2 }]}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
expect(x.key instanceof Array).to.deep.equal(true) # seems that the former passes even if x is not an array
value = [{ x: 1 }, { x: 2 }]
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
expect(x instanceof Array).to.deep.equal(true)
value = {key: [{ x: 1 }, { x: 2 }], key2: {}}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
expect(x.key instanceof Array).to.deep.equal(true)
value = {key: [{ x: 1 }, { x: Bacon.constant(2) }]}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal({key: [{ x: 1 }, { x: 2 }]})
expect(x.key instanceof Array).to.deep.equal(true) # seems that the former passes even if x is not an array
it "supports nulls", ->
value = {key: null}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
it "supports NaNs", ->
value = {key: NaN}
Bacon.combineTemplate(value).onValue (x) ->
expect(isNaN(x.key)).to.deep.equal(true)
it "supports dates", ->
value = {key: new Date()}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
it "supports regexps", ->
value = {key: /[0-0]/i}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
it "supports functions", ->
value = {key: ->}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
it "toString", ->
expect(Bacon.combineTemplate({ thing: Bacon.never(), const: "a" }).toString()).to.equal("Bacon.combineTemplate({thing:Bacon.never(),const:a})")
describe "Property.decode", ->
describe "switches between source Properties based on property value", ->
expectPropertyEvents(
->
a = Bacon.constant("a")
b = Bacon.constant("b")
c = Bacon.constant("c")
series(1, [1,2,3]).toProperty().decode({1: a, 2: b, 3: c})
["a", "b", "c"])
it "toString", ->
expect(Bacon.constant(1).decode({1: "lol"}).toString()).to.equal("Bacon.constant(1).decode({1:lol})")
describe "EventStream.decode", ->
describe "switches between source Properties based on property value", ->
expectPropertyEvents(
->
a = Bacon.constant("a")
b = Bacon.constant("b")
c = Bacon.constant("c")
series(1, [1,2,3]).decode({1: a, 2: b, 3: c})
["a", "b", "c"])
describe "Observable.onValues", ->
it "splits value array to callback arguments", ->
f = mockFunction()
Bacon.constant([1,2,3]).onValues(f)
f.verify(1,2,3)
describe "Bacon.onValues", ->
it "is a shorthand for combineAsArray.onValues", ->
f = mockFunction()
Bacon.onValues(1, 2, 3, f)
f.verify(1,2,3)
describe "Observable.subscribe and onValue", ->
it "returns a dispose() for unsubscribing", ->
s = new Bacon.Bus()
values = []
dispose = s.onValue (value) -> values.push value
s.push "lol"
dispose()
s.push "wut"
expect(values).to.deep.equal(["lol"])
describe "Observable.onEnd", ->
it "is called on stream end", ->
s = new Bacon.Bus()
ended = false
s.onEnd(-> ended = true)
s.push("LOL")
expect(ended).to.deep.equal(false)
s.end()
expect(ended).to.deep.equal(true)
describe "Field value extraction", ->
describe "extracts field value", ->
expectStreamEvents(
-> Bacon.once({lol:"wut"}).map(".lol")
["wut"])
describe "extracts nested field value", ->
expectStreamEvents(
-> Bacon.once({lol:{wut: "wat"}}).map(".lol.wut")
["wat"])
describe "yields 'undefined' if any value on the path is 'undefined'", ->
expectStreamEvents(
-> Bacon.once({}).map(".lol.wut")
[undefined])
it "if field value is method, it does a method call", ->
context = null
result = null
object = {
method: ->
context = this
"result"
}
Bacon.once(object).map(".method").onValue((x) -> result = x)
expect(result).to.deep.equal("result")
expect(context).to.deep.equal(object)
# Builds a shared spec suite for side-effect subscription methods (onValue,
# assign). `wrapper` lifts a plain value into an Observable and `method`
# names the method under test; the returned thunk is handed to describe().
testSideEffects = (wrapper, method) ->
  ->
    it "(f) calls function with property value", ->
      fn = mockFunction()
      wrapper("kaboom")[method](fn)
      fn.verify("kaboom")
    it "(f, param) calls function, partially applied with param", ->
      fn = mockFunction()
      wrapper("kaboom")[method](fn, "pow")
      fn.verify("pow", "kaboom")
    it "('.method') calls event value object method", ->
      valueMock = mock("get")
      valueMock.when().get().thenReturn("pow")
      wrapper(valueMock)[method](".get")
      valueMock.verify().get()
    it "('.method', param) calls event value object method with param", ->
      valueMock = mock("get")
      valueMock.when().get("value").thenReturn("pow")
      wrapper(valueMock)[method](".get", "value")
      valueMock.verify().get("value")
    it "(object, method) calls object method with property value", ->
      recipient = mock("pow")
      wrapper("kaboom")[method](recipient, "pow")
      recipient.verify().pow("kaboom")
    it "(object, method, param) partially applies object method with param", ->
      recipient = mock("pow")
      wrapper("kaboom")[method](recipient, "pow", "smack")
      recipient.verify().pow("smack", "kaboom")
    it "(object, method, param1, param2) partially applies with 2 args", ->
      recipient = mock("pow")
      wrapper("kaboom")[method](recipient, "pow", "smack", "whack")
      recipient.verify().pow("smack", "whack", "kaboom")
describe "Property.onValue", testSideEffects(Bacon.constant, "onValue")
describe "Property.assign", testSideEffects(Bacon.constant, "assign")
describe "EventStream.onValue", testSideEffects(Bacon.once, "onValue")
describe "Property.assign", ->
it "calls given objects given method with property values", ->
target = mock("pow")
Bacon.constant("kaboom").assign(target, "pow")
target.verify().pow("kaboom")
it "allows partial application of method (i.e. adding fixed args)", ->
target = mock("pow")
Bacon.constant("kaboom").assign(target, "pow", "smack")
target.verify().pow("smack", "kaboom")
it "allows partial application of method with 2 args (i.e. adding fixed args)", ->
target = mock("pow")
Bacon.constant("kaboom").assign(target, "pow", "smack", "whack")
target.verify().pow("smack", "whack", "kaboom")
describe "Bacon.Bus", ->
it "merges plugged-in streams", ->
bus = new Bacon.Bus()
values = []
dispose = bus.onValue (value) -> values.push value
push = new Bacon.Bus()
bus.plug(push)
push.push("lol")
expect(values).to.deep.equal(["lol"])
dispose()
verifyCleanup()
describe "works with looped streams", ->
expectStreamEvents(
->
bus = new Bacon.Bus()
bus.plug(Bacon.later(t(2), "lol"))
bus.plug(bus.filter((value) => "lol" == value).map(=> "wut"))
Bacon.later(t(4)).onValue(=> bus.end())
bus
["lol", "wut"])
it "dispose works with looped streams", ->
bus = new Bacon.Bus()
bus.plug(Bacon.later(t(2), "lol"))
bus.plug(bus.filter((value) => "lol" == value).map(=> "wut"))
dispose = bus.onValue(=>)
dispose()
it "Removes input from input list on End event", ->
subscribed = 0
bus = new Bacon.Bus()
input = new Bacon.Bus()
# override subscribe to increase the subscribed-count
inputSubscribe = input.subscribe
input.subscribe = (sink) ->
subscribed++
inputSubscribe(sink)
bus.plug(input)
dispose = bus.onValue(=>)
input.end()
dispose()
bus.onValue(=>) # this latter subscription should not go to the ended source anymore
expect(subscribed).to.deep.equal(1)
it "unsubscribes inputs on end() call", ->
bus = new Bacon.Bus()
input = new Bacon.Bus()
events = []
bus.plug(input)
bus.subscribe((e) => events.push(e))
input.push("a")
bus.end()
input.push("b")
expect(toValues(events)).to.deep.equal(["a", "<end>"])
it "handles cold single-event streams correctly (bug fix)", ->
values = []
bus = new Bacon.Bus()
bus.plug(Bacon.once("x"))
bus.plug(Bacon.once("y"))
bus.onValue((x) -> values.push(x))
expect(values).to.deep.equal(["x", "y"])
it "handles end() calls even when there are no subscribers", ->
bus = new Bacon.Bus()
bus.end()
describe "delivers pushed events and errors", ->
expectStreamEvents(
->
s = new Bacon.Bus()
s.push "pullMe"
soon ->
s.push "pushMe"
# test that it works regardless of "this"
s.push.call(null, "pushSomeMore")
s.error()
s.end()
s
["pushMe", "pushSomeMore", error()])
it "does not deliver pushed events after end() call", ->
called = false
bus = new Bacon.Bus()
bus.onValue(-> called = true)
bus.end()
bus.push("LOL")
expect(called).to.deep.equal(false)
it "does not plug after end() call", ->
plugged = false
bus = new Bacon.Bus()
bus.end()
bus.plug(new Bacon.EventStream((sink) -> plugged = true; (->)))
bus.onValue(->)
expect(plugged).to.deep.equal(false)
it "returns unplug function from plug", ->
values = []
bus = new Bacon.Bus()
src = new Bacon.Bus()
unplug = bus.plug(src)
bus.onValue((x) -> values.push(x))
src.push("x")
unplug()
src.push("y")
expect(values).to.deep.equal(["x"])
it "allows consumers to re-subscribe after other consumers have unsubscribed (bug fix)", ->
bus = new Bacon.Bus
otherBus = new Bacon.Bus
otherBus.plug(bus)
unsub = otherBus.onValue ->
unsub()
o = []
otherBus.onValue (v) -> o.push(v)
bus.push("foo")
expect(o).to.deep.equal(["foo"])
it "toString", ->
expect(new Bacon.Bus().toString()).to.equal("Bacon.Bus()")
describe "EventStream", ->
describe "works with functions as values (bug fix)", ->
expectStreamEvents(
-> Bacon.once(-> "hello").map((f) -> f())
["hello"])
expectStreamEvents(
-> Bacon.once(-> "hello").flatMap(Bacon.once).map((f) -> f())
["hello"])
expectPropertyEvents(
-> Bacon.constant(-> "hello").map((f) -> f())
["hello"])
expectStreamEvents(
-> Bacon.constant(-> "hello").flatMap(Bacon.once).map((f) -> f())
["hello"])
it "handles one subscriber added twice just like two separate subscribers (case Bacon.noMore)", ->
values = []
bus = new Bacon.Bus()
f = (v) ->
if v.hasValue()
values.push(v.value())
return Bacon.noMore
bus.subscribe(f)
bus.subscribe(f)
bus.push("bacon")
expect(values).to.deep.equal(["bacon", "bacon"])
it "handles one subscriber added twice just like two separate subscribers (case unsub)", ->
values = []
bus = new Bacon.Bus()
f = (v) ->
if v.hasValue()
values.push(v.value())
bus.subscribe(f)
unsub = bus.subscribe(f)
unsub()
bus.push("bacon")
expect(values).to.deep.equal(["bacon"])
describe "Bacon.fromBinder", ->
describe "Provides an easier alternative to the EventStream constructor, allowing sending multiple events at a time", ->
expectStreamEvents(
->
Bacon.fromBinder (sink) ->
sink([new Bacon.Next(1), new Bacon.End()])
(->)
[1])
describe "Allows sending unwrapped values as well as events", ->
expectStreamEvents(
->
Bacon.fromBinder (sink) ->
sink([1, new Bacon.End()])
(->)
[1])
describe "Allows sending single value without wrapping array", ->
expectStreamEvents(
->
Bacon.fromBinder (sink) ->
sink(1)
sink(new Bacon.End())
(->)
[1])
it "toString", ->
expect(Bacon.fromBinder(->).toString()).to.equal("Bacon.fromBinder(function,function)")
describe "String presentations", ->
describe "Initial(1).toString", ->
it "is 1", ->
expect(new Bacon.Initial(1).toString()).to.equal("1")
describe "Next({a:1i}).toString", ->
it "is {a:1}", ->
expect(new Bacon.Next({a:1}).toString()).to.equal("{a:1}")
describe "Error({a:1}).toString", ->
it "is <error> {a:1}", ->
expect(new Bacon.Error({a:1}).toString()).to.equal("<error> {a:1}")
describe "End.toString", ->
it "is <end>", ->
expect(new Bacon.End().toString()).to.equal("<end>")
describe "inspect", ->
it "is the same as toString", ->
expect(new Bacon.Initial(1).inspect()).to.equal("1")
describe "Observable.name", ->
it "sets return value of toString and inspect", ->
expect(Bacon.once(1).name("one").toString()).to.equal("one")
expect(Bacon.once(1).name("one").inspect()).to.equal("one")
it "modifies the stream in place", ->
obs = Bacon.once(1)
obs.name("one")
expect(obs.toString()).to.equal("one")
it "supports composition", ->
expect(Bacon.once("raimohanska").name("raimo").take(1).inspect()).to.equal("raimo.take(1)")
describe "Bacon.spy", ->
testSpy = (expectedCount, f) ->
calls = 0
spy = (obs) -> calls++
Bacon.spy spy
f()
expect(calls).to.equal(expectedCount)
describe "calls spy function for all created Observables", ->
it "EventStream", ->
testSpy 1, -> Bacon.once(1)
it "Property", ->
testSpy 1, -> Bacon.constant(1)
it "map", ->
testSpy 2, -> Bacon.once(1).map(->)
it "combineTemplate (also called for the intermediate combineAsArray property)", ->
testSpy 4, -> Bacon.combineTemplate(Bacon.once(1), Bacon.constant(2))
describe "Infinite synchronous sequences", ->
describe "Limiting length with take(n)", ->
expectStreamEvents(
-> endlessly(1,2,3).take(4)
[1,2,3,1], unstable)
expectStreamEvents(
-> endlessly(1,2,3).take(4).concat(Bacon.once(5))
[1,2,3,1,5], unstable)
expectStreamEvents(
-> endlessly(1,2,3).take(4).concat(endlessly(5, 6).take(2))
[1,2,3,1,5,6], unstable)
describe "With flatMap", ->
expectStreamEvents(
-> Bacon.fromArray([1,2]).flatMap((x) -> endlessly(x)).take(2)
[1,1])
expectStreamEvents(
-> endlessly(1,2).flatMap((x) -> endlessly(x)).take(2)
[1,1])
# Test helper: an endless synchronous stream cycling through `values`.
# NOTE: the index is incremented lazily, inside the Next value thunk, so
# only values that are actually consumed advance the cycle — the laziness
# specs above depend on this; do not hoist the increment.
endlessly = (values...) ->
  index = 0
  Bacon.fromSynchronousGenerator -> new Bacon.Next(-> values[index++ % values.length])
# Test helper: builds a stream from a generator function. The generator gets
# a `push` callback that feeds events to the sink; pumping stops when the
# subscriber unsubscribes, an End is delivered, or the sink replies noMore.
Bacon.fromGenerator = (generator) ->
  Bacon.fromBinder (sink) ->
    unsubd = false
    push = (events) ->
      events = Bacon._.toArray(events)
      for event in events
        return if unsubd
        reply = sink event
        # Stop on End or when the sink asks for no more events.
        return if event.isEnd() or reply == Bacon.noMore
      # Ask the generator for the next batch only after this one is consumed.
      generator(push)
    # Kick off the pump with an empty batch.
    push []
    -> unsubd = true
# Test helper: a stream that synchronously emits generator() results forever.
Bacon.fromSynchronousGenerator = (generator) ->
  Bacon.fromGenerator (push) ->
    push generator()
# Tiny functional helpers shared by the specs below.
lessThan = (limit) ->
  (x) ->
    x < limit
times = (x, y) ->
  x * y
add = (x, y) ->
  x + y
id = (x) ->
  x
| true | expect = require("chai").expect
Bacon = require("../src/Bacon").Bacon
Mocks = require( "./Mock")
TickScheduler = require("./TickScheduler").TickScheduler
mock = Mocks.mock
mockFunction = Mocks.mockFunction
EventEmitter = require("events").EventEmitter
th = require("./SpecHelper")
t = th.t
expectStreamEvents = th.expectStreamEvents
expectPropertyEvents = th.expectPropertyEvents
verifyCleanup = th.verifyCleanup
error = th.error
soon = th.soon
series = th.series
repeat = th.repeat
toValues = th.toValues
sc = TickScheduler()
Bacon.scheduler = sc
# Some streams are unstable when testing with verifySwitching2.
# Generally, all flatMap-based streams are unstable because flatMap discards
# child streams on unsubscribe.
unstable = {unstable:true}
describe "Bacon._", ->
_ = Bacon._
describe "head", ->
expect(_.head([5,2,9])).to.equal(5)
expect(_.head([])).to.equal(undefined)
expect(_.head(5)).to.equal(undefined)
describe "always", -> expect(_.always(5)("francis")).to.equal(5)
describe "negate", ->
expect(_.negate(_.always(true))("timanttikobra")).to.be.false
describe "empty", ->
expect(_.empty([])).to.be.true
expect(_.empty("")).to.be.true
expect(_.empty([1])).to.be.false
expect(_.empty("1")).to.be.false
describe "tail", ->
expect(_.tail([1,2,3])).to.deep.equal([2,3])
expect(_.tail([1])).to.deep.equal([])
expect(_.tail([])).to.deep.equal([])
describe "filter", ->
expect(_.filter(_.empty, ["","1",[],[2]])).to.deep.equal(["",[]])
describe "map", ->
expect(_.map(_.head, [
[], [1], [2,2], [3,3,3]
])).to.deep.equal([
undefined, 1, 2, 3
])
describe "flatMap", ->
expect(_.flatMap(((x) -> [x, x]), [1,2,3])).to.deep.equal([1,1,2,2,3,3])
describe "each", ->
it "provides key and value to iterator", ->
expectKeyVals = (x, expectedKeys, expectedValues) ->
keys = []
values = []
_.each(x, (key, value) ->
keys.push(key)
values.push(value)
)
expect([keys, values]).to.deep.equal([expectedKeys, expectedValues])
expectKeyVals(
{cat:"furry",bird:"feathery"}, ["cat","bird"], ["furry","feathery"]
)
expectKeyVals([1,2,3], ["0","1","2"], [1,2,3])
describe "toArray", ->
expect(_.toArray(2)).to.deep.equal([2])
it "ignores rest of arguments", ->
expect(_.toArray(1,1,2)).to.deep.equal([1])
it "should, when given an array, return it back (not a copy)", ->
arr = []
expect(_.toArray(arr)).to.equal(arr)
describe "indexOf", ->
expect(_.indexOf([1,2], 1)).to.equal(0)
expect(_.indexOf([1,2], 2)).to.equal(1)
expect(_.indexOf([1,2], 3)).to.equal(-1)
describe "contains", ->
expect(_.contains("abc", "c")).to.be.true
expect(_.contains("abc", "x")).to.be.false
expect(_.contains([2,4,6], 4)).to.be.true
expect(_.contains([2,4,6], 3)).to.be.false
describe "id", ->
obj = {}
expect(_.id(obj)).to.equal(obj)
describe "last", ->
expect(_.last([2,4])).to.equal(4)
expect(_.last("last")).to.equal("t")
describe "all", ->
expect(_.all([ [false,true], [true,true] ], _.head)).to.be.false
expect(_.all([ [true,false], [true,true] ], _.head)).to.be.true
it "should test truthiness if no function given", ->
expect(_.all([true, false, true])).to.be.false
expect(_.all([true, true, true])).to.be.true
expect(_.all([1, true, 1])).to.be.true
describe "any", ->
expect(_.any([ [false,true], [true,true] ], _.head)).to.be.true
expect(_.any([ [false,false], [false,true] ], _.head)).to.be.false
it "should test truthiness if no function given", ->
expect(_.any([false, false, false])).to.be.false
expect(_.any([true, false, true])).to.be.true
describe "without", ->
expect(_.without("apple", ["bacon","apple","apple","omelette"]))
.to.deep.equal(["bacon","omelette"])
describe "remove", ->
expect(_.remove("apple", ["bacon","apple","apple","omelette"]))
.to.deep.equal(["apple"])
expect(_.remove("raisin", ["bacon","apple","apple","omelette"]))
.to.deep.equal(undefined)
describe "fold", ->
expect(_.fold([1,2,3,4,5], 0, (s, n) -> s + n)).to.equal(15)
describe "toString", ->
it "for booleans", ->
expect(_.toString(true)).to.equal("true")
it "for numbers", ->
expect(_.toString(1)).to.equal("1")
expect(_.toString(1.1)).to.equal("1.1")
it "for undefined and null", ->
expect(_.toString(undefined)).to.equal("undefined")
expect(_.toString(null)).to.equal("undefined")
it "for strings", ->
expect(_.toString("lol")).to.equal("lol")
it "for dates", ->
expect(_.toString(new Date(0))).to.contain("1970")
it "for arrays", ->
expect(_.toString([1,2,3])).to.equal("[1,2,3]")
it "for objects", ->
expect(_.toString({a: "b"})).to.equal("{a:b}")
expect(_.toString({a: "b", c: "d"})).to.equal("{a:b,c:d}")
it "for circular refs", ->
obj = { name : "nasty" }
obj.self = obj
expect(_.toString(obj).length).to.be.below(100)
it "works even when enumerable properties throw errors on access", ->
obj = { "name": "madcow" }
Object.defineProperty obj, "prop",
enumerable: true
get: ->
throw new Error "an error"
expect(_.toString(obj)).to.equal("{name:madcow,prop:Error: an error}")
describe "Bacon.later", ->
describe "should send single event and end", ->
expectStreamEvents(
-> Bacon.later(t(1), "lol")
["lol"])
describe "supports sending an Error event as well", ->
expectStreamEvents(
-> Bacon.later(t(1), new Bacon.Error("oops"))
[error()])
it "toString", ->
expect(Bacon.later(1, "wat").toString()).to.equal("Bacon.later(1,wat)")
it "inspect", ->
expect(Bacon.later(1, "wat").inspect()).to.equal("Bacon.later(1,wat)")
describe "Bacon.sequentially", ->
describe "should send given events and end", ->
expectStreamEvents(
-> Bacon.sequentially(t(1), ["lol", "wut"])
["lol", "wut"])
describe "include error events", ->
expectStreamEvents(
-> Bacon.sequentially(t(1), [error(), "lol"])
[error(), "lol"])
describe "will stop properly even when exception thrown by subscriber", ->
expectStreamEvents(
->
s = Bacon.sequentially(t(1), ["lol", "wut"])
s.onValue (value) ->
throw "testing"
s
[])
it "toString", ->
expect(Bacon.sequentially(1, [2]).toString()).to.equal("Bacon.sequentially(1,[2])")
describe "Bacon.repeatedly", ->
describe "repeats given sequence forever", ->
expectStreamEvents(
-> Bacon.repeatedly(1, [1,2]).take(5)
[1,2,1,2,1])
it "toString", ->
expect(Bacon.repeatedly(1, [1]).toString()).to.equal("Bacon.repeatedly(1,[1])")
describe "Bacon.interval", ->
describe "repeats single element indefinitely", ->
expectStreamEvents(
-> Bacon.interval(t(1), "x").take(3)
["x", "x", "x"])
it "toString", ->
expect(Bacon.interval(1, 2).toString()).to.equal("Bacon.interval(1,2)")
describe "Bacon.fromPoll", ->
describe "repeatedly polls given function for values", ->
expectStreamEvents(
-> Bacon.fromPoll(1, (-> "lol")).take(2)
["lol", "lol"])
it "toString", ->
expect(Bacon.fromPoll(1, (->)).toString()).to.equal("Bacon.fromPoll(1,function)")
# Shared spec: verifies that a callback-taking function lifted with
# `liftedCallback` (Bacon.fromCallback / Bacon.fromNodeCallback) resolves
# Observable arguments to their values before invoking `src`.
testLiftedCallback = (src, liftedCallback) ->
  input = [
    Bacon.constant('a')
    'x'
    Bacon.constant('b').toProperty()
    'y'
  ]
  # plain values pass through unchanged; Observables are replaced by their values
  output = ['a', 'x', 'b', 'y']
  expectStreamEvents(
    -> liftedCallback(src, input...)
    [output]
  )
describe "Bacon.fromCallback", ->
describe "makes an EventStream from function that takes a callback", ->
expectStreamEvents(
->
src = (callback) -> callback("lol")
stream = Bacon.fromCallback(src)
["lol"])
describe "supports partial application", ->
expectStreamEvents(
->
src = (param, callback) -> callback(param)
stream = Bacon.fromCallback(src, "lol")
["lol"])
describe "supports partial application with Observable arguments", ->
testLiftedCallback(
(values..., callback) -> callback(values)
Bacon.fromCallback
)
describe "supports object, methodName, partial application", ->
expectStreamEvents(
->
src = {
"go": (param, callback) -> callback(param + " " + this.name)
"name": "bob"
}
stream = Bacon.fromCallback(src, "go", "hello")
["hello PI:NAME:<NAME>END_PI"])
it "toString", ->
expect(Bacon.fromCallback((->), "lol").toString()).to.equal("Bacon.fromCallback(function,lol)")
describe "Bacon.fromNodeCallback", ->
describe "makes an EventStream from function that takes a node-style callback", ->
expectStreamEvents(
->
src = (callback) -> callback(null, "lol")
stream = Bacon.fromNodeCallback(src)
["lol"])
describe "handles error parameter correctly", ->
expectStreamEvents(
->
src = (callback) -> callback('errortxt', null)
stream = Bacon.fromNodeCallback(src)
[error()])
describe "supports partial application", ->
expectStreamEvents(
->
src = (param, callback) -> callback(null, param)
stream = Bacon.fromNodeCallback(src, "lol")
["lol"])
describe "supports partial application with Observable arguments", ->
testLiftedCallback(
(values..., callback) -> callback(null, values)
Bacon.fromNodeCallback
)
describe "supports object, methodName, partial application", ->
expectStreamEvents(
->
src = {
"go": (param, callback) -> callback(null, param + " " + this.name)
"name": "PI:NAME:<NAME>END_PI"
}
stream = Bacon.fromNodeCallback(src, "go", "hello")
["hello PI:NAME:<NAME>END_PI"])
it "toString", ->
expect(Bacon.fromNodeCallback((->), "lol").toString()).to.equal("Bacon.fromNodeCallback(function,lol)")
# Adapt a Node.js EventEmitter to the DOM EventTarget interface
# (addEventListener/removeEventListener) expected by Bacon.fromEventTarget.
toEventTarget = (emitter) ->
  addEventListener: (event, handler) -> emitter.addListener(event, handler)
  removeEventListener: (event, handler) -> emitter.removeListener(event, handler)
describe "Bacon.fromEventTarget", ->
soon = (f) -> setTimeout f, 0
describe "should create EventStream from DOM object", ->
expectStreamEvents(
->
emitter = new EventEmitter()
emitter.on "newListener", ->
soon -> emitter.emit "click", "x"
element = toEventTarget emitter
Bacon.fromEventTarget(element, "click").take(1)
["x"]
)
describe "should create EventStream from EventEmitter", ->
expectStreamEvents(
->
emitter = new EventEmitter()
emitter.on "newListener", ->
soon -> emitter.emit "data", "x"
Bacon.fromEventTarget(emitter, "data").take(1)
["x"]
)
describe "should allow a custom map function for EventStream from EventEmitter", ->
expectStreamEvents(
->
emitter = new EventEmitter()
emitter.on "newListener", ->
soon -> emitter.emit "data", "x", "y"
Bacon.fromEventTarget(emitter, "data", (x, y) => [x, y]).take(1)
[["x", "y"]]
)
it "should clean up event listeners from EventEmitter", ->
emitter = new EventEmitter()
Bacon.fromEventTarget(emitter, "data").take(1).subscribe ->
emitter.emit "data", "x"
expect(emitter.listeners("data").length).to.deep.equal(0)
it "should clean up event listeners from DOM object", ->
emitter = new EventEmitter()
element = toEventTarget emitter
dispose = Bacon.fromEventTarget(element, "click").subscribe ->
dispose()
expect(emitter.listeners("click").length).to.deep.equal(0)
it "toString", ->
expect(Bacon.fromEventTarget({}, "click").toString()).to.equal("Bacon.fromEventTarget({},click)")
describe "Observable.log", ->
preservingLog = (f) ->
originalConsole = console
originalLog = console.log
try
f()
finally
global.console = originalConsole
console.log = originalLog
it "does not crash", ->
preservingLog ->
console.log = ->
Bacon.constant(1).log()
it "does not crash in case console.log is not defined", ->
preservingLog ->
console.log = undefined
Bacon.constant(1).log()
it "toString", ->
expect(Bacon.never().log().toString()).to.equal("Bacon.never()")
describe "Observable.slidingWindow", ->
describe "slides the window for EventStreams", ->
expectPropertyEvents(
-> series(1, [1,2,3]).slidingWindow(2)
[[], [1], [1,2], [2,3]])
describe "slides the window for Properties", ->
expectPropertyEvents(
-> series(1, [1,2,3]).toProperty().slidingWindow(2)
[[], [1], [1,2], [2,3]])
describe "accepts second parameter for minimum amount of values", ->
expectPropertyEvents(
-> series(1, [1,2,3,4]).slidingWindow(3, 2)
[[1,2], [1,2,3], [2,3,4]])
expectPropertyEvents(
-> series(1, [1,2,3,4]).toProperty(0).slidingWindow(3, 2)
[[0,1], [0, 1, 2], [1,2,3], [2,3,4]])
it "toString", ->
expect(Bacon.never().slidingWindow(2).toString()).to.equal("Bacon.never().slidingWindow(2,0)")
describe "EventStream.filter", ->
describe "should filter values", ->
expectStreamEvents(
-> series(1, [1, 2, error(), 3]).filter(lessThan(3))
[1, 2, error()])
describe "extracts field values", ->
expectStreamEvents(
-> series(1, [{good:true, value:"yes"}, {good:false, value:"no"}]).filter(".good").map(".value")
["yes"])
describe "can filter by Property value", ->
expectStreamEvents(
->
src = series(1, [1,1,2,3,4,4,8,7])
odd = src.map((x) -> x % 2).toProperty()
src.filter(odd)
[1,1,3,7])
it "toString", ->
expect(Bacon.never().filter(false).toString()).to.equal("Bacon.never().filter(function)")
describe "EventStream.map", ->
describe "should map with given function", ->
expectStreamEvents(
-> series(1, [1, 2, 3]).map(times, 2)
[2, 4, 6])
describe "also accepts a constant value", ->
expectStreamEvents(
-> series(1, [1, 2, 3,]).map("lol")
["lol", "lol", "lol"])
describe "extracts property from value object", ->
o = { lol : "wut" }
expectStreamEvents(
-> repeat(1, [o]).take(3).map(".lol")
["wut", "wut", "wut"])
describe "extracts a nested property too", ->
o = { lol : { wut : "wat" } }
expectStreamEvents(
-> Bacon.once(o).map(".lol.wut")
["wat"])
describe "in case of a function property, calls the function with no args", ->
expectStreamEvents(
-> Bacon.once([1,2,3]).map(".length")
[3])
describe "allows arguments for methods", ->
thing = { square: (x) -> x * x }
expectStreamEvents(
-> Bacon.once(thing).map(".square", 2)
[4])
describe "works with method call on given object, with partial application", ->
multiplier = { multiply: (x, y) -> x * y }
expectStreamEvents(
-> series(1, [1,2,3]).map(multiplier, "multiply", 2)
[2,4,6])
describe "can map to a Property value", ->
expectStreamEvents(
-> series(1, [1,2,3]).map(Bacon.constant(2))
[2,2,2])
it "preserves laziness", ->
calls = 0
id = (x) ->
calls++
x
Bacon.fromArray([1,2,3,4,5]).map(id).skip(4).onValue()
expect(calls).to.equal(1)
it "toString", ->
expect(Bacon.once(1).map(true).toString()).to.equal("Bacon.once(1).map(function)")
describe "EventStream.mapError", ->
describe "should map error events with given function", ->
expectStreamEvents(
-> repeat(1, [1, error("OOPS")]).mapError(id).take(2)
[1, "OOPS"])
describe "also accepts a constant value", ->
expectStreamEvents(
-> repeat(1, [1, error()]).mapError("ERR").take(2)
[1, "ERR"])
it "toString", ->
expect(Bacon.never().mapError(true).toString()).to.equal("Bacon.never().mapError(function)")
describe "EventStream.doAction", ->
it "calls function before sending value to listeners", ->
called = []
bus = new Bacon.Bus()
s = bus.doAction((x) -> called.push(x))
s.onValue(->)
s.onValue(->)
bus.push(1)
expect(called).to.deep.equal([1])
describe "does not alter the stream", ->
expectStreamEvents(
-> series(1, [1, 2]).doAction(->)
[1, 2])
it "toString", ->
expect(Bacon.never().doAction((->)).toString()).to.equal("Bacon.never().doAction(function)")
describe "EventStream.mapEnd", ->
describe "produces an extra element on stream end", ->
expectStreamEvents(
-> series(1, ["1", error()]).mapEnd("the end")
["1", error(), "the end"])
describe "accepts either a function or a constant value", ->
expectStreamEvents(
-> series(1, ["1", error()]).mapEnd(-> "the end")
["1", error(), "the end"])
describe "works with undefined value as well", ->
expectStreamEvents(
-> series(1, ["1", error()]).mapEnd()
["1", error(), undefined])
it "toString", ->
expect(Bacon.never().mapEnd(true).toString()).to.equal("Bacon.never().mapEnd(function)")
describe "EventStream.take", ->
describe "takes N first elements", ->
expectStreamEvents(
-> series(1, [1,2,3,4]).take(2)
[1,2])
describe "works with N=0", ->
expectStreamEvents(
-> series(1, [1,2,3,4]).take(0)
[])
describe "will stop properly even when exception thrown by subscriber", ->
expectStreamEvents(
->
s = Bacon.repeatedly(t(1), ["lol", "wut"]).take(2)
s.onValue (value) ->
throw "testing"
s
[])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1,2,3,4]).take(2)
[1,2])
it "toString", ->
expect(Bacon.never().take(1).toString()).to.equal("Bacon.never().take(1)")
describe "EventStream.takeWhile", ->
describe "takes while predicate is true", ->
expectStreamEvents(
-> repeat(1, [1, error("wat"), 2, 3]).takeWhile(lessThan(3))
[1, error("wat"), 2])
describe "extracts field values", ->
expectStreamEvents(
->
series(1, [{good:true, value:"yes"}, {good:false, value:"no"}])
.takeWhile(".good").map(".value")
["yes"])
describe "can filter by Property value", ->
expectStreamEvents(
->
src = series(1, [1,1,2,3,4,4,8,7])
odd = src.map((x) -> x % 2).toProperty()
src.takeWhile(odd)
[1,1])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3]).takeWhile(lessThan(3))
[1, 2])
it "toString", ->
expect(Bacon.never().takeWhile(true).toString()).to.equal("Bacon.never().takeWhile(function)")
describe "EventStream.skip", ->
describe "should skip first N items", ->
expectStreamEvents(
-> series(1, [1, error(), 2, error(), 3]).skip(1)
[error(), 2, error(), 3])
describe "accepts N <= 0", ->
expectStreamEvents(
-> series(1, [1, 2]).skip(-1)
[1, 2])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3]).skip(1)
[2, 3])
it "toString", ->
expect(Bacon.never().skip(1).toString()).to.equal("Bacon.never().skip(1)")
describe "EventStream.skipWhile", ->
describe "skips filter predicate holds true", ->
expectStreamEvents(
-> series(1, [1, error(), 2, error(), 3, 2]).skipWhile(lessThan(3))
[error(), error(), 3, 2])
describe "extracts field values", ->
expectStreamEvents(
->
series(1, [{good:true, value:"yes"}, {good:false, value:"no"}])
.skipWhile(".good").map(".value")
["no"])
describe "can filter by Property value", ->
expectStreamEvents(
->
src = series(1, [1,1,2,3,4,4,8,7])
odd = src.map((x) -> x % 2).toProperty()
src.skipWhile(odd)
[2,3,4,4,8,7])
describe "for synchronous sources", ->
describe "skips filter predicate holds true", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3, 2]).skipWhile(lessThan(3))
[3, 2])
it "toString", ->
expect(Bacon.never().skipWhile(1).toString()).to.equal("Bacon.never().skipWhile(function)")
describe "EventStream.skipUntil", ->
describe "skips events until one appears in given starter stream", ->
expectStreamEvents(
->
src = series(3, [1,2,3])
src.onValue(->) # to start "time" immediately instead of on subscribe
starter = series(4, ["start"])
src.skipUntil(starter)
[2,3])
describe "works with self-derived starter", ->
expectStreamEvents(
->
src = series(3, [1,2,3])
starter = src.filter((x) -> x == 3)
src.skipUntil(starter)
[3])
describe "works with self-derived starter with an evil twist", ->
expectStreamEvents(
->
src = series(3, [1,2,3])
data = src.map((x) -> x)
data.onValue(->)
starter = src.filter((x) -> x == 3)
data.skipUntil(starter)
[3])
it "toString", ->
expect(Bacon.never().skipUntil(Bacon.once(1)).toString()).to.equal("Bacon.never().skipUntil(Bacon.once(1))")
describe "EventStream.skipDuplicates", ->
it "Drops duplicates with subscribers with non-overlapping subscription time (#211)", ->
b = new Bacon.Bus()
noDups = b.skipDuplicates()
round = (expected) ->
values = []
noDups.take(1).onValue (x) -> values.push(x)
b.push 1
expect(values).to.deep.equal(expected)
round([1])
round([])
round([])
describe "drops duplicates", ->
expectStreamEvents(
-> series(1, [1, 2, error(), 2, 3, 1]).skipDuplicates()
[1, 2, error(), 3, 1])
describe "allows undefined as initial value", ->
expectStreamEvents(
-> series(1, [undefined, undefined, 1, 2]).skipDuplicates()
[undefined, 1, 2])
describe "works with custom isEqual function", ->
a = {x: 1}; b = {x: 2}; c = {x: 2}; d = {x: 3}; e = {x: 1}
isEqual = (a, b) -> a?.x == b?.x
expectStreamEvents(
-> series(1, [a, b, error(), c, d, e]).skipDuplicates(isEqual)
[a, b, error(), d, e])
describe "works with synchrounous sources", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 2, 3, 1]).skipDuplicates()
[1, 2, 3, 1], unstable)
it "toString", ->
expect(Bacon.never().skipDuplicates().toString()).to.equal("Bacon.never().skipDuplicates()")
describe "EventStream.flatMap", ->
describe "should spawn new stream for each value and collect results into a single stream", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMap (value) ->
Bacon.sequentially(t(2), [value, error(), value])
[1, 2, error(), error(), 1, 2], unstable)
describe "should pass source errors through to the result", ->
expectStreamEvents(
-> series(1, [error(), 1]).flatMap (value) ->
Bacon.later(t(1), value)
[error(), 1])
describe "should work with a spawned stream responding synchronously", ->
expectStreamEvents(
-> series(1, [1, 2]).flatMap (value) ->
Bacon.never().concat(Bacon.once(value))
[1, 2], unstable)
expectStreamEvents(
-> series(1, [1,2]).flatMap (value) ->
Bacon.never().concat(Bacon.once(value)).concat(Bacon.once("lol"))
[1, "lol", 2, "lol"], unstable)
describe "should work with a source stream responding synchronously", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2]).flatMap (value) ->
Bacon.once(value)
[1, 2])
expectStreamEvents(
-> Bacon.fromArray([1, 2]).flatMap (value) ->
Bacon.fromArray([value, value*10])
[1, 10, 2, 20])
expectStreamEvents(
-> Bacon.once(1).flatMap (value) ->
Bacon.later(0, value)
[1])
describe "Works also when f returns a Property instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap(Bacon.constant)
[1,2], unstable)
describe "Works also when f returns a constant value instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap((x) -> x)
[1,2], unstable)
describe "Works also when f returns an Error instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).flatMap((x) -> new Bacon.Error(x))
[new Bacon.Error(1), new Bacon.Error(2)], unstable)
describe "Accepts a constant EventStream/Property as an alternative to a function", ->
expectStreamEvents(
-> Bacon.once("asdf").flatMap(Bacon.constant("bacon"))
["bacon"])
expectStreamEvents(
-> Bacon.once("asdf").flatMap(Bacon.once("bacon"))
["bacon"])
describe "Respects function construction rules", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("PI:NAME:<NAME>END_PI")}).flatMap(".bacon")
["PI:NAME:<NAME>END_PI"])
expectStreamEvents(
-> Bacon.once({ bacon: "PI:NAME:<NAME>END_PI"}).flatMap(".bacon")
["PI:NAME:<NAME>END_PI"])
expectStreamEvents(
->
glorify = (x, y) -> Bacon.fromArray([x, y])
Bacon.once("francis").flatMap(glorify, "sir")
["sir", "frPI:NAME:<NAME>END_PI"])
it "toString", ->
expect(Bacon.never().flatMap(->).toString()).to.equal("Bacon.never().flatMap(function)")
describe "Property.flatMap", ->
describe "should spawn new stream for all events including Init", ->
expectStreamEvents(
->
once = (x) -> Bacon.once(x)
series(1, [1, 2]).toProperty(0).flatMap(once)
[0, 1, 2], unstable)
describe "Works also when f returns a Property instead of an EventStream", ->
expectStreamEvents(
-> series(1, [1,2]).toProperty().flatMap(Bacon.constant)
[1,2], unstable)
expectPropertyEvents(
-> series(1, [1,2]).toProperty().flatMap(Bacon.constant).toProperty()
[1,2], unstable)
describe "works for synchronous source", ->
expectStreamEvents(
->
once = (x) -> Bacon.once(x)
Bacon.fromArray([1, 2]).toProperty(0).flatMap(once)
[0, 1, 2], unstable)
it "toString", ->
expect(Bacon.constant(1).flatMap(->).toString()).to.equal("Bacon.constant(1).flatMap(function)")
describe "EventStream.flatMapLatest", ->
describe "spawns new streams but collects values from the latest spawned stream only", ->
expectStreamEvents(
-> series(3, [1, 2]).flatMapLatest (value) ->
Bacon.sequentially(t(2), [value, error(), value])
[1, 2, error(), 2], unstable)
describe "Accepts a constant EventStream/Property as an alternative to a function", ->
expectStreamEvents(
-> Bacon.once("asdf").flatMapLatest(Bacon.constant("bacon"))
["bacon"], unstable)
describe "Accepts a field extractor string instead of function", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("PI:NAME:<NAME>END_PI")}).flatMapLatest(".bacon")
["PI:NAME:<NAME>END_PI"])
expectStreamEvents(
-> Bacon.once({ bacon: "PI:NAME:<NAME>END_PI"}).flatMapLatest(".bacon")
["PI:NAME:<NAME>END_PI"])
it "toString", ->
expect(Bacon.never().flatMapLatest(->).toString()).to.equal("Bacon.never().flatMapLatest(function)")
describe "Property.flatMapLatest", ->
describe "spawns new streams but collects values from the latest spawned stream only", ->
expectStreamEvents(
-> series(3, [1, 2]).toProperty(0).flatMapLatest (value) ->
Bacon.sequentially(t(2), [value, value])
[0, 1, 2, 2], unstable)
describe "Accepts a constant EventStream/Property as an alternative to a function", ->
expectStreamEvents(
-> Bacon.constant("asdf").flatMapLatest(Bacon.constant("bacon"))
["bacon"], unstable)
it "toString", ->
expect(Bacon.constant(1).flatMapLatest(->).toString()).to.equal("Bacon.constant(1).flatMapLatest(function)")
describe "EventStream.flatMapFirst", ->
describe "spawns new streams and ignores source events until current spawned stream has ended", ->
expectStreamEvents(
-> series(2, [2, 4, 6, 8]).flatMapFirst (value) ->
series(1, ["a" + value, "b" + value, "c" + value])
["a2", "b2", "c2", "a6", "b6", "c6"], unstable)
describe "Accepts a field extractor string instead of function", ->
expectStreamEvents(
-> Bacon.once({ bacon: Bacon.once("PI:NAME:<NAME>END_PI")}).flatMapFirst(".bacon")
["PI:NAME:<NAME>END_PI"])
expectStreamEvents(
-> Bacon.once({ bacon: "PI:NAME:<NAME>END_PI"}).flatMapFirst(".bacon")
["PI:NAME:<NAME>END_PI"])
it "toString", ->
expect(Bacon.never().flatMapFirst(->).toString()).to.equal("Bacon.never().flatMapFirst(function)")
describe "EventStream.merge", ->
describe "merges two streams and ends when both are exhausted", ->
expectStreamEvents(
->
left = series(1, [1, error(), 2, 3])
right = series(1, [4, 5, 6]).delay(t(4))
left.merge(right)
[1, error(), 2, 3, 4, 5, 6], unstable)
describe "respects subscriber return value", ->
expectStreamEvents(
->
left = repeat(2, [1, 3]).take(3)
right = repeat(3, [2]).take(3)
left.merge(right).takeWhile(lessThan(2))
[1])
describe "does not duplicate same error from two streams", ->
expectStreamEvents(
->
src = series(1, [1, error(), 2, error(), 3])
left = src.map((x) -> x)
right = src.map((x) -> x * 2)
left.merge(right)
[1, 2, error(), 2, 4, error(), 3, 6])
describe "works with synchronous sources", ->
expectStreamEvents(
-> Bacon.fromArray([1,2]).merge(Bacon.fromArray([3,4]))
[1,2,3,4])
it "toString", ->
expect(Bacon.once(1).merge(Bacon.once(2)).toString()).to.equal("Bacon.once(1).merge(Bacon.once(2))")
describe "EventStream.delay", ->
describe "delays all events (except errors) by given delay in milliseconds", ->
expectStreamEvents(
->
left = series(2, [1, 2, 3])
right = series(1, [error(), 4, 5, 6]).delay(t(6))
left.merge(right)
[error(), 1, 2, 3, 4, 5, 6], unstable)
describe "works with synchronous streams", ->
expectStreamEvents(
->
left = Bacon.fromArray([1, 2, 3])
right = Bacon.fromArray([4, 5, 6]).delay(t(6))
left.merge(right)
[1, 2, 3, 4, 5, 6], unstable)
it "toString", ->
expect(Bacon.never().delay(1).toString()).to.equal("Bacon.never().delay(1)")
describe "EventStream.debounce", ->
describe "throttles input by given delay, passing-through errors", ->
expectStreamEvents(
-> series(2, [1, error(), 2]).debounce(t(7))
[error(), 2])
describe "waits for a quiet period before outputing anything", ->
th.expectStreamTimings(
-> series(2, [1, 2, 3, 4]).debounce(t(3))
[[11, 4]])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3, 4]).debounce(t(3))
[4])
describe "works in combination with scan", ->
count = 0
expectPropertyEvents(
-> series(2, [1,2,3]).debounce(1).scan(0, (x,y) -> count++; x + y)
[0, 1, 3, 6]
)
it "calls accumulator once per value", ->
expect(count).to.equal(3)
it "toString", ->
expect(Bacon.never().debounce(1).toString()).to.equal("Bacon.never().debounce(1)")
describe "EventStream.debounceImmediate(delay)", ->
describe "outputs first event immediately, then ignores events for given amount of milliseconds", ->
th.expectStreamTimings(
-> series(2, [1, 2, 3, 4]).debounceImmediate(t(3))
[[2, 1], [6, 3]], unstable)
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3, 4]).debounceImmediate(t(3))
[1])
it "toString", ->
expect(Bacon.never().debounceImmediate(1).toString()).to.equal("Bacon.never().debounceImmediate(1)")
describe "EventStream.throttle(delay)", ->
describe "outputs at steady intervals, without waiting for quiet period", ->
th.expectStreamTimings(
-> series(2, [1, 2, 3]).throttle(t(3))
[[5, 2], [8, 3]])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3]).throttle(t(3))
[3])
it "toString", ->
expect(Bacon.never().throttle(1).toString()).to.equal("Bacon.never().throttle(1)")
describe "EventStream.bufferWithTime", ->
describe "returns events in bursts, passing through errors", ->
expectStreamEvents(
-> series(2, [error(), 1, 2, 3, 4, 5, 6, 7]).bufferWithTime(t(7))
[error(), [1, 2, 3, 4], [5, 6, 7]])
describe "keeps constant output rate even when input is sporadical", ->
th.expectStreamTimings(
-> th.atGivenTimes([[0, "a"], [3, "b"], [5, "c"]]).bufferWithTime(t(2))
[[2, ["a"]], [4, ["b"]], [6, ["c"]]]
unstable
)
describe "works with empty stream", ->
expectStreamEvents(
-> Bacon.never().bufferWithTime(t(1))
[])
describe "allows custom defer-function", ->
fast = (f) -> sc.setTimeout(f, 0)
th.expectStreamTimings(
-> th.atGivenTimes([[0, "a"], [2, "b"]]).bufferWithTime(fast)
[[0, ["a"]], [2, ["b"]]])
describe "works with synchronous defer-function", ->
sync = (f) -> f()
th.expectStreamTimings(
-> th.atGivenTimes([[0, "a"], [2, "b"]]).bufferWithTime(sync)
[[0, ["a"]], [2, ["b"]]])
describe "works with synchronous source", ->
expectStreamEvents(
-> series(2, [1,2,3]).bufferWithTime(t(7))
[[1,2,3]])
it "toString", ->
expect(Bacon.never().bufferWithTime(1).toString()).to.equal("Bacon.never().bufferWithTime(1)")
describe "EventStream.bufferWithCount", ->
describe "returns events in chunks of fixed size, passing through errors", ->
expectStreamEvents(
-> series(1, [1, 2, 3, error(), 4, 5]).bufferWithCount(2)
[[1, 2], error(), [3, 4], [5]])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1,2,3,4,5]).bufferWithCount(2)
[[1, 2], [3, 4], [5]])
it "toString", ->
expect(Bacon.never().bufferWithCount(1).toString()).to.equal("Bacon.never().bufferWithCount(1)")
describe "EventStream.bufferWithTimeOrCount", ->
describe "flushes on count", ->
expectStreamEvents(
-> series(1, [1, 2, 3, error(), 4, 5]).bufferWithTimeOrCount(t(10), 2)
[[1, 2], error(), [3, 4], [5]])
describe "flushes on timeout", ->
expectStreamEvents(
-> series(2, [error(), 1, 2, 3, 4, 5, 6, 7]).bufferWithTimeOrCount(t(7), 10)
[error(), [1, 2, 3, 4], [5, 6, 7]])
it "toString", ->
expect(Bacon.never().bufferWithTimeOrCount(1, 2).toString()).to.equal("Bacon.never().bufferWithTimeOrCount(1,2)")
describe "EventStream.takeUntil", ->
describe "takes elements from source until an event appears in the other stream", ->
expectStreamEvents(
->
src = repeat(3, [1, 2, 3])
stopper = repeat(7, ["stop!"])
src.takeUntil(stopper)
[1, 2], unstable)
describe "works on self-derived stopper", ->
expectStreamEvents(
->
src = repeat(3, [3, 2, 1])
stopper = src.filter(lessThan(3))
src.takeUntil(stopper)
[3])
describe "works on self-derived stopper with an evil twist", ->
expectStreamEvents(
->
src = repeat(3, [3, 2, 1])
data = src.map((x) -> x)
data.take(3).onValue(->)
stopper = src.filter(lessThan(3))
data.takeUntil(stopper)
[3])
describe "includes source errors, ignores stopper errors", ->
expectStreamEvents(
->
src = repeat(2, [1, error(), 2, 3])
stopper = repeat(7, ["stop!"]).merge(repeat(1, [error()]))
src.takeUntil(stopper)
[1, error(), 2], unstable)
describe "works with Property as stopper", ->
expectStreamEvents(
->
src = repeat(3, [1, 2, 3])
stopper = repeat(7, ["stop!"]).toProperty()
src.takeUntil(stopper)
[1, 2], unstable)
describe "considers Property init value as stopper", ->
expectStreamEvents(
->
src = repeat(3, [1, 2, 3])
stopper = Bacon.constant("stop")
src.takeUntil(stopper)
[])
describe "ends immediately with synchronous stopper", ->
expectStreamEvents(
->
src = repeat(3, [1, 2, 3])
stopper = Bacon.once("stop")
src.takeUntil(stopper)
[])
describe "ends properly with a never-ending stopper", ->
expectStreamEvents(
->
src = series(1, [1,2,3])
stopper = new Bacon.Bus()
src.takeUntil(stopper)
[1,2,3])
describe "ends properly with a never-ending stopper and synchronous source", ->
expectStreamEvents(
->
src = Bacon.fromArray([1,2,3]).mapEnd("finito")
stopper = new Bacon.Bus()
src.takeUntil(stopper)
[1,2,3, "finito"])
describe "unsubscribes its source as soon as possible", ->
expectStreamEvents(
->
startTick = sc.now()
Bacon.later(20)
.onUnsub(->
expect(sc.now()).to.equal(startTick + 1))
.takeUntil Bacon.later(1)
[])
describe "it should unsubscribe its stopper on end", ->
expectStreamEvents(
->
startTick = sc.now()
Bacon.later(1,'x').takeUntil(Bacon.later(20).onUnsub(->
expect(sc.now()).to.equal(startTick + 1)))
['x'])
describe "it should unsubscribe its stopper on no more", ->
expectStreamEvents(
->
startTick = sc.now()
Bacon.later(1,'x').takeUntil(Bacon.later(20).onUnsub(->
expect(sc.now()).to.equal(startTick + 1)))
['x'])
### TODO does not pass
describe "works with synchronous self-derived sources", ->
expectStreamEvents(
->
a = Bacon.fromArray [1,2]
b = a.filter((x) -> x >= 2)
a.takeUntil b
[1])
###
it "toString", ->
expect(Bacon.later(1, "a").takeUntil(Bacon.later(2, "b")).toString()).to.equal("Bacon.later(1,a).takeUntil(Bacon.later(2,b))")
describe "When an Event triggers another one in the same stream, while dispatching", ->
it "Delivers triggered events correctly", ->
bus = new Bacon.Bus
values = []
bus.take(2).onValue (v) ->
bus.push "A"
bus.push "B"
bus.onValue (v) ->
values.push(v)
bus.push "a"
bus.push "b"
expect(values).to.deep.equal(["a", "A", "B", "A", "B", "b"])
it "EventStream.take(1) works correctly (bug fix)", ->
bus = new Bacon.Bus
values = []
bus.take(1).onValue (v) ->
bus.push("onValue triggers a side-effect here")
values.push(v)
bus.push("foo")
expect(values).to.deep.equal(["foo"])
describe "EventStream.awaiting(other)", ->
describe "indicates whether s1 has produced output after s2 (or only the former has output so far)", ->
expectPropertyEvents(
-> series(2, [1, 1]).awaiting(series(3, [2]))
[false, true, false, true])
describe "supports Properties", ->
expectPropertyEvents(
-> series(2, [1, 1]).awaiting(series(3, [2]).toProperty())
[false, true, false, true])
describe "supports simultaneouts events", ->
expectPropertyEvents(
->
src = Bacon.later(1, 1)
src.awaiting(src.map(->))
[false])
expectPropertyEvents(
->
src = Bacon.later(1, 1)
src.map(->).awaiting(src)
[false])
it "toString", ->
expect(Bacon.never().awaiting(Bacon.once(1)).toString()).to.equal("Bacon.never().awaiting(Bacon.once(1))")
describe "EventStream.endOnError", ->
describe "terminates on error", ->
expectStreamEvents(
-> repeat(1, [1, 2, error(), 3]).endOnError()
[1, 2, error()])
describe "accepts predicate function", ->
expectStreamEvents(
-> series(1, [1, 2, error(), 3, new Bacon.Error({serious:true}), 4]).endOnError((e) -> e?.serious)
[1,2,error(),3,error()])
describe "accepts extractor string", ->
expectStreamEvents(
-> series(1, [1, 2, error(), 3, new Bacon.Error({serious:true}), 4]).endOnError(".serious")
[1,2,error(),3,error()])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, error(), 3]).endOnError()
[1, 2, error()])
it "toString", ->
expect(Bacon.never().endOnError().toString()).to.equal("Bacon.never().endOnError()")
describe "Bacon.constant", ->
describe "creates a constant property", ->
expectPropertyEvents(
-> Bacon.constant("lol")
["lol"])
it "ignores unsubscribe", ->
Bacon.constant("lol").onValue(=>)()
describe "provides same value to all listeners", ->
c = Bacon.constant("lol")
expectPropertyEvents((-> c), ["lol"])
it "check check", ->
f = mockFunction()
c.onValue(f)
f.verify("lol")
it "provides same value to all listeners, when mapped (bug fix)", ->
c = Bacon.constant("lol").map(id)
f = mockFunction()
c.onValue(f)
f.verify("lol")
c.onValue(f)
f.verify("lol")
it "toString", ->
expect(Bacon.constant(1).toString()).to.equal("Bacon.constant(1)")
describe "Bacon.never", ->
describe "should send just end", ->
expectStreamEvents(
-> Bacon.never()
[])
describe "Bacon.once", ->
describe "should send single event and end", ->
expectStreamEvents(
-> Bacon.once("pow")
["pow"])
describe "accepts an Error event as parameter", ->
expectStreamEvents(
-> Bacon.once(new Bacon.Error("oop"))
[error()])
describe "Allows wrapped events, for instance, Bacon.Error", ->
expectStreamEvents(
-> Bacon.once(error())
[error()])
describe "Bacon.fromArray", ->
describe "Turns an empty array into an EventStream", ->
expectStreamEvents(
-> Bacon.fromArray([])
[])
describe "Turns a single-element array into an EventStream", ->
expectStreamEvents(
-> Bacon.fromArray([1])
[1])
describe "Turns a longer array into an EventStream", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3])
[1, 2, 3])
describe "Allows wrapped events, for instance, Bacon.Error", ->
expectStreamEvents(
-> Bacon.fromArray([error(), 1])
[error(), 1])
describe "EventStream.concat", ->
describe "provides values from streams in given order and ends when both are exhausted", ->
expectStreamEvents(
->
left = series(2, [1, error(), 2, 3])
right = series(1, [4, 5, 6])
left.concat(right)
[1, error(), 2, 3, 4, 5, 6], unstable)
describe "respects subscriber return value when providing events from left stream", ->
expectStreamEvents(
->
left = repeat(3, [1, 3]).take(3)
right = repeat(2, [1]).take(3)
left.concat(right).takeWhile(lessThan(2))
[1])
describe "respects subscriber return value when providing events from right stream", ->
expectStreamEvents(
->
left = series(3, [1, 2])
right = series(2, [2, 4, 6])
left.concat(right).takeWhile(lessThan(4))
[1, 2, 2])
describe "works with Bacon.never()", ->
expectStreamEvents(
-> Bacon.never().concat(Bacon.never())
[])
describe "works with Bacon.once()", ->
expectStreamEvents(
-> Bacon.once(2).concat(Bacon.once(1))
[2, 1])
describe "works with Bacon.once() and Bacon.never()", ->
expectStreamEvents(
-> Bacon.once(1).concat(Bacon.never())
[1])
describe "works with Bacon.never() and Bacon.once()", ->
expectStreamEvents(
-> Bacon.never().concat(Bacon.once(1))
[1])
describe "works with Bacon.once() and async source", ->
expectStreamEvents(
-> Bacon.once(1).concat(series(1, [2, 3]))
[1, 2, 3])
describe "works with Bacon.once() and Bacon.fromArray()", ->
expectStreamEvents(
-> Bacon.once(1).concat(Bacon.fromArray([2, 3]))
[1, 2, 3], unstable)
describe "Works with synchronized left stream and doAction", ->
expectStreamEvents(
->
bus = new Bacon.Bus()
stream = Bacon.fromArray([1,2]).flatMapLatest (x) ->
Bacon.once(x).concat(Bacon.later(10, x).doAction((x) -> bus.push(x); bus.end()))
stream.onValue ->
bus
[2])
it "toString", ->
expect(Bacon.once(1).concat(Bacon.once(2)).toString()).to.equal("Bacon.once(1).concat(Bacon.once(2))")
describe "EventStream.startWith", ->
describe "provides seed value, then the rest", ->
expectStreamEvents(
->
left = series(1, [1, 2, 3])
left.startWith('pow')
['pow', 1, 2, 3], unstable)
describe "works with synchronous source", ->
expectStreamEvents(
->
left = Bacon.fromArray([1, 2, 3])
left.startWith('pow')
['pow', 1, 2, 3], unstable)
it "toString", ->
expect(Bacon.never().startWith(0).toString()).to.equal("Bacon.never().startWith(0)")
describe "Property.startWith", ->
describe "starts with given value if the Property doesn't have an initial value", ->
expectPropertyEvents(
->
left = series(1, [1, 2, 3]).toProperty()
left.startWith('pow')
['pow', 1, 2, 3], unstable)
describe "works with synchronous source", ->
expectPropertyEvents(
->
left = Bacon.fromArray([1, 2, 3]).toProperty()
left.startWith('pow')
['pow', 1, 2, 3], unstable)
describe "starts with the initial value of the Property if any", ->
expectPropertyEvents(
->
left = series(1, [1, 2, 3]).toProperty(0)
left.startWith('pow')
[0, 1, 2, 3], unstable)
describe "works with combineAsArray", ->
result = null
a = Bacon.constant("lolbal")
result = Bacon.combineAsArray([a.map(true), a.map(true)]).map("right").startWith("wrong")
result.onValue((x) -> result = x)
expect(result).to.equal("right")
it "toString", ->
expect(Bacon.constant(2).startWith(1).toString()).to.equal("Bacon.constant(2).startWith(1)")
describe "EventStream.toProperty", ->
describe "delivers current value and changes to subscribers", ->
expectPropertyEvents(
->
s = new Bacon.Bus()
p = s.toProperty("a")
soon ->
s.push "b"
s.end()
p
["a", "b"])
describe "passes through also Errors", ->
expectPropertyEvents(
-> series(1, [1, error(), 2]).toProperty()
[1, error(), 2])
describe "supports null as value", ->
expectPropertyEvents(
-> series(1, [null, 1, null]).toProperty(null)
[null, null, 1, null])
describe "does not get messed-up by a transient subscriber (bug fix)", ->
expectPropertyEvents(
->
prop = series(1, [1,2,3]).toProperty(0)
prop.subscribe (event) =>
Bacon.noMore
prop
[0, 1, 2, 3])
describe "works with synchronous source", ->
expectPropertyEvents(
-> Bacon.fromArray([1,2,3]).toProperty()
[1,2,3])
expectPropertyEvents(
-> Bacon.fromArray([1,2,3]).toProperty(0)
[0,1,2,3])
it "preserves laziness", ->
calls = 0
id = (x) ->
calls++
x
Bacon.fromArray([1,2,3,4,5]).map(id).toProperty().skip(4).onValue()
expect(calls).to.equal(1)
describe "Property.toEventStream", ->
describe "creates a stream that starts with current property value", ->
expectStreamEvents(
-> series(1, [1, 2]).toProperty(0).toEventStream()
[0, 1, 2], unstable)
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2]).toProperty(0).toEventStream()
[0, 1, 2], unstable)
describe "Property.toProperty", ->
describe "returns the same Property", ->
expectPropertyEvents(
-> Bacon.constant(1).toProperty()
[1])
it "rejects arguments", ->
try
Bacon.constant(1).toProperty(0)
fail()
catch e
describe "Property.map", ->
describe "maps property values", ->
expectPropertyEvents(
->
s = new Bacon.Bus()
p = s.toProperty(1).map(times, 2)
soon ->
s.push 2
s.error()
s.end()
p
[2, 4, error()])
describe "Property.filter", ->
describe "should filter values", ->
expectPropertyEvents(
-> series(1, [1, error(), 2, 3]).toProperty().filter(lessThan(3))
[1, error(), 2])
it "preserves old current value if the updated value is non-matching", ->
s = new Bacon.Bus()
p = s.toProperty().filter(lessThan(2))
p.onValue(=>) # to ensure that property is actualy updated
s.push(1)
s.push(2)
values = []
p.onValue((v) => values.push(v))
expect(values).to.deep.equal([1])
describe "can filter by Property value", ->
expectPropertyEvents(
->
src = series(2, [1, 2, 3, 4]).delay(t(1)).toProperty()
ok = series(2, [false, true, true, false]).toProperty()
src.filter(ok)
[2, 3])
describe "Property.take(1)", ->
describe "takes the Initial event", ->
expectPropertyEvents(
-> series(1, [1,2,3]).toProperty(0).take(1)
[0])
describe "takes the first Next event, if no Initial value", ->
expectPropertyEvents(
-> series(1, [1,2,3]).toProperty().take(1)
[1])
describe "works for constants", ->
expectPropertyEvents(
-> Bacon.constant(1)
[1])
describe "works for never-ending Property", ->
expectPropertyEvents(
-> repeat(1, [1,2,3]).toProperty(0).take(1)
[0])
expectPropertyEvents(
-> repeat(1, [1,2,3]).toProperty().take(1)
[1])
describe "Bacon.once().take(1)", ->
describe "works", ->
expectStreamEvents(
-> Bacon.once(1).take(1)
[1])
describe "Property.takeWhile", ->
describe "takes while predicate is true", ->
expectPropertyEvents(
->
series(1, [1, error("wat"), 2, 3])
.toProperty().takeWhile(lessThan(3))
[1, error("wat"), 2])
describe "extracts field values", ->
expectPropertyEvents(
->
series(1, [{good:true, value:"yes"}, {good:false, value:"no"}])
.toProperty().takeWhile(".good").map(".value")
["yes"])
describe "can filter by Property value", ->
expectPropertyEvents(
->
src = series(1, [1,1,2,3,4,4,8,7]).toProperty()
odd = src.map((x) -> x % 2)
src.takeWhile(odd)
[1,1])
describe "works with never-ending Property", ->
expectPropertyEvents(
->
repeat(1, [1, error("wat"), 2, 3])
.toProperty().takeWhile(lessThan(3))
[1, error("wat"), 2])
describe "Property.takeUntil", ->
describe "takes elements from source until an event appears in the other stream", ->
expectPropertyEvents(
-> series(2, [1,2,3]).toProperty().takeUntil(Bacon.later(t(3)))
[1])
describe "works with errors", ->
expectPropertyEvents(
->
src = repeat(2, [1, error(), 3])
stopper = repeat(5, ["stop!"])
src.toProperty(0).takeUntil(stopper)
[0, 1, error()])
it "toString", ->
expect(Bacon.constant(1).takeUntil(Bacon.never()).toString()).to.equal("Bacon.constant(1).takeUntil(Bacon.never())")
describe "Property.delay", ->
describe "delivers initial value and changes", ->
expectPropertyEvents(
-> series(1, [1,2,3]).toProperty(0).delay(t(1))
[0,1,2,3])
describe "delays changes", ->
expectStreamEvents(
->
series(2, [1,2,3])
.toProperty()
.delay(t(2)).changes().takeUntil(Bacon.later(t(5)))
[1], unstable)
describe "does not delay initial value", ->
expectPropertyEvents(
-> series(3, [1]).toProperty(0).delay(1).takeUntil(Bacon.later(t(2)))
[0])
it "toString", ->
expect(Bacon.constant(0).delay(1).toString()).to.equal("Bacon.constant(0).delay(1)")
describe "Property.debounce", ->
describe "delivers initial value and changes", ->
expectPropertyEvents(
-> series(2, [1,2,3]).toProperty(0).debounce(t(1))
[0,1,2,3])
describe "throttles changes, but not initial value", ->
expectPropertyEvents(
-> series(1, [1,2,3]).toProperty(0).debounce(t(4))
[0,3])
describe "works without initial value", ->
expectPropertyEvents(
-> series(2, [1,2,3]).toProperty().debounce(t(4))
[3])
describe "works with Bacon.constant (bug fix)", ->
expectPropertyEvents(
-> Bacon.constant(1).debounce(1)
[1])
it "toString", ->
expect(Bacon.constant(0).debounce(1).toString()).to.equal("Bacon.constant(0).debounce(1)")
describe "Property.throttle", ->
describe "throttles changes, but not initial value", ->
expectPropertyEvents(
-> series(1, [1,2,3]).toProperty(0).throttle(t(4))
[0,3])
describe "works with Bacon.once (bug fix)", ->
expectPropertyEvents(
-> Bacon.once(1).toProperty().throttle(1)
[1])
it "toString", ->
expect(Bacon.constant(0).throttle(1).toString()).to.equal("Bacon.constant(0).throttle(1)")
describe "Property.endOnError", ->
describe "terminates on Error", ->
expectPropertyEvents(
-> series(2, [1, error(), 2]).toProperty().endOnError()
[1, error()])
describe "Property.awaiting(other)", ->
describe "indicates whether p1 has produced output after p2 (or only the former has output so far)", ->
expectPropertyEvents(
-> series(2, [1, 1]).toProperty().awaiting(series(3, [2]))
[false, true, false, true])
describe "Property.skipDuplicates", ->
describe "drops duplicates", ->
expectPropertyEvents(
-> series(1, [1, 2, error(), 2, 3, 1]).toProperty(0).skipDuplicates()
[0, 1, 2, error(), 3, 1])
describe "Doesn't skip initial value (bug fix #211)", ->
b = new Bacon.Bus()
p = b.toProperty()
p.onValue -> # force property update
s = p.skipDuplicates()
b.push 'foo'
describe "series 1", ->
expectPropertyEvents((-> s.take(1)), ["foo"])
describe "series 2", ->
expectPropertyEvents((-> s.take(1)), ["foo"])
describe "series 3", ->
expectPropertyEvents((-> s.take(1)), ["foo"])
describe "Property.changes", ->
describe "sends property change events", ->
expectStreamEvents(
->
s = new Bacon.Bus()
p = s.toProperty("a").changes()
soon ->
s.push "b"
s.error()
s.end()
p
["b", error()])
describe "works with synchronous source", ->
expectStreamEvents(
-> Bacon.fromArray([1, 2, 3]).toProperty(0).changes()
[1, 2, 3])
describe "Property.combine", ->
describe "combines latest values of two properties, with given combinator function, passing through errors", ->
expectPropertyEvents(
->
left = series(2, [1, error(), 2, 3]).toProperty()
right = series(2, [4, error(), 5, 6]).delay(t(1)).toProperty()
left.combine(right, add)
[5, error(), error(), 6, 7, 8, 9])
describe "also accepts a field name instead of combinator function", ->
expectPropertyEvents(
->
left = series(1, [[1]]).toProperty()
right = series(2, [[2]]).toProperty()
left.combine(right, ".concat")
[[1, 2]])
describe "combines with null values", ->
expectPropertyEvents(
->
left = series(1, [null]).toProperty()
right = series(1, [null]).toProperty()
left.combine(right, (l, r)-> [l, r])
[[null, null]])
it "unsubscribes when initial value callback returns Bacon.noMore", ->
calls = 0
bus = new Bacon.Bus()
other = Bacon.constant(["rolfcopter"])
bus.toProperty(["lollerskates"]).combine(other, ".concat").subscribe (e) ->
if !e.isInitial()
calls += 1
Bacon.noMore
bus.push(["fail whale"])
expect(calls).to.equal 0
describe "does not duplicate same error from two streams", ->
expectPropertyEvents(
->
src = series(1, ["same", error()])
Bacon.combineAsArray(src, src)
[["same", "same"], error()])
it "toString", ->
expect(Bacon.constant(1).combine(Bacon.constant(2), (->)).toString()).to.equal("Bacon.constant(1).combine(Bacon.constant(2),function)")
describe "with random methods on Array.prototype", ->
it "doesn't throw exceptions", ->
try
Array.prototype.foo = "bar"
events = []
Bacon.once("a").combine(Bacon.once("b"), (a,b) -> [a,b]).onValue (v) ->
events.push(v)
expect(events).to.deep.equal([["a", "b"]])
finally
delete Array.prototype.foo
describe "EventStream.combine", ->
describe "converts stream to Property, then combines", ->
expectPropertyEvents(
->
left = series(2, [1, error(), 2, 3])
right = series(2, [4, error(), 5, 6]).delay(t(1)).toProperty()
left.combine(right, add)
[5, error(), error(), 6, 7, 8, 9])
describe "Bacon.groupSimultaneous", ->
describe "groups simultaneous values in to arrays", ->
expectStreamEvents(
->
src = series(1, [1,2])
stream = src.merge(src.map((x) -> x * 2))
Bacon.groupSimultaneous(stream)
[[[1, 2]], [[2,4]]])
describe "groups simultaneous values from multiple sources in to arrays", ->
expectStreamEvents(
->
src = series(1, [1,2])
stream = src.merge(src.map((x) -> x * 2))
stream2 = src.map (x) -> x * 4
Bacon.groupSimultaneous(stream, stream2)
[[[1, 2], [4]], [[2,4], [8]]])
describe "accepts an array or multiple args", ->
expectStreamEvents(
-> Bacon.groupSimultaneous([Bacon.later(1, 1), Bacon.later(2, 2)])
[[[1],[]], [[], [2]]])
describe "returns empty stream for zero sources", ->
expectStreamEvents(
-> Bacon.groupSimultaneous()
[])
expectStreamEvents(
-> Bacon.groupSimultaneous([])
[])
describe "works with synchronous sources", ->
expectStreamEvents(
-> Bacon.groupSimultaneous(Bacon.fromArray([1,2]))
[[[1]], [[2]]])
expectStreamEvents(
-> Bacon.groupSimultaneous(Bacon.fromArray([1,2]).mapEnd(3))
[[[1]], [[2]], [[3]]])
it "toString", ->
expect(Bacon.groupSimultaneous(Bacon.never()).toString()).to.equal("Bacon.groupSimultaneous(Bacon.never())")
describe "Property update is atomic", ->
describe "in a diamond-shaped combine() network", ->
expectPropertyEvents(
->
a = series(1, [1, 2]).toProperty()
b = a.map (x) -> x
c = a.map (x) -> x
b.combine(c, (x, y) -> x + y)
[2, 4])
describe "in a triangle-shaped combine() network", ->
expectPropertyEvents(
->
a = series(1, [1, 2]).toProperty()
b = a.map (x) -> x
a.combine(b, (x, y) -> x + y)
[2, 4])
describe "when filter is involved", ->
expectPropertyEvents(
->
a = series(1, [1, 2]).toProperty()
b = a.map((x) -> x).filter(true)
a.combine(b, (x, y) -> x + y)
[2, 4])
describe "when root property is based on combine*", ->
expectPropertyEvents(
->
a = series(1, [1, 2]).toProperty().combine(Bacon.constant(0), (x, y) -> x)
b = a.map (x) -> x
c = a.map (x) -> x
b.combine(c, (x, y) -> x + y)
[2, 4])
describe "when root is not a Property", ->
expectPropertyEvents(
->
a = series(1, [1, 2])
b = a.map (x) -> x
c = a.map (x) -> x
b.combine(c, (x, y) -> x + y)
[2, 4])
it "calls combinator function for valid combos only", ->
calls = 0
results = []
combinator = (x,y) ->
calls++
x+y
src = new Bacon.Bus()
prop = src.toProperty()
out = prop.map((x) -> x)
.combine(prop.map((x) -> x * 2), combinator)
.doAction(->)
.combine(prop, (x,y) -> x)
out.onValue((x) -> results.push(x))
src.push(1)
src.push(2)
expect(results).to.deep.equal([3,6])
expect(calls).to.equal(2)
describe "yet respects subscriber return values (bug fix)", ->
expectStreamEvents(
-> Bacon.repeatedly(t(1), [1, 2, 3]).toProperty().changes().take(1)
[1])
describe "independent observables created within the dispatch loop", ->
it "combineAsArray", ->
calls = 0
Bacon.once(1).onValue ->
Bacon.combineAsArray([Bacon.constant(1)]).onValue ->
calls++
expect(calls).to.equal(1)
it "combineAsArray.startWith", ->
result = null
Bacon.once(1).onValue ->
a = Bacon.constant("lolbal")
s = Bacon.combineAsArray([a, a]).map("right").startWith("wrong");
s.onValue((x) -> result = x)
expect(result).to.equal("right")
it "stream.startWith", ->
result = null
Bacon.once(1).onValue ->
s = Bacon.later(1).startWith(0)
s.onValue((x) -> result = x)
expect(result).to.equal(0)
it "combineAsArray.changes.startWith", ->
result = null
Bacon.once(1).onValue ->
a = Bacon.constant("lolbal")
s = Bacon.combineAsArray([a, a]).changes().startWith("right")
s.onValue((x) -> result = x)
expect(result).to.equal("right")
it "flatMap", ->
result = null
Bacon.once(1).onValue ->
a = Bacon.constant("lolbal")
s = a.flatMap((x) -> Bacon.once(x))
s.onValue((x) -> result = x)
expect(result).to.equal("lolbal")
it "awaiting", ->
result = null
Bacon.once(1).onValue ->
a = Bacon.constant(1)
s = a.awaiting(a.map(->))
s.onValue((x) -> result = x)
expect(result).to.equal(false)
it "concat", ->
result = []
Bacon.once(1).onValue ->
s = Bacon.once(1).concat(Bacon.once(2))
s.onValue((x) -> result.push(x))
expect(result).to.deep.equal([1,2])
it "Property.delay", ->
result = []
Bacon.once(1).onValue ->
c = Bacon.constant(1)
s = Bacon.combineAsArray([c, c]).delay(1).map(".0")
s.onValue((x) -> result.push(x))
expect(result).to.deep.equal([1])
describe "when subscribing within the dispatch loop", ->
describe "single subscriber", ->
describe "up-to-date values are used (skipped bounce)", ->
expectStreamEvents(
->
src = series(1, [1,2])
trigger = src.map((x) -> x)
trigger.onValue ->
value = src.toProperty()
value.onValue ->
trigger.flatMap ->
value.take(1)
[1,2])
describe "delayed bounce (TODO: how to name better)", ->
expectStreamEvents(
->
src = series(1, [1,2])
trigger = src.map((x) -> x)
trigger.onValue ->
value = src.filter((x) -> x == 1).toProperty(0)
value.onValue ->
trigger.flatMap ->
value.take(1)
[0, 1])
describe "multiple subscribers", ->
describe "up-to-date values are used (skipped bounce)", ->
expectStreamEvents(
->
src = series(1, [1,2])
trigger = src.map((x) -> x)
trigger.onValue ->
value = src.toProperty()
value.onValue ->
trigger.flatMap ->
value.onValue(->)
value.take(1)
[1,2])
describe "delayed bounce (TODO: how to name better)", ->
expectStreamEvents(
->
src = series(1, [1,2])
trigger = src.map((x) -> x)
trigger.onValue ->
value = src.filter((x) -> x == 1).toProperty(0)
value.onValue ->
trigger.flatMap ->
value.onValue(->)
value.take(1)
[0, 1])
describe "delayed bounce in case Property ended (bug fix)", ->
expectStreamEvents(
->
bus = new Bacon.Bus()
root = Bacon.once(0).toProperty()
root.onValue ->
Bacon.later(1).onValue ->
root.map(-> 1).subscribe (event) ->
if event.isEnd()
bus.end()
else
bus.push(event.value())
bus
[1])
describe "poking for errors 2", ->
expectStreamEvents(
->
bus = new Bacon.Bus()
root = Bacon.sequentially(1, [1,2]).toProperty()
root.subscribe (event) ->
outdatedChild = root.filter((x) -> x == 1).map((x) -> x)
outdatedChild.onValue(->) # sets value but will be outdated at value 2
Bacon.later(3).onValue ->
outdatedChild.subscribe (event) ->
if event.isEnd()
bus.end()
else
bus.push(event.value())
bus
[1]
)
describe "Bacon.combineAsArray", ->
describe "initial value", ->
event = null
before ->
prop = Bacon.constant(1)
Bacon.combineAsArray(prop).subscribe (x) ->
event = x if x.hasValue()
it "is output as Initial event", ->
expect(event.isInitial()).to.equal(true)
describe "combines properties and latest values of streams, into a Property having arrays as values", ->
expectPropertyEvents(
->
stream = series(1, ["a", "b"])
Bacon.combineAsArray([Bacon.constant(1), Bacon.constant(2), stream])
[[1, 2, "a"], [1, 2, "b"]])
describe "Works with streams provided as a list of arguments as well as with a single array arg", ->
expectPropertyEvents(
->
stream = series(1, ["a", "b"])
Bacon.combineAsArray(Bacon.constant(1), Bacon.constant(2), stream)
[[1, 2, "a"], [1, 2, "b"]])
describe "works with single property", ->
expectPropertyEvents(
->
Bacon.combineAsArray([Bacon.constant(1)])
[[1]])
describe "works with single stream", ->
expectPropertyEvents(
->
Bacon.combineAsArray([Bacon.once(1)])
[[1]])
describe "works with arrays as values, with first array being empty (bug fix)", ->
expectPropertyEvents(
->
Bacon.combineAsArray([Bacon.constant([]), Bacon.constant([1])])
([[[], [1]]]))
describe "works with arrays as values, with first array being non-empty (bug fix)", ->
expectPropertyEvents(
->
Bacon.combineAsArray([Bacon.constant([1]), Bacon.constant([2])])
([[[1], [2]]]))
describe "works with empty array", ->
expectPropertyEvents(
-> Bacon.combineAsArray([])
[[]])
describe "works with empty args list", ->
expectPropertyEvents(
-> Bacon.combineAsArray()
[[]])
describe "accepts constant values instead of Observables", ->
expectPropertyEvents(
-> Bacon.combineAsArray(Bacon.constant(1), 2, 3)
[[1,2,3]])
it "preserves laziness", ->
calls = 0
id = (x) ->
calls++
x
Bacon.combineAsArray(Bacon.fromArray([1,2,3,4,5]).map(id)).skip(4).onValue()
expect(calls).to.equal(1)
it "toString", ->
expect(Bacon.combineAsArray(Bacon.never()).toString()).to.equal("Bacon.combineAsArray(Bacon.never())")
describe "Bacon.combineWith", ->
describe "combines n properties, streams and constants using an n-ary function", ->
expectPropertyEvents(
->
stream = series(1, [1, 2])
f = (x, y, z) -> x + y + z
Bacon.combineWith(f, stream, Bacon.constant(10), 100)
[111, 112])
describe "works with single input", ->
expectPropertyEvents(
->
stream = series(1, [1, 2])
f = (x) -> x * 2
Bacon.combineWith(f, stream)
[2, 4])
describe "works with 0 inputs (results to a constant)", ->
expectPropertyEvents(
->
Bacon.combineWith(-> 1)
[1])
it "toString", ->
expect(Bacon.combineWith((->), Bacon.never()).toString()).to.equal("Bacon.combineWith(function,Bacon.never())")
describe "Boolean logic", ->
describe "combines Properties with and()", ->
expectPropertyEvents(
-> Bacon.constant(true).and(Bacon.constant(false))
[false])
describe "combines Properties with or()", ->
expectPropertyEvents(
-> Bacon.constant(true).or(Bacon.constant(false))
[true])
describe "inverts property with not()", ->
expectPropertyEvents(
-> Bacon.constant(true).not()
[false])
describe "accepts constants instead of properties", ->
describe "true and false", ->
expectPropertyEvents(
-> Bacon.constant(true).and(false)
[false])
describe "true and true", ->
expectPropertyEvents(
-> Bacon.constant(true).and(true)
[true])
describe "true or false", ->
expectPropertyEvents(
-> Bacon.constant(true).or(false)
[true])
it "toString", ->
expect(Bacon.constant(1).and(Bacon.constant(2).not()).or(Bacon.constant(3)).toString()).to.equal("Bacon.constant(1).and(Bacon.constant(2).not()).or(Bacon.constant(3))")
describe "Bacon.mergeAll", ->
describe ("merges all given streams"), ->
expectStreamEvents(
->
Bacon.mergeAll([
series(3, [1, 2])
series(3, [3, 4]).delay(t(1))
series(3, [5, 6]).delay(t(2))])
[1, 3, 5, 2, 4, 6], unstable)
describe ("supports n-ary syntax"), ->
expectStreamEvents(
->
Bacon.mergeAll(
series(3, [1, 2])
series(3, [3, 4]).delay(t(1))
series(3, [5, 6]).delay(t(2)))
[1, 3, 5, 2, 4, 6], unstable)
describe "works with a single stream", ->
expectStreamEvents(
-> Bacon.mergeAll([Bacon.once(1)])
[1])
expectStreamEvents(
-> Bacon.mergeAll(Bacon.once(1))
[1])
describe "returns empty stream for zero input", ->
expectStreamEvents(
-> Bacon.mergeAll([])
[])
expectStreamEvents(
-> Bacon.mergeAll()
[])
it "toString", ->
expect(Bacon.mergeAll(Bacon.never()).toString()).to.equal("Bacon.mergeAll(Bacon.never())")
describe "Property.sampledBy(stream)", ->
describe "samples property at events, resulting to EventStream", ->
expectStreamEvents(
->
prop = series(2, [1, 2]).toProperty()
stream = repeat(3, ["troll"]).take(4)
prop.sampledBy(stream)
[1, 2, 2, 2])
describe "includes errors from both Property and EventStream", ->
expectStreamEvents(
->
prop = series(2, [error(), 2]).toProperty()
stream = series(3, [error(), "troll"])
prop.sampledBy(stream)
[error(), error(), 2])
describe "ends when sampling stream ends", ->
expectStreamEvents(
->
prop = repeat(2, [1, 2]).toProperty()
stream = repeat(2, [""]).delay(t(1)).take(4)
prop.sampledBy(stream)
[1, 2, 1, 2])
describe "accepts optional combinator function f(Vp, Vs)", ->
expectStreamEvents(
->
prop = series(2, ["a", "b"]).toProperty()
stream = series(2, ["1", "2", "1", "2"]).delay(t(1))
prop.sampledBy(stream, add)
["a1", "b2", "b1", "b2"])
describe "allows method name instead of function too", ->
expectStreamEvents(
->
Bacon.constant([1]).sampledBy(Bacon.once([2]), ".concat")
[[1, 2]])
describe "works with same origin", ->
expectStreamEvents(
->
src = series(2, [1, 2])
src.toProperty().sampledBy(src)
[1, 2])
expectStreamEvents(
->
src = series(2, [1, 2])
src.toProperty().sampledBy(src.map(times, 2))
[1, 2])
describe "uses updated property after combine", ->
latter = (a, b) -> b
expectPropertyEvents(
->
src = series(2, ["b", "c"]).toProperty("a")
combined = Bacon.constant().combine(src, latter)
src.sampledBy(combined, add)
["aa", "bb", "cc"])
describe "uses updated property after combine with subscriber", ->
latter = (a, b) -> b
expectPropertyEvents(
->
src = series(2, ["b", "c"]).toProperty("a")
combined = Bacon.constant().combine(src, latter)
combined.onValue(->)
src.sampledBy(combined, add)
["aa", "bb", "cc"])
describe "skips samplings that occur before the property gets its first value", ->
expectStreamEvents(
->
p = series(5, [1]).toProperty()
p.sampledBy(series(3, [0]))
[])
expectStreamEvents(
->
p = series(5, [1, 2]).toProperty()
p.sampledBy(series(3, [0, 0, 0, 0]))
[1, 1, 2], unstable)
expectPropertyEvents(
->
p = series(5, [1, 2]).toProperty()
p.sampledBy(series(3, [0, 0, 0, 0]).toProperty())
[1, 1, 2], unstable)
describe "works with stream of functions", ->
f = ->
expectStreamEvents(
->
p = series(1, [f]).toProperty()
p.sampledBy(series(1, [1, 2, 3]))
[f, f, f])
describe "works with synchronous sampler stream", ->
expectStreamEvents(
-> Bacon.constant(1).sampledBy(Bacon.fromArray([1,2,3]))
[1,1,1], unstable)
expectStreamEvents(
-> Bacon.later(1, 1).toProperty().sampledBy(Bacon.fromArray([1,2,3]))
[])
describe "laziness", ->
calls = 0
before (done) ->
id = (x) ->
calls++
x
sampler = Bacon.later(5).map(id)
property = repeat(1, [1]).toProperty().map(id)
sampled = property.sampledBy sampler
sampled.onValue()
sampled.onEnd(done)
it "preserves laziness", ->
expect(calls).to.equal(1)
it "toString", ->
expect(Bacon.constant(0).sampledBy(Bacon.never()).toString()).to.equal("Bacon.constant(0).sampledBy(Bacon.never(),function)")
describe "Property.sampledBy(property)", ->
describe "samples property at events, resulting to a Property", ->
expectPropertyEvents(
->
prop = series(2, [1, 2]).toProperty()
sampler = repeat(3, ["troll"]).take(4).toProperty()
prop.sampledBy(sampler)
[1, 2, 2, 2])
describe "works on an event stream by automatically converting to property", ->
expectPropertyEvents(
->
stream = series(2, [1, 2])
sampler = repeat(3, ["troll"]).take(4).toProperty()
stream.sampledBy(sampler)
[1, 2, 2, 2])
describe "accepts optional combinator function f(Vp, Vs)", ->
expectPropertyEvents(
->
prop = series(2, ["a", "b"]).toProperty()
sampler = series(2, ["1", "2", "1", "2"]).delay(t(1)).toProperty()
prop.sampledBy(sampler, add)
["a1", "b2", "b1", "b2"])
describe "Property.sample", ->
describe "samples property by given interval", ->
expectStreamEvents(
->
prop = series(2, [1, 2]).toProperty()
prop.sample(t(3)).take(4)
[1, 2, 2, 2])
describe "includes all errors", ->
expectStreamEvents(
->
prop = series(2, [1, error(), 2]).toProperty()
prop.sample(t(5)).take(2)
[error(), 1, 2], unstable)
describe "works with synchronous source", ->
expectStreamEvents(
->
prop = Bacon.constant(1)
prop.sample(t(3)).take(4)
[1, 1, 1, 1])
it "toString", ->
expect(Bacon.constant(0).sample(1).toString()).to.equal("Bacon.constant(0).sample(1)")
describe "EventStream.errors", ->
describe "Includes errors only", ->
expectStreamEvents(
-> series(1, [1, error(), 2]).errors()
[error()])
it "toString", ->
expect(Bacon.never().errors().toString()).to.equal("Bacon.never().errors()")
describe "EventStream.scan", ->
describe "accumulates values with given seed and accumulator function, passing through errors", ->
expectPropertyEvents(
-> series(1, [1, 2, error(), 3]).scan(0, add)
[0, 1, 3, error(), 6])
describe "also works with method name", ->
expectPropertyEvents(
-> series(1, [[1], [2]]).scan([], ".concat")
[[], [1], [1, 2]])
it "yields the seed value immediately", ->
outputs = []
bus = new Bacon.Bus()
bus.scan(0, -> 1).onValue((value) -> outputs.push(value))
expect(outputs).to.deep.equal([0])
describe "yields null seed value", ->
expectPropertyEvents(
-> series(1, [1]).scan(null, ->1)
[null, 1])
describe "works with synchronous streams", ->
expectPropertyEvents(
-> Bacon.fromArray([1,2,3]).scan(0, ((x,y)->x+y))
[0,1,3,6])
describe "calls accumulator function once per value", ->
count = 0
expectPropertyEvents(
-> series(2, [1,2,3]).scan(0, (x,y) -> count++; x + y)
[0, 1, 3, 6]
)
it "calls accumulator once per value", ->
expect(count).to.equal(3)
describe "EventStream.fold", ->
describe "folds stream into a single-valued Property, passes through errors", ->
expectPropertyEvents(
-> series(1, [1, 2, error(), 3]).fold(0, add)
[error(), 6])
describe "has reduce as synonym", ->
# Must call .reduce (not .fold) so the synonym itself is exercised;
# expected events match the .fold spec above: the error passes through,
# then the single folded value is emitted on stream end.
expectPropertyEvents(
-> series(1, [1, 2, error(), 3]).reduce(0, add)
[error(), 6])
describe "works with synchronous source", ->
expectPropertyEvents(
-> Bacon.fromArray([1, 2, error(), 3]).fold(0, add)
[error(), 6])
describe "Property.scan", ->
describe "with Init value, starts with f(seed, init)", ->
expectPropertyEvents(
-> series(1, [2,3]).toProperty(1).scan(0, add)
[1, 3, 6])
describe "without Init value, starts with seed", ->
expectPropertyEvents(
-> series(1, [2,3]).toProperty().scan(0, add)
[0, 2, 5])
describe "treats null seed value like any other value", ->
expectPropertyEvents(
-> series(1, [1]).toProperty().scan(null, add)
[null, 1])
expectPropertyEvents(
-> series(1, [2]).toProperty(1).scan(null, add)
[1, 3])
describe "for synchronous source", ->
describe "with Init value, starts with f(seed, init)", ->
expectPropertyEvents(
-> Bacon.fromArray([2,3]).toProperty(1).scan(0, add)
[1, 3, 6])
describe "without Init value, starts with seed", ->
expectPropertyEvents(
-> Bacon.fromArray([2,3]).toProperty().scan(0, add)
[0, 2, 5])
describe "works with synchronously responding empty source", ->
expectPropertyEvents(
-> Bacon.never().toProperty(1).scan(0, add)
[1])
describe "EventStream.withStateMachine", ->
f = (sum, event) ->
if event.hasValue()
[sum + event.value(), []]
else if event.isEnd()
[sum, [new Bacon.Next(-> sum), event]]
else
[sum, [event]]
describe "runs state machine on the stream", ->
expectStreamEvents(
-> Bacon.fromArray([1,2,3]).withStateMachine(0, f)
[6])
describe "Property.withStateMachine", ->
describe "runs state machine on the stream", ->
expectPropertyEvents(
-> Bacon.fromArray([1,2,3]).toProperty().withStateMachine(0, (sum, event) ->
if event.hasValue()
[sum + event.value(), []]
else if event.isEnd()
[sum, [new Bacon.Next(-> sum), event]]
else
[sum, [event]])
[6])
describe "Property.fold", ->
describe "Folds Property into a single-valued one", ->
expectPropertyEvents(
-> series(1, [2,3]).toProperty(1).fold(0, add)
[6])
describe "EventStream.diff", ->
describe "apply diff function to previous and current values, passing through errors", ->
expectPropertyEvents(
-> series(1, [1, 2, error(), 3]).diff(0, add)
[1, 3, error(), 5])
describe "also works with method name", ->
expectPropertyEvents(
-> series(1, [[1], [2]]).diff([0], ".concat")
[[0, 1], [1, 2]])
it "does not yields the start value immediately", ->
outputs = []
bus = new Bacon.Bus()
bus.diff(0, -> 1).onValue((value) -> outputs.push(value))
expect(outputs).to.deep.equal([])
it "toString", ->
expect(Bacon.once(1).diff(0, (->)).toString()).to.equal("Bacon.once(1).diff(0,function)")
describe "Property.diff", ->
describe "with Init value, starts with f(start, init)", ->
expectPropertyEvents(
-> series(1, [2,3]).toProperty(1).diff(0, add)
[1, 3, 5])
describe "without Init value, waits for the first value", ->
expectPropertyEvents(
-> series(1, [2,3]).toProperty().diff(0, add)
[2, 5])
describe "treats null start value like any other value", ->
expectPropertyEvents(
-> series(1, [1]).toProperty().diff(null, add)
[1])
expectPropertyEvents(
-> series(1, [2]).toProperty(1).diff(null, add)
[1, 3])
describe "EventStream.zip", ->
describe "pairwise combines values from two streams", ->
expectStreamEvents(
-> series(1, [1, 2, 3]).zip(series(1, ['a', 'b', 'c']))
[[1, 'a'], [2, 'b'], [3, 'c']])
describe "passes through errors", ->
expectStreamEvents(
-> series(2, [1, error(), 2]).zip(series(2, ['a', 'b']).delay(1))
[[1, 'a'], error(), [2, 'b']])
describe "completes as soon as possible", ->
expectStreamEvents(
-> series(1, [1]).zip(series(1, ['a', 'b', 'c']))
[[1, 'a']])
describe "can zip an observable with itself", ->
expectStreamEvents(
->
obs = series(1, ['a', 'b', 'c'])
obs.zip(obs.skip(1))
[['a', 'b'], ['b', 'c']])
it "toString", ->
expect(Bacon.never().zip(Bacon.once(1)).toString()).to.equal("Bacon.never().zip(Bacon.once(1))")
describe "Property.zip", ->
describe "pairwise combines values from two properties", ->
expectStreamEvents(
-> series(1, [1, 2, 3]).toProperty().zip(series(1, ['a', 'b', 'c']).toProperty())
[[1, 'a'], [2, 'b'], [3, 'c']], { unstable })
describe "Bacon.zipAsArray", ->
describe "zips an array of streams into a stream of arrays", ->
expectStreamEvents(
->
obs = series(1, [1, 2, 3, 4])
Bacon.zipAsArray([obs, obs.skip(1), obs.skip(2)])
[[1 , 2 , 3], [2 , 3 , 4]])
describe "supports n-ary syntax", ->
expectStreamEvents(
->
obs = series(1, [1, 2, 3, 4])
Bacon.zipAsArray(obs, obs.skip(1))
[[1 , 2], [2 , 3], [3, 4]])
describe "accepts Properties as well as EventStreams", ->
expectStreamEvents(
->
obs = series(1, [1, 2, 3, 4])
Bacon.zipAsArray(obs, obs.skip(1), Bacon.constant(5))
[[1 , 2, 5]])
describe "works with single stream", ->
expectStreamEvents(
->
obs = series(1, [1, 2])
Bacon.zipAsArray([obs])
[[1], [2]])
expectStreamEvents(
->
obs = series(1, [1, 2])
Bacon.zipAsArray(obs)
[[1], [2]])
describe "works with 0 streams (=Bacon.never())", ->
expectStreamEvents(
-> Bacon.zipAsArray([])
[])
expectStreamEvents(
-> Bacon.zipAsArray()
[])
it "toString", ->
expect(Bacon.zipAsArray(Bacon.never(), Bacon.never()).toString()).to.equal("Bacon.zipAsArray(Bacon.never(),Bacon.never())")
describe "Bacon.zipWith", ->
describe "zips an array of streams with given function", ->
expectStreamEvents(
->
obs = series(1, [1, 2, 3, 4])
Bacon.zipWith([obs, obs.skip(1), obs.skip(2)], ((x,y,z) -> (x + y + z)))
[1 + 2 + 3, 2 + 3 + 4])
describe "supports n-ary syntax", ->
expectStreamEvents(
->
obs = series(1, [1, 2, 3, 4])
f = ((x,y,z) -> (x + y + z))
Bacon.zipWith(f, obs, obs.skip(1), obs.skip(2))
[1 + 2 + 3, 2 + 3 + 4])
describe "works with single stream", ->
expectStreamEvents(
->
obs = series(1, [1,2])
f = (x) -> x * 2
Bacon.zipWith(f, obs)
[1 * 2, 2 * 2])
describe "works with 0 streams (=Bacon.never())", ->
expectStreamEvents(
->
Bacon.zipWith([], ->)
[])
expectStreamEvents(
->
Bacon.zipWith(->)
[])
it "toString", ->
expect(Bacon.zipWith((->), Bacon.never()).toString()).to.equal("Bacon.zipWith(function,Bacon.never())")
describe "Bacon.when", ->
describe "synchronizes on join patterns", ->
expectStreamEvents(
->
[a,b,_] = ['a','b','_']
as = series(1, [a, _, a, a, _, a, _, _, a, a]).filter((x) -> x == a)
bs = series(1, [_, b, _, _, b, _, b, b, _, _]).filter((x) -> x == b)
Bacon.when(
[as, bs], (a,b) -> a + b,
[as], (a) -> a)
['a', 'ab', 'a', 'ab', 'ab', 'ab'], unstable)
describe "consider the join patterns from top to bottom", ->
expectStreamEvents(
->
[a,b,_] = ['a','b','_']
as = series(1, [a, _, a, a, _, a, _, _, a, a]).filter((x) -> x == a)
bs = series(1, [_, b, _, _, b, _, b, b, _, _]).filter((x) -> x == b)
Bacon.when(
[as], (a) -> a,
[as, bs], (a,b) -> a + b)
['a', 'a', 'a', 'a', 'a', 'a'])
describe "handles any number of join patterns", ->
expectStreamEvents(
->
[a,b,c,_] = ['a','b','c','_']
as = series(1, [a, _, a, _, a, _, a, _, _, _, a, a]).filter((x) -> x == a)
bs = series(1, [_, b, _, _, _, b, _, b, _, b, _, _]).filter((x) -> x == b)
cs = series(1, [_, _, _, c, _, _, _, _, c, _, _, _]).filter((x) -> x == c)
Bacon.when(
[as, bs, cs], (a,b,c) -> a + b + c,
[as, bs], (a,b) -> a + b,
[as], (a) -> a)
['a', 'ab', 'a', 'abc', 'abc', 'ab'], unstable)
describe "does'nt synchronize on properties", ->
expectStreamEvents(
->
p = repeat(1, ["p"]).take(100).toProperty()
s = series(3, ["1", "2", "3"])
Bacon.when(
[p,s], (p, s) -> p + s)
["p1", "p2", "p3"])
expectStreamEvents(
->
p = series(3, ["p"]).toProperty()
s = series(1, ["1"])
Bacon.when(
[p,s], (p, s) -> p + s)
[])
expectStreamEvents(
->
p = repeat(1, ["p"]).take(100).toProperty()
s = series(3, ["1", "2", "3"]).toProperty()
Bacon.when(
[p,s], (p, s) -> p + s)
[])
expectStreamEvents(
->
[a,b,c,_] = ['a','b','c','_']
as = series(1, [a, _, a, _, a, _, a, _, _, _, a, _, a]).filter((x) -> x == a)
bs = series(1, [_, b, _, _, _, b, _, b, _, b, _, _, _]).filter((x) -> x == b)
cs = series(1, [_, _, _, c, _, _, _, _, c, _, _, c, _]).filter((x) -> x == c).map(1).scan 0, ((x,y) -> x + y)
Bacon.when(
[as, bs, cs], (a,b,c) -> a + b + c,
[as], (a) -> a)
['a', 'ab0', 'a', 'ab1', 'ab2', 'ab3'], unstable)
describe "doesn't output before properties have values", ->
expectStreamEvents(
->
p = series(2, ["p"])
s = series(1, ["s"])
Bacon.when(
[s, p], (s, p) -> s + p)
["sp"])
describe "returns Bacon.never() on the empty list of patterns", ->
expectStreamEvents(
->
Bacon.when()
[])
describe "returns Bacon.never() when all patterns are zero-length", ->
expectStreamEvents(
->
Bacon.when([], ->)
[])
describe "works with empty patterns", ->
expectStreamEvents(
-> Bacon.when(
[Bacon.once(1)], (x) -> x,
[], ->)
[1])
describe "works with empty patterns (2)", ->
expectStreamEvents(
-> Bacon.when(
[], ->,
[Bacon.once(1)], (x) -> x)
[1])
describe "works with single stream", ->
expectStreamEvents(
-> Bacon.when([Bacon.once(1)], (x) -> x)
[1])
describe "works with multiples of streams", ->
expectStreamEvents(
->
[h,o,c,_] = ['h','o','c','_']
hs = series(1, [h, _, h, _, h, _, h, _, _, _, h, _, h]).filter((x) -> x == h)
os = series(1, [_, o, _, _, _, o, _, o, _, o, _, _, _]).filter((x) -> x == o)
cs = series(1, [_, _, _, c, _, _, _, _, c, _, _, c, _]).filter((x) -> x == c)
Bacon.when(
[hs, hs, os], (h1,h2,o) -> [h1,h2,o],
[cs, os], (c,o) -> [c,o])
[['h', 'h', 'o'], ['c', 'o'], ['h', 'h', 'o'], ['c', 'o']], unstable)
describe "works with multiples of properties", ->
expectStreamEvents(
->
c = Bacon.constant("c")
Bacon.when(
[c, c, Bacon.once(1)], (c1, c2, _) -> c1 + c2)
["cc"])
describe "accepts constants instead of functions too", ->
expectStreamEvents(
-> Bacon.when(Bacon.once(1), 2)
[2])
describe "works with synchronous sources", ->
expectStreamEvents(
->
xs = Bacon.once "x"
ys = Bacon.once "y"
Bacon.when(
[xs, ys], (x, y) -> x + y
)
["xy"])
it "toString", ->
expect(Bacon.when([Bacon.never()], (->)).toString()).to.equal("Bacon.when([Bacon.never()],function)")
describe "Bacon.update", ->
describe "works like Bacon.when, but produces a property, and can be defined in terms of a current value", ->
expectPropertyEvents(
->
[r,i,_] = ['r','i',0]
incr = series(1, [1, _, 1, _, 2, _, 1, _, _, _, 2, _, 1]).filter((x) -> x != _)
reset = series(1, [_, r, _, _, _, r, _, r, _, r, _, _, _]).filter((x) -> x == r)
Bacon.update(
0,
[reset], 0,
[incr], (i,c) -> i+c)
[0, 1, 0, 1, 3, 0, 1, 0, 0, 2, 3])
describe "Correctly handles multiple arguments in parameter list, and synchronous sources", ->
expectPropertyEvents(
->
one = Bacon.once(1)
two = Bacon.once(2)
Bacon.update(
0,
[one, two], (i, a, b) -> [i,a,b])
[0, [0,1,2]])
it "toString", ->
expect(Bacon.update(0, [Bacon.never()], (->)).toString()).to.equal("Bacon.update(0,[Bacon.never()],function)")
describe "combineTemplate", ->
describe "combines streams according to a template object", ->
# String literals below were mangled by an automated redaction pass
# (PI:NAME/PI:PASSWORD markers); restored to the upstream fixture values.
expectPropertyEvents(
->
firstName = Bacon.constant("juha")
lastName = Bacon.constant("paananen")
userName = Bacon.constant("mr.bacon")
Bacon.combineTemplate({ userName: userName, password: "*****", fullName: { firstName: firstName, lastName: lastName }})
[{ userName: "mr.bacon", password: "*****", fullName: { firstName: "juha", lastName: "paananen" } }])
describe "works with a single-stream template", ->
expectPropertyEvents(
->
bacon = Bacon.constant("bacon")
Bacon.combineTemplate({ favoriteFood: bacon })
[{ favoriteFood: "bacon" }])
describe "works when dynamic part is not the last part (bug fix)", ->
# Restored redaction-mangled literals. In the template, `password` must be
# the Property defined above — the unquoted placeholder that replaced it
# was not even syntactically valid CoffeeScript.
expectPropertyEvents(
->
username = Bacon.constant("raimohanska")
password = Bacon.constant("easy")
Bacon.combineTemplate({url: "/user/login",
data: { username: username, password: password }, type: "post"})
[url: "/user/login", data: {username: "raimohanska", password: "easy"}, type: "post"])
describe "works with arrays as data (bug fix)", ->
expectPropertyEvents(
-> Bacon.combineTemplate( { x : Bacon.constant([]), y : Bacon.constant([[]]), z : Bacon.constant(["z"])})
[{ x : [], y : [[]], z : ["z"]}])
describe "supports empty object", ->
expectPropertyEvents(
-> Bacon.combineTemplate({})
[{}])
it "supports arrays", ->
value = {key: [{ x: 1 }, { x: 2 }]}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
expect(x.key instanceof Array).to.deep.equal(true) # seems that the former passes even if x is not an array
value = [{ x: 1 }, { x: 2 }]
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
expect(x instanceof Array).to.deep.equal(true)
value = {key: [{ x: 1 }, { x: 2 }], key2: {}}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
expect(x.key instanceof Array).to.deep.equal(true)
value = {key: [{ x: 1 }, { x: Bacon.constant(2) }]}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal({key: [{ x: 1 }, { x: 2 }]})
expect(x.key instanceof Array).to.deep.equal(true) # seems that the former passes even if x is not an array
it "supports nulls", ->
value = {key: null}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
it "supports NaNs", ->
value = {key: NaN}
Bacon.combineTemplate(value).onValue (x) ->
expect(isNaN(x.key)).to.deep.equal(true)
it "supports dates", ->
value = {key: new Date()}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
it "supports regexps", ->
value = {key: /[0-0]/i}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
it "supports functions", ->
value = {key: ->}
Bacon.combineTemplate(value).onValue (x) ->
expect(x).to.deep.equal(value)
it "toString", ->
expect(Bacon.combineTemplate({ thing: Bacon.never(), const: "a" }).toString()).to.equal("Bacon.combineTemplate({thing:Bacon.never(),const:a})")
describe "Property.decode", ->
describe "switches between source Properties based on property value", ->
expectPropertyEvents(
->
a = Bacon.constant("a")
b = Bacon.constant("b")
c = Bacon.constant("c")
series(1, [1,2,3]).toProperty().decode({1: a, 2: b, 3: c})
["a", "b", "c"])
it "toString", ->
expect(Bacon.constant(1).decode({1: "lol"}).toString()).to.equal("Bacon.constant(1).decode({1:lol})")
describe "EventStream.decode", ->
describe "switches between source Properties based on property value", ->
expectPropertyEvents(
->
a = Bacon.constant("a")
b = Bacon.constant("b")
c = Bacon.constant("c")
series(1, [1,2,3]).decode({1: a, 2: b, 3: c})
["a", "b", "c"])
describe "Observable.onValues", ->
it "splits value array to callback arguments", ->
f = mockFunction()
Bacon.constant([1,2,3]).onValues(f)
f.verify(1,2,3)
describe "Bacon.onValues", ->
it "is a shorthand for combineAsArray.onValues", ->
f = mockFunction()
Bacon.onValues(1, 2, 3, f)
f.verify(1,2,3)
describe "Observable.subscribe and onValue", ->
it "returns a dispose() for unsubscribing", ->
s = new Bacon.Bus()
values = []
dispose = s.onValue (value) -> values.push value
s.push "lol"
dispose()
s.push "wut"
expect(values).to.deep.equal(["lol"])
describe "Observable.onEnd", ->
it "is called on stream end", ->
s = new Bacon.Bus()
ended = false
s.onEnd(-> ended = true)
s.push("LOL")
expect(ended).to.deep.equal(false)
s.end()
expect(ended).to.deep.equal(true)
describe "Field value extraction", ->
describe "extracts field value", ->
expectStreamEvents(
-> Bacon.once({lol:"wut"}).map(".lol")
["wut"])
describe "extracts nested field value", ->
expectStreamEvents(
-> Bacon.once({lol:{wut: "wat"}}).map(".lol.wut")
["wat"])
describe "yields 'undefined' if any value on the path is 'undefined'", ->
expectStreamEvents(
-> Bacon.once({}).map(".lol.wut")
[undefined])
it "if field value is method, it does a method call", ->
context = null
result = null
object = {
method: ->
context = this
"result"
}
Bacon.once(object).map(".method").onValue((x) -> result = x)
expect(result).to.deep.equal("result")
expect(context).to.deep.equal(object)
testSideEffects = (wrapper, method) ->
->
it "(f) calls function with property value", ->
f = mockFunction()
wrapper("kaboom")[method](f)
f.verify("kaboom")
it "(f, param) calls function, partially applied with param", ->
f = mockFunction()
wrapper("kaboom")[method](f, "pow")
f.verify("pow", "kaboom")
it "('.method') calls event value object method", ->
value = mock("get")
value.when().get().thenReturn("pow")
wrapper(value)[method](".get")
value.verify().get()
it "('.method', param) calls event value object method with param", ->
value = mock("get")
value.when().get("value").thenReturn("pow")
wrapper(value)[method](".get", "value")
value.verify().get("value")
it "(object, method) calls object method with property value", ->
target = mock("pow")
wrapper("kaboom")[method](target, "pow")
target.verify().pow("kaboom")
it "(object, method, param) partially applies object method with param", ->
target = mock("pow")
wrapper("kaboom")[method](target, "pow", "smack")
target.verify().pow("smack", "kaboom")
it "(object, method, param1, param2) partially applies with 2 args", ->
target = mock("pow")
wrapper("kaboom")[method](target, "pow", "smack", "whack")
target.verify().pow("smack", "whack", "kaboom")
describe "Property.onValue", testSideEffects(Bacon.constant, "onValue")
describe "Property.assign", testSideEffects(Bacon.constant, "assign")
describe "EventStream.onValue", testSideEffects(Bacon.once, "onValue")
describe "Property.assign", ->
it "calls given objects given method with property values", ->
target = mock("pow")
Bacon.constant("kaboom").assign(target, "pow")
target.verify().pow("kaboom")
it "allows partial application of method (i.e. adding fixed args)", ->
target = mock("pow")
Bacon.constant("kaboom").assign(target, "pow", "smack")
target.verify().pow("smack", "kaboom")
it "allows partial application of method with 2 args (i.e. adding fixed args)", ->
target = mock("pow")
Bacon.constant("kaboom").assign(target, "pow", "smack", "whack")
target.verify().pow("smack", "whack", "kaboom")
describe "Bacon.Bus", ->
it "merges plugged-in streams", ->
bus = new Bacon.Bus()
values = []
dispose = bus.onValue (value) -> values.push value
push = new Bacon.Bus()
bus.plug(push)
push.push("lol")
expect(values).to.deep.equal(["lol"])
dispose()
verifyCleanup()
describe "works with looped streams", ->
expectStreamEvents(
->
bus = new Bacon.Bus()
bus.plug(Bacon.later(t(2), "lol"))
bus.plug(bus.filter((value) => "lol" == value).map(=> "wut"))
Bacon.later(t(4)).onValue(=> bus.end())
bus
["lol", "wut"])
it "dispose works with looped streams", ->
bus = new Bacon.Bus()
bus.plug(Bacon.later(t(2), "lol"))
bus.plug(bus.filter((value) => "lol" == value).map(=> "wut"))
dispose = bus.onValue(=>)
dispose()
it "Removes input from input list on End event", ->
subscribed = 0
bus = new Bacon.Bus()
input = new Bacon.Bus()
# override subscribe to increase the subscribed-count
inputSubscribe = input.subscribe
input.subscribe = (sink) ->
subscribed++
inputSubscribe(sink)
bus.plug(input)
dispose = bus.onValue(=>)
input.end()
dispose()
bus.onValue(=>) # this latter subscription should not go to the ended source anymore
expect(subscribed).to.deep.equal(1)
it "unsubscribes inputs on end() call", ->
bus = new Bacon.Bus()
input = new Bacon.Bus()
events = []
bus.plug(input)
bus.subscribe((e) => events.push(e))
input.push("a")
bus.end()
input.push("b")
expect(toValues(events)).to.deep.equal(["a", "<end>"])
it "handles cold single-event streams correctly (bug fix)", ->
values = []
bus = new Bacon.Bus()
bus.plug(Bacon.once("x"))
bus.plug(Bacon.once("y"))
bus.onValue((x) -> values.push(x))
expect(values).to.deep.equal(["x", "y"])
it "handles end() calls even when there are no subscribers", ->
bus = new Bacon.Bus()
bus.end()
describe "delivers pushed events and errors", ->
expectStreamEvents(
->
s = new Bacon.Bus()
s.push "pullMe"
soon ->
s.push "pushMe"
# test that it works regardless of "this"
s.push.call(null, "pushSomeMore")
s.error()
s.end()
s
["pushMe", "pushSomeMore", error()])
it "does not deliver pushed events after end() call", ->
called = false
bus = new Bacon.Bus()
bus.onValue(-> called = true)
bus.end()
bus.push("LOL")
expect(called).to.deep.equal(false)
it "does not plug after end() call", ->
plugged = false
bus = new Bacon.Bus()
bus.end()
bus.plug(new Bacon.EventStream((sink) -> plugged = true; (->)))
bus.onValue(->)
expect(plugged).to.deep.equal(false)
it "returns unplug function from plug", ->
values = []
bus = new Bacon.Bus()
src = new Bacon.Bus()
unplug = bus.plug(src)
bus.onValue((x) -> values.push(x))
src.push("x")
unplug()
src.push("y")
expect(values).to.deep.equal(["x"])
it "allows consumers to re-subscribe after other consumers have unsubscribed (bug fix)", ->
bus = new Bacon.Bus
otherBus = new Bacon.Bus
otherBus.plug(bus)
unsub = otherBus.onValue ->
unsub()
o = []
otherBus.onValue (v) -> o.push(v)
bus.push("foo")
expect(o).to.deep.equal(["foo"])
it "toString", ->
expect(new Bacon.Bus().toString()).to.equal("Bacon.Bus()")
describe "EventStream", ->
describe "works with functions as values (bug fix)", ->
expectStreamEvents(
-> Bacon.once(-> "hello").map((f) -> f())
["hello"])
expectStreamEvents(
-> Bacon.once(-> "hello").flatMap(Bacon.once).map((f) -> f())
["hello"])
expectPropertyEvents(
-> Bacon.constant(-> "hello").map((f) -> f())
["hello"])
expectStreamEvents(
-> Bacon.constant(-> "hello").flatMap(Bacon.once).map((f) -> f())
["hello"])
it "handles one subscriber added twice just like two separate subscribers (case Bacon.noMore)", ->
values = []
bus = new Bacon.Bus()
f = (v) ->
if v.hasValue()
values.push(v.value())
return Bacon.noMore
bus.subscribe(f)
bus.subscribe(f)
bus.push("bacon")
expect(values).to.deep.equal(["bacon", "bacon"])
it "handles one subscriber added twice just like two separate subscribers (case unsub)", ->
values = []
bus = new Bacon.Bus()
f = (v) ->
if v.hasValue()
values.push(v.value())
bus.subscribe(f)
unsub = bus.subscribe(f)
unsub()
bus.push("bacon")
expect(values).to.deep.equal(["bacon"])
describe "Bacon.fromBinder", ->
describe "Provides an easier alternative to the EventStream constructor, allowing sending multiple events at a time", ->
expectStreamEvents(
->
Bacon.fromBinder (sink) ->
sink([new Bacon.Next(1), new Bacon.End()])
(->)
[1])
describe "Allows sending unwrapped values as well as events", ->
expectStreamEvents(
->
Bacon.fromBinder (sink) ->
sink([1, new Bacon.End()])
(->)
[1])
describe "Allows sending single value without wrapping array", ->
expectStreamEvents(
->
Bacon.fromBinder (sink) ->
sink(1)
sink(new Bacon.End())
(->)
[1])
it "toString", ->
expect(Bacon.fromBinder(->).toString()).to.equal("Bacon.fromBinder(function,function)")
describe "String presentations", ->
describe "Initial(1).toString", ->
it "is 1", ->
expect(new Bacon.Initial(1).toString()).to.equal("1")
# Title fixed: `{a:1i}` was a typo — the event constructed and asserted
# below is new Bacon.Next({a:1}).
describe "Next({a:1}).toString", ->
it "is {a:1}", ->
expect(new Bacon.Next({a:1}).toString()).to.equal("{a:1}")
describe "Error({a:1}).toString", ->
it "is <error> {a:1}", ->
expect(new Bacon.Error({a:1}).toString()).to.equal("<error> {a:1}")
describe "End.toString", ->
it "is <end>", ->
expect(new Bacon.End().toString()).to.equal("<end>")
describe "inspect", ->
it "is the same as toString", ->
expect(new Bacon.Initial(1).inspect()).to.equal("1")
describe "Observable.name", ->
it "sets return value of toString and inspect", ->
expect(Bacon.once(1).name("one").toString()).to.equal("one")
expect(Bacon.once(1).name("one").inspect()).to.equal("one")
it "modifies the stream in place", ->
obs = Bacon.once(1)
obs.name("one")
expect(obs.toString()).to.equal("one")
it "supports composition", ->
expect(Bacon.once("raimohanska").name("raimo").take(1).inspect()).to.equal("raimo.take(1)")
describe "Bacon.spy", ->
testSpy = (expectedCount, f) ->
calls = 0
spy = (obs) -> calls++
Bacon.spy spy
f()
expect(calls).to.equal(expectedCount)
describe "calls spy function for all created Observables", ->
it "EventStream", ->
testSpy 1, -> Bacon.once(1)
it "Property", ->
testSpy 1, -> Bacon.constant(1)
it "map", ->
testSpy 2, -> Bacon.once(1).map(->)
it "combineTemplate (also called for the intermediate combineAsArray property)", ->
testSpy 4, -> Bacon.combineTemplate(Bacon.once(1), Bacon.constant(2))
describe "Infinite synchronous sequences", ->
describe "Limiting length with take(n)", ->
expectStreamEvents(
-> endlessly(1,2,3).take(4)
[1,2,3,1], unstable)
expectStreamEvents(
-> endlessly(1,2,3).take(4).concat(Bacon.once(5))
[1,2,3,1,5], unstable)
expectStreamEvents(
-> endlessly(1,2,3).take(4).concat(endlessly(5, 6).take(2))
[1,2,3,1,5,6], unstable)
describe "With flatMap", ->
expectStreamEvents(
-> Bacon.fromArray([1,2]).flatMap((x) -> endlessly(x)).take(2)
[1,1])
expectStreamEvents(
-> endlessly(1,2).flatMap((x) -> endlessly(x)).take(2)
[1,1])
# Builds an endless synchronous stream that cycles through `values`.
# Each generated Next is lazy: the value is pulled (and the cursor
# advanced, wrapping modulo the list length) only when it is evaluated.
endlessly = (values...) ->
i = 0
takeNext = ->
v = values[i % values.length]
i += 1
v
Bacon.fromSynchronousGenerator -> new Bacon.Next(takeNext)
# Pull-based stream constructor: `generator` is invoked with a `push`
# callback and may push a batch (array) of events; after a batch is fully
# delivered, `push` synchronously asks the generator for the next batch.
# Delivery stops when the subscriber unsubscribes, replies Bacon.noMore,
# or an End event is seen.
Bacon.fromGenerator = (generator) ->
Bacon.fromBinder (sink) ->
unsubd = false
push = (events) ->
events = Bacon._.toArray(events)
for event in events
# bail out if the subscriber already unsubscribed mid-batch
return if unsubd
reply = sink event
# stop pulling on End, or when the sink signals Bacon.noMore
return if event.isEnd() or reply == Bacon.noMore
# whole batch delivered and stream still live: request more
generator(push)
# kick off the pull loop with an empty batch
push []
# unsubscribe function: prevents any further delivery
-> unsubd = true
# Wraps a nullary generator as a synchronous stream: every pull emits
# whatever `generator()` returns next (events or plain values).
Bacon.fromSynchronousGenerator = (generator) ->
Bacon.fromGenerator (emit) ->
emit(generator())
# Curried strict comparison: lessThan(limit)(x) is true iff x < limit.
lessThan = (limit) ->
(candidate) -> candidate < limit
# Tiny arithmetic/identity helpers shared by the specs above.
times = (a, b) -> a * b
add = (a, b) -> a + b
id = (value) -> value
request = require 'request'
moment = require 'moment'
Database = require '../database'
Server = require '../../src/server'
# Integration spec for PUT /deployments/:owner/:repo/:tag/build/:state/passed.
# Verifies the endpoint records a passing state for the named build system.
describe 'Update Build Passed', ->
beforeEach (done) ->
# each test starts against a freshly dropped (empty) database
@db = new Database
@db.drop done
beforeEach (done) ->
# boot the service on a random port with basic-auth credentials
@logFn = sinon.spy()
serverOptions =
port: undefined,
disableLogging: true
logFn: @logFn
username: 'username'
password: 'password'
serverOptions.database = @db.database
@server = new Server serverOptions
@server.run =>
@serverPort = @server.address().port
done()
afterEach ->
@server.destroy()
describe 'on PUT /deployments/:owner/:repo/:tag/build/:state/passed', ->
describe 'when the deployment does NOT exist', ->
beforeEach (done) ->
options =
uri: '/deployments/the-owner/the-service/v1.0.0/build/travis-ci/passed'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'username'
password: 'password'
json: true
request.put options, (error, @response, @body) =>
done error
# unknown deployments still yield 204 (no 404), per the assertion below
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204
describe 'when the deployment exists', ->
describe 'when the build does NOT exist', ->
beforeEach (done) ->
# seed a deployment with a passing docker build and no travis-ci entry
deployment =
tag: 'v1.0.0'
repo: 'the-service'
owner: 'the-owner'
createdAt: moment('2001-01-01').toDate()
build: {
passing: false
docker: {
passing: true
}
}
cluster: {}
@db.deployments.insert deployment, done
beforeEach (done) ->
options =
uri: '/deployments/the-owner/the-service/v1.0.0/build/travis-ci/passed'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'username'
password: 'password'
json: true
request.put options, (error, @response, @body) =>
done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204
it 'should have an empty body', ->
expect(@body).to.be.empty
describe 'when the database record is checked', ->
beforeEach (done) ->
query = { owner: 'the-owner', repo: 'the-service', tag: 'v1.0.0' }
@db.deployments.findOne query, (error, @record) =>
done error
# with docker already passing, the overall build flag becomes true
it 'should have a passing build', ->
expect(@record.build.passing).to.be.true
it 'should have a travis-ci set to passed', ->
expect(@record.build["travis-ci"].passing).to.be.true
# a fresh travis-ci entry gets a createdAt within the last minute
it 'should have a valid created at date for travis-ci', ->
expect(moment(@record.build["travis-ci"].createdAt).isBefore(moment())).to.be.true
expect(moment(@record.build["travis-ci"].createdAt).isAfter(moment().subtract(1, 'minute'))).to.be.true
describe 'when the build exists', ->
beforeEach (done) ->
# seed a deployment whose travis-ci build already exists but failed
deployment =
tag: 'v1.0.0'
repo: 'the-service'
owner: 'the-owner'
createdAt: moment('2001-01-01').toDate()
build: {
passing: false,
"travis-ci": {
passing: false,
createdAt: moment('2001-01-01').toDate()
}
}
cluster: {}
@db.deployments.insert deployment, done
beforeEach (done) ->
options =
uri: '/deployments/the-owner/the-service/v1.0.0/build/travis-ci/passed'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'username'
password: 'password'
json: true
request.put options, (error, @response, @body) =>
done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204
it 'should have an empty body', ->
expect(@body).to.be.empty
describe 'when the database record is checked', ->
beforeEach (done) ->
query = { owner: 'the-owner', repo: 'the-service', tag: 'v1.0.0' }
@db.deployments.findOne query, (error, @record) =>
done error
# NOTE(review): overall passing stays false in this case — presumably
# because not every recorded build system passed; confirm intent
it 'should have a non-passing build', ->
expect(@record.build.passing).to.be.false
it 'should have a travis-ci set to passed', ->
expect(@record.build["travis-ci"].passing).to.be.true
# createdAt of the existing entry must be preserved unchanged
it 'should have a valid createdAt date for travis-ci', ->
expect(moment(@record.build["travis-ci"].createdAt).valueOf()).to.be.equal moment('2001-01-01').valueOf()
it 'should have a valid updatedAt date for travis-ci', ->
expect(moment(@record.build["travis-ci"].updatedAt).isBefore(moment())).to.be.true
expect(moment(@record.build["travis-ci"].updatedAt).isAfter(moment().subtract(1, 'minute'))).to.be.true
request = require 'request'
moment = require 'moment'
Database = require '../database'
Server = require '../../src/server'
describe 'Update Build Passed', ->
beforeEach (done) ->
@db = new Database
@db.drop done
beforeEach (done) ->
@logFn = sinon.spy()
serverOptions =
port: undefined,
disableLogging: true
logFn: @logFn
username: 'username'
password: '<PASSWORD>'
serverOptions.database = @db.database
@server = new Server serverOptions
@server.run =>
@serverPort = @server.address().port
done()
afterEach ->
@server.destroy()
describe 'on PUT /deployments/:owner/:repo/:tag/build/:state/passed', ->
describe 'when the deployment does NOT exist', ->
beforeEach (done) ->
options =
uri: '/deployments/the-owner/the-service/v1.0.0/build/travis-ci/passed'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'username'
password: '<PASSWORD>'
json: true
request.put options, (error, @response, @body) =>
done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204
describe 'when the deployment exists', ->
describe 'when the build does NOT exist', ->
beforeEach (done) ->
deployment =
tag: 'v1.0.0'
repo: 'the-service'
owner: 'the-owner'
createdAt: moment('2001-01-01').toDate()
build: {
passing: false
docker: {
passing: true
}
}
cluster: {}
@db.deployments.insert deployment, done
beforeEach (done) ->
options =
uri: '/deployments/the-owner/the-service/v1.0.0/build/travis-ci/passed'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'username'
password: '<PASSWORD>'
json: true
request.put options, (error, @response, @body) =>
done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204
it 'should have an empty body', ->
expect(@body).to.be.empty
describe 'when the database record is checked', ->
beforeEach (done) ->
query = { owner: 'the-owner', repo: 'the-service', tag: 'v1.0.0' }
@db.deployments.findOne query, (error, @record) =>
done error
it 'should have a passing build', ->
expect(@record.build.passing).to.be.true
it 'should have a travis-ci set to passed', ->
expect(@record.build["travis-ci"].passing).to.be.true
it 'should have a valid created at date for travis-ci', ->
expect(moment(@record.build["travis-ci"].createdAt).isBefore(moment())).to.be.true
expect(moment(@record.build["travis-ci"].createdAt).isAfter(moment().subtract(1, 'minute'))).to.be.true
describe 'when the build exists', ->
beforeEach (done) ->
deployment =
tag: 'v1.0.0'
repo: 'the-service'
owner: 'the-owner'
createdAt: moment('2001-01-01').toDate()
build: {
passing: false,
"travis-ci": {
passing: false,
createdAt: moment('2001-01-01').toDate()
}
}
cluster: {}
@db.deployments.insert deployment, done
beforeEach (done) ->
options =
uri: '/deployments/the-owner/the-service/v1.0.0/build/travis-ci/passed'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'username'
password: '<PASSWORD>'
json: true
request.put options, (error, @response, @body) =>
done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204
it 'should have an empty body', ->
expect(@body).to.be.empty
describe 'when the database record is checked', ->
beforeEach (done) ->
query = { owner: 'the-owner', repo: 'the-service', tag: 'v1.0.0' }
@db.deployments.findOne query, (error, @record) =>
done error
it 'should have a non-passing build', ->
expect(@record.build.passing).to.be.false
it 'should have a travis-ci set to passed', ->
expect(@record.build["travis-ci"].passing).to.be.true
it 'should have a valid createdAt date for travis-ci', ->
expect(moment(@record.build["travis-ci"].createdAt).valueOf()).to.be.equal moment('2001-01-01').valueOf()
it 'should have a valid updatedAt date for travis-ci', ->
expect(moment(@record.build["travis-ci"].updatedAt).isBefore(moment())).to.be.true
expect(moment(@record.build["travis-ci"].updatedAt).isAfter(moment().subtract(1, 'minute'))).to.be.true
| true | request = require 'request'
moment = require 'moment'
Database = require '../database'
Server = require '../../src/server'
describe 'Update Build Passed', ->
beforeEach (done) ->
@db = new Database
@db.drop done
beforeEach (done) ->
@logFn = sinon.spy()
serverOptions =
port: undefined,
disableLogging: true
logFn: @logFn
username: 'username'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
serverOptions.database = @db.database
@server = new Server serverOptions
@server.run =>
@serverPort = @server.address().port
done()
afterEach ->
@server.destroy()
describe 'on PUT /deployments/:owner/:repo/:tag/build/:state/passed', ->
describe 'when the deployment does NOT exist', ->
beforeEach (done) ->
options =
uri: '/deployments/the-owner/the-service/v1.0.0/build/travis-ci/passed'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'username'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
json: true
request.put options, (error, @response, @body) =>
done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204
describe 'when the deployment exists', ->
describe 'when the build does NOT exist', ->
beforeEach (done) ->
deployment =
tag: 'v1.0.0'
repo: 'the-service'
owner: 'the-owner'
createdAt: moment('2001-01-01').toDate()
build: {
passing: false
docker: {
passing: true
}
}
cluster: {}
@db.deployments.insert deployment, done
beforeEach (done) ->
options =
uri: '/deployments/the-owner/the-service/v1.0.0/build/travis-ci/passed'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'username'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
json: true
request.put options, (error, @response, @body) =>
done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204
it 'should have an empty body', ->
expect(@body).to.be.empty
describe 'when the database record is checked', ->
beforeEach (done) ->
query = { owner: 'the-owner', repo: 'the-service', tag: 'v1.0.0' }
@db.deployments.findOne query, (error, @record) =>
done error
it 'should have a passing build', ->
expect(@record.build.passing).to.be.true
it 'should have a travis-ci set to passed', ->
expect(@record.build["travis-ci"].passing).to.be.true
it 'should have a valid created at date for travis-ci', ->
expect(moment(@record.build["travis-ci"].createdAt).isBefore(moment())).to.be.true
expect(moment(@record.build["travis-ci"].createdAt).isAfter(moment().subtract(1, 'minute'))).to.be.true
describe 'when the build exists', ->
beforeEach (done) ->
deployment =
tag: 'v1.0.0'
repo: 'the-service'
owner: 'the-owner'
createdAt: moment('2001-01-01').toDate()
build: {
passing: false,
"travis-ci": {
passing: false,
createdAt: moment('2001-01-01').toDate()
}
}
cluster: {}
@db.deployments.insert deployment, done
beforeEach (done) ->
options =
uri: '/deployments/the-owner/the-service/v1.0.0/build/travis-ci/passed'
baseUrl: "http://localhost:#{@serverPort}"
auth:
username: 'username'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
json: true
request.put options, (error, @response, @body) =>
done error
it 'should return a 204', ->
expect(@response.statusCode).to.equal 204
it 'should have an empty body', ->
expect(@body).to.be.empty
describe 'when the database record is checked', ->
beforeEach (done) ->
query = { owner: 'the-owner', repo: 'the-service', tag: 'v1.0.0' }
@db.deployments.findOne query, (error, @record) =>
done error
it 'should have a non-passing build', ->
expect(@record.build.passing).to.be.false
it 'should have a travis-ci set to passed', ->
expect(@record.build["travis-ci"].passing).to.be.true
it 'should have a valid createdAt date for travis-ci', ->
expect(moment(@record.build["travis-ci"].createdAt).valueOf()).to.be.equal moment('2001-01-01').valueOf()
it 'should have a valid updatedAt date for travis-ci', ->
expect(moment(@record.build["travis-ci"].updatedAt).isBefore(moment())).to.be.true
expect(moment(@record.build["travis-ci"].updatedAt).isAfter(moment().subtract(1, 'minute'))).to.be.true
|
[
{
"context": "###\n * @author \t\tAbdelhakim RAFIK\n * @version \tv1.0.1\n * @license \tMIT License\n * @",
"end": 33,
"score": 0.9998854398727417,
"start": 17,
"tag": "NAME",
"value": "Abdelhakim RAFIK"
},
{
"context": "nse \tMIT License\n * @copyright \tCopyright (c) 2021 Abdelhaki... | src/app/models/pharma-medicines.coffee | AbdelhakimRafik/Pharmalogy-API | 0 | ###
* @author Abdelhakim RAFIK
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 Abdelhakim RAFIK
* @date Mar 2021
###
{ DataTypes, Model } = require 'sequelize'
{ sequelize } = require '../../database'
###
pharmacy medicine model
###
class PharmacyMedicine extends Model
# initialize model
PharmacyMedicine.init
pharmacy:
allowNull: false
type: Sequelize.INTEGER
medicine:
allowNull: false
type: Sequelize.INTEGER
provider:
allowNull: false
type: Sequelize.INTEGER
quantity:
allowNull: false
type: Sequelize.INTEGER.UNSIGNED
defaultValue: 1
purchasePrice:
allowNull: false
type: Sequelize.FLOAT 11, 2
salePrice:
allowNull: false
type: Sequelize.FLOAT 11, 2
prodDate:
type: Sequelize.DATEONLY
expDate:
allowNull: false
type: Sequelize.DATEONLY
createdAt:
allowNull: false
type: Sequelize.DATE
updatedAt:
allowNull: false
type: Sequelize.DATE | 41956 | ###
* @author <NAME>
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 <NAME>
* @date Mar 2021
###
{ DataTypes, Model } = require 'sequelize'
{ sequelize } = require '../../database'
###
pharmacy medicine model
###
class PharmacyMedicine extends Model
# initialize model
PharmacyMedicine.init
pharmacy:
allowNull: false
type: Sequelize.INTEGER
medicine:
allowNull: false
type: Sequelize.INTEGER
provider:
allowNull: false
type: Sequelize.INTEGER
quantity:
allowNull: false
type: Sequelize.INTEGER.UNSIGNED
defaultValue: 1
purchasePrice:
allowNull: false
type: Sequelize.FLOAT 11, 2
salePrice:
allowNull: false
type: Sequelize.FLOAT 11, 2
prodDate:
type: Sequelize.DATEONLY
expDate:
allowNull: false
type: Sequelize.DATEONLY
createdAt:
allowNull: false
type: Sequelize.DATE
updatedAt:
allowNull: false
type: Sequelize.DATE | true | ###
* @author PI:NAME:<NAME>END_PI
* @version v1.0.1
* @license MIT License
* @copyright Copyright (c) 2021 PI:NAME:<NAME>END_PI
* @date Mar 2021
###
{ DataTypes, Model } = require 'sequelize'
{ sequelize } = require '../../database'
###
pharmacy medicine model
###
class PharmacyMedicine extends Model
# initialize model
PharmacyMedicine.init
pharmacy:
allowNull: false
type: Sequelize.INTEGER
medicine:
allowNull: false
type: Sequelize.INTEGER
provider:
allowNull: false
type: Sequelize.INTEGER
quantity:
allowNull: false
type: Sequelize.INTEGER.UNSIGNED
defaultValue: 1
purchasePrice:
allowNull: false
type: Sequelize.FLOAT 11, 2
salePrice:
allowNull: false
type: Sequelize.FLOAT 11, 2
prodDate:
type: Sequelize.DATEONLY
expDate:
allowNull: false
type: Sequelize.DATEONLY
createdAt:
allowNull: false
type: Sequelize.DATE
updatedAt:
allowNull: false
type: Sequelize.DATE |
[
{
"context": "bles already declared in the outer scope\n# @author Ilya Volodin\n###\n\n'use strict'\n\n#-----------------------------",
"end": 115,
"score": 0.9998103976249695,
"start": 103,
"tag": "NAME",
"value": "Ilya Volodin"
}
] | src/rules/no-shadow.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Rule to flag on declaring variables already declared in the outer scope
# @author Ilya Volodin
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'disallow variable declarations from shadowing variables declared in the outer scope'
category: 'Variables'
recommended: no
url: 'https://eslint.org/docs/rules/no-shadow'
schema: [
type: 'object'
properties:
builtinGlobals: type: 'boolean'
hoist: enum: ['all', 'functions', 'never']
allow:
type: 'array'
items:
type: 'string'
additionalProperties: no
]
create: (context) ->
options =
builtinGlobals: Boolean context.options[0]?.builtinGlobals
hoist: context.options[0]?.hoist or 'functions'
allow: context.options[0]?.allow or []
###*
# Check if variable name is allowed.
#
# @param {ASTNode} variable The variable to check.
# @returns {boolean} Whether or not the variable name is allowed.
###
isAllowed = (variable) -> options.allow.indexOf(variable.name) isnt -1
isDoIifeParam = (variable) ->
identifier = variable.identifiers?[0]
identifier?.type is 'Identifier' and
identifier.parent.type is 'FunctionExpression' and
identifier in identifier.parent.params and
identifier.parent.parent.type is 'UnaryExpression' and
identifier.parent.parent.operator is 'do'
###*
# Checks if a variable of the class name in the class scope of ClassDeclaration.
#
# ClassDeclaration creates two variables of its name into its outer scope and its class scope.
# So we should ignore the variable in the class scope.
#
# @param {Object} variable The variable to check.
# @returns {boolean} Whether or not the variable of the class name in the class scope of ClassDeclaration.
###
isDuplicatedClassNameVariable = (variable) ->
{block} = variable.scope
return yes if (
block.type in ['ClassDeclaration', 'ClassExpression'] and
block.id is variable.identifiers[0]
)
return yes if (
block.id?.type is 'Identifier' and
block.parent.type is 'AssignmentExpression' and
block.parent.left.type is 'Identifier' and
block.id.name is block.parent.left.name
)
no
###*
# Checks if a variable is inside the initializer of scopeVar.
#
# To avoid reporting at declarations such as `var a = function a() {};`.
# But it should report `var a = function(a) {};` or `var a = function() { function a() {} };`.
#
# @param {Object} variable The variable to check.
# @param {Object} scopeVar The scope variable to look for.
# @returns {boolean} Whether or not the variable is inside initializer of scopeVar.
###
isOnInitializer = (variable, scopeVar) ->
outerScope = scopeVar.scope
outerDef = scopeVar.defs[0]
outer = outerDef?.parent and outerDef.parent.range
innerScope = variable.scope
innerDef = variable.defs[0]
inner = innerDef?.name.range
outer and
inner and
outer[0] < inner[0] and
inner[1] < outer[1] and
((innerDef.type is 'FunctionName' and
innerDef.node.type is 'FunctionExpression') or
innerDef.node.type is 'ClassExpression') and
outerScope is innerScope.upper
###*
# Get a range of a variable's identifier node.
# @param {Object} variable The variable to get.
# @returns {Array|undefined} The range of the variable's identifier node.
###
getNameRange = (variable) ->
def = variable.defs[0]
def?.name.range
###*
# Checks if a variable is in TDZ of scopeVar.
# @param {Object} variable The variable to check.
# @param {Object} scopeVar The variable of TDZ.
# @returns {boolean} Whether or not the variable is in TDZ of scopeVar.
###
isInTdz = (variable, scopeVar) ->
outerDef = scopeVar.defs[0]
inner = getNameRange variable
outer = getNameRange scopeVar
inner and
outer and
inner[1] < outer[0] and
# Excepts FunctionDeclaration if is {"hoist":"function"}.
not (
options.hoist is 'functions' and
outerDef and
(outerDef.node.type is 'FunctionDeclaration' or
(outerDef.node.parent.type is 'AssignmentExpression' and
outerDef.node.parent.right.type is 'FunctionExpression'))
)
###*
# Checks the current context for shadowed variables.
# @param {Scope} scope - Fixme
# @returns {void}
###
checkForShadows = (scope) ->
{variables} = scope
for variable in variables
# Skips "arguments" or variables of a class name in the class scope of ClassDeclaration.
continue if (
variable.identifiers.length is 0 or
isDuplicatedClassNameVariable(variable) or
isDoIifeParam(variable) or
isAllowed variable
)
# Gets shadowed variable.
shadowed = astUtils.getVariableByName scope.upper, variable.name
if (
shadowed and
(shadowed.identifiers.length > 0 or
(options.builtinGlobals and 'writeable' of shadowed)) and
not isOnInitializer(variable, shadowed) and
not (options.hoist isnt 'all' and isInTdz variable, shadowed)
)
context.report
node: variable.identifiers[0]
message: "'{{name}}' is already declared in the upper scope."
data: variable
'Program:exit': ->
globalScope = context.getScope()
stack = globalScope.childScopes.slice()
while stack.length
scope = stack.pop()
stack.push ...scope.childScopes
checkForShadows scope
| 11649 | ###*
# @fileoverview Rule to flag on declaring variables already declared in the outer scope
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'disallow variable declarations from shadowing variables declared in the outer scope'
category: 'Variables'
recommended: no
url: 'https://eslint.org/docs/rules/no-shadow'
schema: [
type: 'object'
properties:
builtinGlobals: type: 'boolean'
hoist: enum: ['all', 'functions', 'never']
allow:
type: 'array'
items:
type: 'string'
additionalProperties: no
]
create: (context) ->
options =
builtinGlobals: Boolean context.options[0]?.builtinGlobals
hoist: context.options[0]?.hoist or 'functions'
allow: context.options[0]?.allow or []
###*
# Check if variable name is allowed.
#
# @param {ASTNode} variable The variable to check.
# @returns {boolean} Whether or not the variable name is allowed.
###
isAllowed = (variable) -> options.allow.indexOf(variable.name) isnt -1
isDoIifeParam = (variable) ->
identifier = variable.identifiers?[0]
identifier?.type is 'Identifier' and
identifier.parent.type is 'FunctionExpression' and
identifier in identifier.parent.params and
identifier.parent.parent.type is 'UnaryExpression' and
identifier.parent.parent.operator is 'do'
###*
# Checks if a variable of the class name in the class scope of ClassDeclaration.
#
# ClassDeclaration creates two variables of its name into its outer scope and its class scope.
# So we should ignore the variable in the class scope.
#
# @param {Object} variable The variable to check.
# @returns {boolean} Whether or not the variable of the class name in the class scope of ClassDeclaration.
###
isDuplicatedClassNameVariable = (variable) ->
{block} = variable.scope
return yes if (
block.type in ['ClassDeclaration', 'ClassExpression'] and
block.id is variable.identifiers[0]
)
return yes if (
block.id?.type is 'Identifier' and
block.parent.type is 'AssignmentExpression' and
block.parent.left.type is 'Identifier' and
block.id.name is block.parent.left.name
)
no
###*
# Checks if a variable is inside the initializer of scopeVar.
#
# To avoid reporting at declarations such as `var a = function a() {};`.
# But it should report `var a = function(a) {};` or `var a = function() { function a() {} };`.
#
# @param {Object} variable The variable to check.
# @param {Object} scopeVar The scope variable to look for.
# @returns {boolean} Whether or not the variable is inside initializer of scopeVar.
###
isOnInitializer = (variable, scopeVar) ->
outerScope = scopeVar.scope
outerDef = scopeVar.defs[0]
outer = outerDef?.parent and outerDef.parent.range
innerScope = variable.scope
innerDef = variable.defs[0]
inner = innerDef?.name.range
outer and
inner and
outer[0] < inner[0] and
inner[1] < outer[1] and
((innerDef.type is 'FunctionName' and
innerDef.node.type is 'FunctionExpression') or
innerDef.node.type is 'ClassExpression') and
outerScope is innerScope.upper
###*
# Get a range of a variable's identifier node.
# @param {Object} variable The variable to get.
# @returns {Array|undefined} The range of the variable's identifier node.
###
getNameRange = (variable) ->
def = variable.defs[0]
def?.name.range
###*
# Checks if a variable is in TDZ of scopeVar.
# @param {Object} variable The variable to check.
# @param {Object} scopeVar The variable of TDZ.
# @returns {boolean} Whether or not the variable is in TDZ of scopeVar.
###
isInTdz = (variable, scopeVar) ->
outerDef = scopeVar.defs[0]
inner = getNameRange variable
outer = getNameRange scopeVar
inner and
outer and
inner[1] < outer[0] and
# Excepts FunctionDeclaration if is {"hoist":"function"}.
not (
options.hoist is 'functions' and
outerDef and
(outerDef.node.type is 'FunctionDeclaration' or
(outerDef.node.parent.type is 'AssignmentExpression' and
outerDef.node.parent.right.type is 'FunctionExpression'))
)
###*
# Checks the current context for shadowed variables.
# @param {Scope} scope - Fixme
# @returns {void}
###
checkForShadows = (scope) ->
{variables} = scope
for variable in variables
# Skips "arguments" or variables of a class name in the class scope of ClassDeclaration.
continue if (
variable.identifiers.length is 0 or
isDuplicatedClassNameVariable(variable) or
isDoIifeParam(variable) or
isAllowed variable
)
# Gets shadowed variable.
shadowed = astUtils.getVariableByName scope.upper, variable.name
if (
shadowed and
(shadowed.identifiers.length > 0 or
(options.builtinGlobals and 'writeable' of shadowed)) and
not isOnInitializer(variable, shadowed) and
not (options.hoist isnt 'all' and isInTdz variable, shadowed)
)
context.report
node: variable.identifiers[0]
message: "'{{name}}' is already declared in the upper scope."
data: variable
'Program:exit': ->
globalScope = context.getScope()
stack = globalScope.childScopes.slice()
while stack.length
scope = stack.pop()
stack.push ...scope.childScopes
checkForShadows scope
| true | ###*
# @fileoverview Rule to flag on declaring variables already declared in the outer scope
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description:
'disallow variable declarations from shadowing variables declared in the outer scope'
category: 'Variables'
recommended: no
url: 'https://eslint.org/docs/rules/no-shadow'
schema: [
type: 'object'
properties:
builtinGlobals: type: 'boolean'
hoist: enum: ['all', 'functions', 'never']
allow:
type: 'array'
items:
type: 'string'
additionalProperties: no
]
create: (context) ->
options =
builtinGlobals: Boolean context.options[0]?.builtinGlobals
hoist: context.options[0]?.hoist or 'functions'
allow: context.options[0]?.allow or []
###*
# Check if variable name is allowed.
#
# @param {ASTNode} variable The variable to check.
# @returns {boolean} Whether or not the variable name is allowed.
###
isAllowed = (variable) -> options.allow.indexOf(variable.name) isnt -1
isDoIifeParam = (variable) ->
identifier = variable.identifiers?[0]
identifier?.type is 'Identifier' and
identifier.parent.type is 'FunctionExpression' and
identifier in identifier.parent.params and
identifier.parent.parent.type is 'UnaryExpression' and
identifier.parent.parent.operator is 'do'
###*
# Checks if a variable of the class name in the class scope of ClassDeclaration.
#
# ClassDeclaration creates two variables of its name into its outer scope and its class scope.
# So we should ignore the variable in the class scope.
#
# @param {Object} variable The variable to check.
# @returns {boolean} Whether or not the variable of the class name in the class scope of ClassDeclaration.
###
isDuplicatedClassNameVariable = (variable) ->
{block} = variable.scope
return yes if (
block.type in ['ClassDeclaration', 'ClassExpression'] and
block.id is variable.identifiers[0]
)
return yes if (
block.id?.type is 'Identifier' and
block.parent.type is 'AssignmentExpression' and
block.parent.left.type is 'Identifier' and
block.id.name is block.parent.left.name
)
no
###*
# Checks if a variable is inside the initializer of scopeVar.
#
# To avoid reporting at declarations such as `var a = function a() {};`.
# But it should report `var a = function(a) {};` or `var a = function() { function a() {} };`.
#
# @param {Object} variable The variable to check.
# @param {Object} scopeVar The scope variable to look for.
# @returns {boolean} Whether or not the variable is inside initializer of scopeVar.
###
isOnInitializer = (variable, scopeVar) ->
outerScope = scopeVar.scope
outerDef = scopeVar.defs[0]
outer = outerDef?.parent and outerDef.parent.range
innerScope = variable.scope
innerDef = variable.defs[0]
inner = innerDef?.name.range
outer and
inner and
outer[0] < inner[0] and
inner[1] < outer[1] and
((innerDef.type is 'FunctionName' and
innerDef.node.type is 'FunctionExpression') or
innerDef.node.type is 'ClassExpression') and
outerScope is innerScope.upper
###*
# Get a range of a variable's identifier node.
# @param {Object} variable The variable to get.
# @returns {Array|undefined} The range of the variable's identifier node.
###
getNameRange = (variable) ->
def = variable.defs[0]
def?.name.range
###*
# Checks if a variable is in TDZ of scopeVar.
# @param {Object} variable The variable to check.
# @param {Object} scopeVar The variable of TDZ.
# @returns {boolean} Whether or not the variable is in TDZ of scopeVar.
###
isInTdz = (variable, scopeVar) ->
outerDef = scopeVar.defs[0]
inner = getNameRange variable
outer = getNameRange scopeVar
inner and
outer and
inner[1] < outer[0] and
# Excepts FunctionDeclaration if is {"hoist":"function"}.
not (
options.hoist is 'functions' and
outerDef and
(outerDef.node.type is 'FunctionDeclaration' or
(outerDef.node.parent.type is 'AssignmentExpression' and
outerDef.node.parent.right.type is 'FunctionExpression'))
)
###*
# Checks the current context for shadowed variables.
# @param {Scope} scope - Fixme
# @returns {void}
###
checkForShadows = (scope) ->
{variables} = scope
for variable in variables
# Skips "arguments" or variables of a class name in the class scope of ClassDeclaration.
continue if (
variable.identifiers.length is 0 or
isDuplicatedClassNameVariable(variable) or
isDoIifeParam(variable) or
isAllowed variable
)
# Gets shadowed variable.
shadowed = astUtils.getVariableByName scope.upper, variable.name
if (
shadowed and
(shadowed.identifiers.length > 0 or
(options.builtinGlobals and 'writeable' of shadowed)) and
not isOnInitializer(variable, shadowed) and
not (options.hoist isnt 'all' and isInTdz variable, shadowed)
)
context.report
node: variable.identifiers[0]
message: "'{{name}}' is already declared in the upper scope."
data: variable
'Program:exit': ->
globalScope = context.getScope()
stack = globalScope.childScopes.slice()
while stack.length
scope = stack.pop()
stack.push ...scope.childScopes
checkForShadows scope
|
[
{
"context": "###\nCopyright 2016 Balena\n\nLicensed under the Apache License, Version 2.0 (",
"end": 25,
"score": 0.824062705039978,
"start": 19,
"tag": "NAME",
"value": "Balena"
},
{
"context": "models.billing.updateBillingInfo({ token_id: 'xxxxxxx' }).then(function(billingInfo) {\... | lib/models/billing.coffee | josecoelho/balena-sdk | 0 | ###
Copyright 2016 Balena
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
getBillingModel = (deps, opts) ->
{ request } = deps
{ apiUrl, isBrowser } = opts
###*
# @summary Get the user's billing account
# @name getAccount
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - billing account
# @returns {Promise}
#
# @example
# balena.models.billing.getAccount().then(function(billingAccount) {
# console.log(billingAccount);
# });
#
# @example
# balena.models.billing.getAccount(function(error, billingAccount) {
# if (error) throw error;
# console.log(billingAccount);
# });
###
exports.getAccount = (callback) ->
request.send
method: 'GET'
url: '/user/billing/account'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Get the current billing plan
# @name getPlan
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - billing plan
# @returns {Promise}
#
# @example
# balena.models.billing.getPlan().then(function(billingPlan) {
# console.log(billingPlan);
# });
#
# @example
# balena.models.billing.getPlan(function(error, billingPlan) {
# if (error) throw error;
# console.log(billingPlan);
# });
###
exports.getPlan = (callback) ->
request.send
method: 'GET'
url: '/user/billing/plan'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Get the current billing information
# @name getBillingInfo
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - billing information
# @returns {Promise}
#
# @example
# balena.models.billing.getBillingInfo().then(function(billingInfo) {
# console.log(billingInfo);
# });
#
# @example
# balena.models.billing.getBillingInfo(function(error, billingInfo) {
# if (error) throw error;
# console.log(billingInfo);
# });
###
exports.getBillingInfo = (callback) ->
request.send
method: 'GET'
url: '/user/billing/info'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Update the current billing information
# @name updateBillingInfo
# @public
# @function
# @memberof balena.models.billing
#
# @param {Object} billingInfo - an object containing a billing info token_id
# @param {String} billingInfo.token_id - the token id generated for the billing info form
# @param {(String|undefined)} [billingInfo.'g-recaptcha-response'] - the captcha response
# @fulfil {Object} - billing information
# @returns {Promise}
#
# @example
# balena.models.billing.updateBillingInfo({ token_id: 'xxxxxxx' }).then(function(billingInfo) {
# console.log(billingInfo);
# });
#
# @example
# balena.models.billing.updateBillingInfo({ token_id: 'xxxxxxx' }, function(error, billingInfo) {
# if (error) throw error;
# console.log(billingInfo);
# });
###
exports.updateBillingInfo = (billingInfo, callback) ->
request.send
method: 'POST'
url: '/user/billing/info'
baseUrl: apiUrl
body: billingInfo
.get('body')
.asCallback(callback)
###*
# @summary Get the available invoices
# @name getInvoices
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - invoices
# @returns {Promise}
#
# @example
# balena.models.billing.getInvoices().then(function(invoices) {
# console.log(invoices);
# });
#
# @example
# balena.models.billing.getInvoices(function(error, invoices) {
# if (error) throw error;
# console.log(invoices);
# });
###
exports.getInvoices = (callback) ->
request.send
method: 'GET'
url: '/user/billing/invoices'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Download a specific invoice
# @name downloadInvoice
# @public
# @function
# @memberof balena.models.billing
#
# @param {String} - an invoice number
# @fulfil {Blob|ReadableStream} - blob on the browser, download stream on node
# @returns {Promise}
#
# @example
# # Browser
# balena.models.billing.downloadInvoice('0000').then(function(blob) {
# console.log(blob);
# });
# # Node
# balena.models.billing.downloadInvoice('0000').then(function(stream) {
# stream.pipe(fs.createWriteStream('foo/bar/invoice-0000.pdf'));
# });
###
exports.downloadInvoice = (invoiceNumber, callback) ->
url = "/user/billing/invoices/#{invoiceNumber}/download"
if not isBrowser
return request.stream
method: 'GET'
url: url
baseUrl: apiUrl
.asCallback(callback)
request.send
method: 'GET'
url: url
baseUrl: apiUrl
responseFormat: 'blob'
.get('body')
.asCallback(callback)
return exports
module.exports = getBillingModel
| 135488 | ###
Copyright 2016 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
getBillingModel = (deps, opts) ->
{ request } = deps
{ apiUrl, isBrowser } = opts
###*
# @summary Get the user's billing account
# @name getAccount
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - billing account
# @returns {Promise}
#
# @example
# balena.models.billing.getAccount().then(function(billingAccount) {
# console.log(billingAccount);
# });
#
# @example
# balena.models.billing.getAccount(function(error, billingAccount) {
# if (error) throw error;
# console.log(billingAccount);
# });
###
exports.getAccount = (callback) ->
request.send
method: 'GET'
url: '/user/billing/account'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Get the current billing plan
# @name getPlan
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - billing plan
# @returns {Promise}
#
# @example
# balena.models.billing.getPlan().then(function(billingPlan) {
# console.log(billingPlan);
# });
#
# @example
# balena.models.billing.getPlan(function(error, billingPlan) {
# if (error) throw error;
# console.log(billingPlan);
# });
###
exports.getPlan = (callback) ->
request.send
method: 'GET'
url: '/user/billing/plan'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Get the current billing information
# @name getBillingInfo
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - billing information
# @returns {Promise}
#
# @example
# balena.models.billing.getBillingInfo().then(function(billingInfo) {
# console.log(billingInfo);
# });
#
# @example
# balena.models.billing.getBillingInfo(function(error, billingInfo) {
# if (error) throw error;
# console.log(billingInfo);
# });
###
exports.getBillingInfo = (callback) ->
request.send
method: 'GET'
url: '/user/billing/info'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Update the current billing information
# @name updateBillingInfo
# @public
# @function
# @memberof balena.models.billing
#
# @param {Object} billingInfo - an object containing a billing info token_id
# @param {String} billingInfo.token_id - the token id generated for the billing info form
# @param {(String|undefined)} [billingInfo.'g-recaptcha-response'] - the captcha response
# @fulfil {Object} - billing information
# @returns {Promise}
#
# @example
# balena.models.billing.updateBillingInfo({ token_id: 'xxxx<KEY>' }).then(function(billingInfo) {
# console.log(billingInfo);
# });
#
# @example
# balena.models.billing.updateBillingInfo({ token_id: '<KEY>' }, function(error, billingInfo) {
# if (error) throw error;
# console.log(billingInfo);
# });
###
exports.updateBillingInfo = (billingInfo, callback) ->
request.send
method: 'POST'
url: '/user/billing/info'
baseUrl: apiUrl
body: billingInfo
.get('body')
.asCallback(callback)
###*
# @summary Get the available invoices
# @name getInvoices
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - invoices
# @returns {Promise}
#
# @example
# balena.models.billing.getInvoices().then(function(invoices) {
# console.log(invoices);
# });
#
# @example
# balena.models.billing.getInvoices(function(error, invoices) {
# if (error) throw error;
# console.log(invoices);
# });
###
exports.getInvoices = (callback) ->
request.send
method: 'GET'
url: '/user/billing/invoices'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Download a specific invoice
# @name downloadInvoice
# @public
# @function
# @memberof balena.models.billing
#
# @param {String} - an invoice number
# @fulfil {Blob|ReadableStream} - blob on the browser, download stream on node
# @returns {Promise}
#
# @example
# # Browser
# balena.models.billing.downloadInvoice('0000').then(function(blob) {
# console.log(blob);
# });
# # Node
# balena.models.billing.downloadInvoice('0000').then(function(stream) {
# stream.pipe(fs.createWriteStream('foo/bar/invoice-0000.pdf'));
# });
###
exports.downloadInvoice = (invoiceNumber, callback) ->
url = "/user/billing/invoices/#{invoiceNumber}/download"
if not isBrowser
return request.stream
method: 'GET'
url: url
baseUrl: apiUrl
.asCallback(callback)
request.send
method: 'GET'
url: url
baseUrl: apiUrl
responseFormat: 'blob'
.get('body')
.asCallback(callback)
return exports
module.exports = getBillingModel
| true | ###
Copyright 2016 PI:NAME:<NAME>END_PI
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
getBillingModel = (deps, opts) ->
{ request } = deps
{ apiUrl, isBrowser } = opts
###*
# @summary Get the user's billing account
# @name getAccount
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - billing account
# @returns {Promise}
#
# @example
# balena.models.billing.getAccount().then(function(billingAccount) {
# console.log(billingAccount);
# });
#
# @example
# balena.models.billing.getAccount(function(error, billingAccount) {
# if (error) throw error;
# console.log(billingAccount);
# });
###
exports.getAccount = (callback) ->
request.send
method: 'GET'
url: '/user/billing/account'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Get the current billing plan
# @name getPlan
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - billing plan
# @returns {Promise}
#
# @example
# balena.models.billing.getPlan().then(function(billingPlan) {
# console.log(billingPlan);
# });
#
# @example
# balena.models.billing.getPlan(function(error, billingPlan) {
# if (error) throw error;
# console.log(billingPlan);
# });
###
exports.getPlan = (callback) ->
request.send
method: 'GET'
url: '/user/billing/plan'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Get the current billing information
# @name getBillingInfo
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - billing information
# @returns {Promise}
#
# @example
# balena.models.billing.getBillingInfo().then(function(billingInfo) {
# console.log(billingInfo);
# });
#
# @example
# balena.models.billing.getBillingInfo(function(error, billingInfo) {
# if (error) throw error;
# console.log(billingInfo);
# });
###
exports.getBillingInfo = (callback) ->
request.send
method: 'GET'
url: '/user/billing/info'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Update the current billing information
# @name updateBillingInfo
# @public
# @function
# @memberof balena.models.billing
#
# @param {Object} billingInfo - an object containing a billing info token_id
# @param {String} billingInfo.token_id - the token id generated for the billing info form
# @param {(String|undefined)} [billingInfo.'g-recaptcha-response'] - the captcha response
# @fulfil {Object} - billing information
# @returns {Promise}
#
# @example
# balena.models.billing.updateBillingInfo({ token_id: 'xxxxPI:KEY:<KEY>END_PI' }).then(function(billingInfo) {
# console.log(billingInfo);
# });
#
# @example
# balena.models.billing.updateBillingInfo({ token_id: 'PI:KEY:<KEY>END_PI' }, function(error, billingInfo) {
# if (error) throw error;
# console.log(billingInfo);
# });
###
exports.updateBillingInfo = (billingInfo, callback) ->
request.send
method: 'POST'
url: '/user/billing/info'
baseUrl: apiUrl
body: billingInfo
.get('body')
.asCallback(callback)
###*
# @summary Get the available invoices
# @name getInvoices
# @public
# @function
# @memberof balena.models.billing
#
# @fulfil {Object} - invoices
# @returns {Promise}
#
# @example
# balena.models.billing.getInvoices().then(function(invoices) {
# console.log(invoices);
# });
#
# @example
# balena.models.billing.getInvoices(function(error, invoices) {
# if (error) throw error;
# console.log(invoices);
# });
###
exports.getInvoices = (callback) ->
request.send
method: 'GET'
url: '/user/billing/invoices'
baseUrl: apiUrl
.get('body')
.asCallback(callback)
###*
# @summary Download a specific invoice
# @name downloadInvoice
# @public
# @function
# @memberof balena.models.billing
#
# @param {String} - an invoice number
# @fulfil {Blob|ReadableStream} - blob on the browser, download stream on node
# @returns {Promise}
#
# @example
# # Browser
# balena.models.billing.downloadInvoice('0000').then(function(blob) {
# console.log(blob);
# });
# # Node
# balena.models.billing.downloadInvoice('0000').then(function(stream) {
# stream.pipe(fs.createWriteStream('foo/bar/invoice-0000.pdf'));
# });
###
exports.downloadInvoice = (invoiceNumber, callback) ->
url = "/user/billing/invoices/#{invoiceNumber}/download"
if not isBrowser
return request.stream
method: 'GET'
url: url
baseUrl: apiUrl
.asCallback(callback)
request.send
method: 'GET'
url: url
baseUrl: apiUrl
responseFormat: 'blob'
.get('body')
.asCallback(callback)
return exports
module.exports = getBillingModel
|
[
{
"context": "#\n# MConn Framework\n# https://www.github.com/livespotting/mconn\n#\n# @copyright 2015 Livespotting Media GmbH",
"end": 57,
"score": 0.992474377155304,
"start": 45,
"tag": "USERNAME",
"value": "livespotting"
},
{
"context": "tting Media GmbH\n# @license Apache-2.0\n#\n... | test/ModuleTest.coffee | livespotting/mconn-helloworld | 0 | #
# MConn Framework
# https://www.github.com/livespotting/mconn
#
# @copyright 2015 Livespotting Media GmbH
# @license Apache-2.0
#
# @author Christoph Johannsdotter [c.johannsdotter@livespottingmedia.com]
# @author Jan Stabenow [j.stabenow@livespottingmedia.com]
#
bodyParser = require('body-parser')
chai = require("chai")
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
chai.use sinonChai
expect = chai.expect
express = require("express")
http = require('http')
Q = require("q")
request = require("request")
pathtomconnstartjs = if process.env.MCONN_TEST_PATH then process.env.MCONN_TEST_PATH + "/bin/start.js" else "/mconn/bin/start.js"
pathtomconn = if process.env.MCONN_TEST_PATH then process.env.MCONN_TEST_PATH else "/mconn"
modulefolderroot = if process.env.MCONN_TEST_MODULE_PATH then process.env.MCONN_TEST_MODULE_PATH else "/mconn/modules"
modulefoldername = if process.env.MCONN_TEST_MODULE_START then process.env.MCONN_TEST_MODULE_START else "HelloWorld"
moduleclassname = if process.env.MCONN_TEST_MODULE_CLASSNAME then process.env.MCONN_TEST_MODULE_CLASSNAME else "HelloWorld"
Module = require(pathtomconn + "/src/application/classes/Module")
routes = require(pathtomconn + "/src/application/webserver/routes/index")
MainApp = require(pathtomconn + "/src/application/App")
Manager = require(pathtomconn + "/test/utils/ProcessManager")
createMarathonRequestdata = require(pathtomconn + "/test/utils/Helper").createMarathonRequestdata
createPresetRequestdata = require(pathtomconn + "/test/utils/Helper").createPresetRequestdata
webserverIsStarted = require(pathtomconn + "/test/utils/Helper").webserverIsStarted
webserverIsKilled = require(pathtomconn + "/test/utils/Helper").webserverIsKilled
isEmpty = (obj) ->
for k of obj
if obj.hasOwnProperty(k)
return false
true
check = (done, f) ->
try
f()
done()
catch e
done(e)
environment = (processName, port) ->
name: processName
MCONN_HOST: "127.0.0.1"
MCONN_PORT: port
MCONN_CREDENTIALS: ""
MCONN_PATH: pathtomconn
MCONN_MODULE_PATH: modulefolderroot
MCONN_MODULE_START: modulefoldername
MCONN_MOULE_HELLOWORLD_DELAY: 250
MCONN_ZK_HOSTS: if process.env.ALIAS_PORT_2181_TCP_ADDR? then process.env.ALIAS_PORT_2181_TCP_ADDR + ":2181" else "127.0.0.1:2181"
MCONN_ZK_PATH: "/mconn-dev-module-helloworld"
mconn1 = null
describe "Module Tests", ->
describe "Unittests", ->
describe "pause", ->
it "should wait for activeTask if activeTask has not been finished", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 400
anyModule.timeout = 1000
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(false)
stubPause = sinon.stub(anyModule.queue, "pause")
anyModule.pause()
Q.delay(1000).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).to.be.at.least(2)
expect(stubPause).not.to.have.been.called
stubPause.restore()
it "should pause if activeTask has been finished", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 400
anyModule.timeout = 1000
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(true)
stubPause = sinon.stub(anyModule.queue, "pause")
anyModule.pause()
Q.delay(1000).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).equal(1)
expect(stubPause).to.have.been.called
stubPause.restore()
it "should pause if activeTask has not been finished, but gets finished after a while", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 200
anyModule.timeout = 1500
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(false)
stubPause = sinon.stub(anyModule.queue, "pause")
anyModule.pause()
Q.delay(500)
.then ->
expect(stubPause).not.to.have.been.called
stubCheckTaskHasFinishedState.returns(true) #task has noew been finished
.delay(500).then ->
check done, ->
expect(stubPause).to.have.been.called
stubPause.restore()
it "should clear checkInterval if task has been finished and queue has been paused", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 300
anyModule.timeout = 1500
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(true)
anyModule.pause()
Q.delay(1000).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).equal(1)
it "should clear checkInterval if timeout has been reached", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 200
anyModule.timeout = 1000
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(false)
anyModule.pause()
Q.delay(1500).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).to.be.at.most(5)
describe "Integrationtests", ->
before (done) ->
this.timeout(60000)
mconn1 = new Manager pathtomconnstartjs, environment("MCONN_NODE_1", 1240)
webserverIsStarted(1240).then ->
Q.delay(2000)
.then ->
done()
describe "check if the #{moduleclassname} Module has been loaded", ->
describe "GET /v1/module/list", ->
this.timeout(5000)
it "should return array of loaded modules including #{moduleclassname}", (done) ->
request.get "http://127.0.0.1:1240/v1/module/list", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname].name).equal(moduleclassname)
describe "GET /v1/module/list/#{moduleclassname}", ->
it "should return the data of the #{moduleclassname}Module", (done) ->
request.get "http://127.0.0.1:1240/v1/module/list/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body.name).equal(moduleclassname)
describe "Presets API", ->
describe "CRUD", ->
describe "- create - ", ->
it "should respond status 'ok' on POST /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled"), (error, req, body) ->
check done, ->
expect(body.status).equal("ok")
it "should respond message 'AppId for module #{moduleclassname} created: /app1' on POST /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app1", moduleclassname, "enabled"), (error, req, body) ->
check done, ->
expect(body.message).equal("AppId for module #{moduleclassname} created: /app1")
describe "- read - ", ->
it "should respond with Object including preset '/app' for module '#{moduleclassname}' on GET /v1/module/preset", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname][0].appId).equal('/app')
it "should respond with Object including preset '/app' on GET /v1/module/preset/#{moduleclassname}", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body[0].appId).equal('/app')
describe "- update - ", ->
it "should respond with status ='ok' on PUT /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled", "PUT"), (error, req, body) ->
check done, ->
expect(body.status).equal("ok")
it "should respond message 'AppId for module #{moduleclassname} modified: /app' on PUT /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled", "PUT"), (error, req, body) ->
check done, ->
expect(body.message).equal("AppId for module #{moduleclassname} modified: /app")
# PLEASE NOTE: deletion rely on successfull creations in tests above!
describe "- delete - ", ->
it "should respond with 'ok' on DELETE /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
check done, ->
expect(body.status).equal("ok")
it "should respond with message 'AppId for module #{moduleclassname} deleted: /app1' on DELETE v1/module/preset/#{moduleclassname}", (done) ->
request createPresetRequestdata(1240, "/app1", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
check done, ->
expect(body.message).equal("AppId for module #{moduleclassname} deleted: /app1")
it "should return empty presetlist on GET v1/module/preset/#{moduleclassname}", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).equal(0)
describe "check recovering presets from zookeeper after leader-change (create 3 presets for testing)", ->
#before: create a preset
before (done) ->
this.timeout(60000)
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart1", moduleclassname, "enabled"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart2", moduleclassname, "enabled"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart3", moduleclassname, "enabled"), (error, req, body) ->
mconn1.kill()
Q.delay(5000) # zk session timeout
.then ->
mconn1 = new Manager pathtomconnstartjs, environment("MCONN_NODE_1", 1240)
webserverIsStarted(1240)
.then ->
Q.delay(2000).then ->
done()
it "should recover 3 presets after restart", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).equal(3)
for i in [1..3]
do (i) ->
it "should recover preset '/app-should-be-loaded-after-restart#{i}' after restart", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
found = false
for preset in body
if preset.appId is "/app-should-be-loaded-after-restart#{i}"
found = true
expect(found).equal(true)
after (done) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart1", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart2", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart3", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
Q.delay(1500).then ->
done()
describe "check if tasks are only beeing processed if there is an assigned preset", ->
describe "POST a marathon-task to /v1/queue", ->
it "should respond with status/message 'ok'", (done) ->
request createMarathonRequestdata(1240, "/app-without-a-preset", "task_app_1234"), (error, req, body) ->
check done, ->
expect(body.taskId).equal('task_app_1234_TASK_RUNNING')
it "should return an empty queue after 300ms (working time is 250ms) on GET /v1/queue", (done) ->
Q.delay(1000).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(isEmpty(body)).equal(true, "body is " + JSON.stringify(body))
describe "preset is disabled", ->
# add disabled preset
before (done) ->
request createPresetRequestdata(1240, "/anotherapp", moduleclassname, "disabled"), (error, req, body) ->
done()
it "should write the status 'disabled' to zk-node", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname][0].status).equal('disabled')
it "should not process any jobs and quickly remove them from queue (after 300ms, normally they would last 750ms)", (done) ->
request createMarathonRequestdata(1240, "/anotherapp", "app_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app2_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app3_1"), (error, req, body) ->
Q.delay(600).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).equal(0)
describe "preset gets enabled", ->
before (done) ->
request createPresetRequestdata(1240, "/anotherapp", moduleclassname, "enabled", "PUT"), (error, req, body) ->
done()
it "should write the status 'enabled' to zk-node", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname][0].status).equal('enabled')
it "should process jobs now", (done) ->
request createMarathonRequestdata(1240, "/anotherapp", "app1a_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app2a_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app3a_1"), (error, req, body) ->
Q.delay(600).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).not.equal(0)
#remove preset to avoid sideeffects on other tests
after (done) ->
request createPresetRequestdata(1240, "/anotherapp", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
done()
describe "POST a marathon-task to /v1/queue", ->
before (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled"), (error, req, body) ->
done()
it "should return taskId 'app_1_TASK_RUNNING'", (done) ->
request createMarathonRequestdata(1240, "/app", "app_1"), (error, req, body) ->
check done, ->
expect(body.taskId).equal('app_1_TASK_RUNNING')
describe "GET /v1/queue", ->
it "should return an empty queue", (done) ->
Q.delay(1850).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(isEmpty(body)).equal(true, "body is " + JSON.stringify(body))
describe "check processing tasks", ->
describe "POST one task for preset /app to /v1/queue", ->
it "should return with taskId 'app_1_TASK_KILLED'", (done) ->
request createMarathonRequestdata(1240, "/app", "app_1", "TASK_KILLED"), (error, req, body) ->
check done, ->
expect(body.taskId).equal('app_1_TASK_KILLED')
describe "GET /v1/module/queue/list/#{moduleclassname}", ->
responseBody = null
Q.delay(250).then ->
request createMarathonRequestdata(1240, "/app", "app_2", "TASK_RUNNING"), (error, req, body) ->
it "should return a queue with 1 task queued", (done) ->
request.get "http://127.0.0.1:1240/v1/module/queue/list/#{moduleclassname}", {json: true}, (error, req, body) ->
responseBody = body
check done, ->
expect(responseBody.length).equal(1)
describe "GET /v1/queue", ->
responseBody = null
Q.delay(250).then ->
request createMarathonRequestdata(1240, "/app", "app_2", "TASK_KILLED"), (error, req, body) ->
it "should return a queue with 1 task queued", (done) ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
responseBody = body
check done, ->
expect(responseBody.length).equal(1)
describe "the queued task", ->
Q.delay(260).then ->
request createMarathonRequestdata(1240, "/app", "app" + new Date().getTime(), "TASK_RUNNING"), (error, req, body) ->
it "should have one module registered", ->
expect(responseBody[0].moduleState.length).equal(1)
it "should have registered the module '#{moduleclassname}'", ->
expect(responseBody[0].moduleState[0].name).equal(moduleclassname)
it "should have the state 'started' for module '#{moduleclassname}'", ->
expect(responseBody[0].moduleState[0].state).equal('started')
it "should be finished after 250ms (that's the static workertime for this test)", (done) ->
Q.delay(260).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(isEmpty(body)).equal(true)
describe "POST multiple (five) tasks", ->
timeperjob = 250
before (done) ->
request createMarathonRequestdata(1240, "/app", "app1_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app2_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app3_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app4_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app5_1"), (error, req, body) ->
done()
describe "the queue", ->
responseBody = null
before (done) ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
responseBody = body
done()
it "should have tasks queued", ->
expect(responseBody.length).not.to.equal(0)
it "should not have 0 tasks after 500ms", (done) ->
Q.delay(timeperjob + 10).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).not.to.equal(0)
it "should have 0 tasks after 1500ms", (done) ->
this.timeout(3000)
Q.delay(2000).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).to.equal(0)
after (done) ->
this.timeout(20000)
request createMarathonRequestdata(1240, "/app", "app1_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app1a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app2_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app2a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app3_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app3a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app4_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app4a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app5_1", "TASK_KILLED"), (error, req, body) ->
Q.delay(10000).then ->
mconn1.kill()
webserverIsKilled(1240).then ->
done()
| 46786 | #
# MConn Framework
# https://www.github.com/livespotting/mconn
#
# @copyright 2015 Livespotting Media GmbH
# @license Apache-2.0
#
# @author <NAME> [<EMAIL>]
# @author <NAME> [<EMAIL>]
#
bodyParser = require('body-parser')
chai = require("chai")
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
chai.use sinonChai
expect = chai.expect
express = require("express")
http = require('http')
Q = require("q")
request = require("request")
pathtomconnstartjs = if process.env.MCONN_TEST_PATH then process.env.MCONN_TEST_PATH + "/bin/start.js" else "/mconn/bin/start.js"
pathtomconn = if process.env.MCONN_TEST_PATH then process.env.MCONN_TEST_PATH else "/mconn"
modulefolderroot = if process.env.MCONN_TEST_MODULE_PATH then process.env.MCONN_TEST_MODULE_PATH else "/mconn/modules"
modulefoldername = if process.env.MCONN_TEST_MODULE_START then process.env.MCONN_TEST_MODULE_START else "HelloWorld"
moduleclassname = if process.env.MCONN_TEST_MODULE_CLASSNAME then process.env.MCONN_TEST_MODULE_CLASSNAME else "HelloWorld"
Module = require(pathtomconn + "/src/application/classes/Module")
routes = require(pathtomconn + "/src/application/webserver/routes/index")
MainApp = require(pathtomconn + "/src/application/App")
Manager = require(pathtomconn + "/test/utils/ProcessManager")
createMarathonRequestdata = require(pathtomconn + "/test/utils/Helper").createMarathonRequestdata
createPresetRequestdata = require(pathtomconn + "/test/utils/Helper").createPresetRequestdata
webserverIsStarted = require(pathtomconn + "/test/utils/Helper").webserverIsStarted
webserverIsKilled = require(pathtomconn + "/test/utils/Helper").webserverIsKilled
isEmpty = (obj) ->
for k of obj
if obj.hasOwnProperty(k)
return false
true
check = (done, f) ->
try
f()
done()
catch e
done(e)
environment = (processName, port) ->
name: processName
MCONN_HOST: "127.0.0.1"
MCONN_PORT: port
MCONN_CREDENTIALS: ""
MCONN_PATH: pathtomconn
MCONN_MODULE_PATH: modulefolderroot
MCONN_MODULE_START: modulefoldername
MCONN_MOULE_HELLOWORLD_DELAY: 250
MCONN_ZK_HOSTS: if process.env.ALIAS_PORT_2181_TCP_ADDR? then process.env.ALIAS_PORT_2181_TCP_ADDR + ":2181" else "127.0.0.1:2181"
MCONN_ZK_PATH: "/mconn-dev-module-helloworld"
mconn1 = null
describe "Module Tests", ->
describe "Unittests", ->
describe "pause", ->
it "should wait for activeTask if activeTask has not been finished", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 400
anyModule.timeout = 1000
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(false)
stubPause = sinon.stub(anyModule.queue, "pause")
anyModule.pause()
Q.delay(1000).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).to.be.at.least(2)
expect(stubPause).not.to.have.been.called
stubPause.restore()
it "should pause if activeTask has been finished", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 400
anyModule.timeout = 1000
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(true)
stubPause = sinon.stub(anyModule.queue, "pause")
anyModule.pause()
Q.delay(1000).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).equal(1)
expect(stubPause).to.have.been.called
stubPause.restore()
it "should pause if activeTask has not been finished, but gets finished after a while", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 200
anyModule.timeout = 1500
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(false)
stubPause = sinon.stub(anyModule.queue, "pause")
anyModule.pause()
Q.delay(500)
.then ->
expect(stubPause).not.to.have.been.called
stubCheckTaskHasFinishedState.returns(true) #task has noew been finished
.delay(500).then ->
check done, ->
expect(stubPause).to.have.been.called
stubPause.restore()
it "should clear checkInterval if task has been finished and queue has been paused", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 300
anyModule.timeout = 1500
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(true)
anyModule.pause()
Q.delay(1000).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).equal(1)
it "should clear checkInterval if timeout has been reached", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 200
anyModule.timeout = 1000
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(false)
anyModule.pause()
Q.delay(1500).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).to.be.at.most(5)
describe "Integrationtests", ->
before (done) ->
this.timeout(60000)
mconn1 = new Manager pathtomconnstartjs, environment("MCONN_NODE_1", 1240)
webserverIsStarted(1240).then ->
Q.delay(2000)
.then ->
done()
describe "check if the #{moduleclassname} Module has been loaded", ->
describe "GET /v1/module/list", ->
this.timeout(5000)
it "should return array of loaded modules including #{moduleclassname}", (done) ->
request.get "http://127.0.0.1:1240/v1/module/list", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname].name).equal(moduleclassname)
describe "GET /v1/module/list/#{moduleclassname}", ->
it "should return the data of the #{moduleclassname}Module", (done) ->
request.get "http://127.0.0.1:1240/v1/module/list/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body.name).equal(moduleclassname)
describe "Presets API", ->
describe "CRUD", ->
describe "- create - ", ->
it "should respond status 'ok' on POST /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled"), (error, req, body) ->
check done, ->
expect(body.status).equal("ok")
it "should respond message 'AppId for module #{moduleclassname} created: /app1' on POST /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app1", moduleclassname, "enabled"), (error, req, body) ->
check done, ->
expect(body.message).equal("AppId for module #{moduleclassname} created: /app1")
describe "- read - ", ->
it "should respond with Object including preset '/app' for module '#{moduleclassname}' on GET /v1/module/preset", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname][0].appId).equal('/app')
it "should respond with Object including preset '/app' on GET /v1/module/preset/#{moduleclassname}", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body[0].appId).equal('/app')
describe "- update - ", ->
it "should respond with status ='ok' on PUT /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled", "PUT"), (error, req, body) ->
check done, ->
expect(body.status).equal("ok")
it "should respond message 'AppId for module #{moduleclassname} modified: /app' on PUT /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled", "PUT"), (error, req, body) ->
check done, ->
expect(body.message).equal("AppId for module #{moduleclassname} modified: /app")
# PLEASE NOTE: deletion rely on successfull creations in tests above!
describe "- delete - ", ->
it "should respond with 'ok' on DELETE /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
check done, ->
expect(body.status).equal("ok")
it "should respond with message 'AppId for module #{moduleclassname} deleted: /app1' on DELETE v1/module/preset/#{moduleclassname}", (done) ->
request createPresetRequestdata(1240, "/app1", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
check done, ->
expect(body.message).equal("AppId for module #{moduleclassname} deleted: /app1")
it "should return empty presetlist on GET v1/module/preset/#{moduleclassname}", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).equal(0)
describe "check recovering presets from zookeeper after leader-change (create 3 presets for testing)", ->
#before: create a preset
before (done) ->
this.timeout(60000)
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart1", moduleclassname, "enabled"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart2", moduleclassname, "enabled"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart3", moduleclassname, "enabled"), (error, req, body) ->
mconn1.kill()
Q.delay(5000) # zk session timeout
.then ->
mconn1 = new Manager pathtomconnstartjs, environment("MCONN_NODE_1", 1240)
webserverIsStarted(1240)
.then ->
Q.delay(2000).then ->
done()
it "should recover 3 presets after restart", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).equal(3)
for i in [1..3]
do (i) ->
it "should recover preset '/app-should-be-loaded-after-restart#{i}' after restart", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
found = false
for preset in body
if preset.appId is "/app-should-be-loaded-after-restart#{i}"
found = true
expect(found).equal(true)
after (done) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart1", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart2", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart3", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
Q.delay(1500).then ->
done()
describe "check if tasks are only beeing processed if there is an assigned preset", ->
describe "POST a marathon-task to /v1/queue", ->
it "should respond with status/message 'ok'", (done) ->
request createMarathonRequestdata(1240, "/app-without-a-preset", "task_app_1234"), (error, req, body) ->
check done, ->
expect(body.taskId).equal('task_app_1234_TASK_RUNNING')
it "should return an empty queue after 300ms (working time is 250ms) on GET /v1/queue", (done) ->
Q.delay(1000).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(isEmpty(body)).equal(true, "body is " + JSON.stringify(body))
describe "preset is disabled", ->
# add disabled preset
before (done) ->
request createPresetRequestdata(1240, "/anotherapp", moduleclassname, "disabled"), (error, req, body) ->
done()
it "should write the status 'disabled' to zk-node", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname][0].status).equal('disabled')
it "should not process any jobs and quickly remove them from queue (after 300ms, normally they would last 750ms)", (done) ->
request createMarathonRequestdata(1240, "/anotherapp", "app_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app2_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app3_1"), (error, req, body) ->
Q.delay(600).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).equal(0)
describe "preset gets enabled", ->
before (done) ->
request createPresetRequestdata(1240, "/anotherapp", moduleclassname, "enabled", "PUT"), (error, req, body) ->
done()
it "should write the status 'enabled' to zk-node", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname][0].status).equal('enabled')
it "should process jobs now", (done) ->
request createMarathonRequestdata(1240, "/anotherapp", "app1a_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app2a_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app3a_1"), (error, req, body) ->
Q.delay(600).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).not.equal(0)
#remove preset to avoid sideeffects on other tests
after (done) ->
request createPresetRequestdata(1240, "/anotherapp", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
done()
describe "POST a marathon-task to /v1/queue", ->
before (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled"), (error, req, body) ->
done()
it "should return taskId 'app_1_TASK_RUNNING'", (done) ->
request createMarathonRequestdata(1240, "/app", "app_1"), (error, req, body) ->
check done, ->
expect(body.taskId).equal('app_1_TASK_RUNNING')
describe "GET /v1/queue", ->
it "should return an empty queue", (done) ->
Q.delay(1850).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(isEmpty(body)).equal(true, "body is " + JSON.stringify(body))
describe "check processing tasks", ->
describe "POST one task for preset /app to /v1/queue", ->
it "should return with taskId 'app_1_TASK_KILLED'", (done) ->
request createMarathonRequestdata(1240, "/app", "app_1", "TASK_KILLED"), (error, req, body) ->
check done, ->
expect(body.taskId).equal('app_1_TASK_KILLED')
describe "GET /v1/module/queue/list/#{moduleclassname}", ->
responseBody = null
Q.delay(250).then ->
request createMarathonRequestdata(1240, "/app", "app_2", "TASK_RUNNING"), (error, req, body) ->
it "should return a queue with 1 task queued", (done) ->
request.get "http://127.0.0.1:1240/v1/module/queue/list/#{moduleclassname}", {json: true}, (error, req, body) ->
responseBody = body
check done, ->
expect(responseBody.length).equal(1)
describe "GET /v1/queue", ->
responseBody = null
Q.delay(250).then ->
request createMarathonRequestdata(1240, "/app", "app_2", "TASK_KILLED"), (error, req, body) ->
it "should return a queue with 1 task queued", (done) ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
responseBody = body
check done, ->
expect(responseBody.length).equal(1)
describe "the queued task", ->
Q.delay(260).then ->
request createMarathonRequestdata(1240, "/app", "app" + new Date().getTime(), "TASK_RUNNING"), (error, req, body) ->
it "should have one module registered", ->
expect(responseBody[0].moduleState.length).equal(1)
it "should have registered the module '#{moduleclassname}'", ->
expect(responseBody[0].moduleState[0].name).equal(moduleclassname)
it "should have the state 'started' for module '#{moduleclassname}'", ->
expect(responseBody[0].moduleState[0].state).equal('started')
it "should be finished after 250ms (that's the static workertime for this test)", (done) ->
Q.delay(260).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(isEmpty(body)).equal(true)
describe "POST multiple (five) tasks", ->
timeperjob = 250
before (done) ->
request createMarathonRequestdata(1240, "/app", "app1_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app2_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app3_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app4_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app5_1"), (error, req, body) ->
done()
describe "the queue", ->
responseBody = null
before (done) ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
responseBody = body
done()
it "should have tasks queued", ->
expect(responseBody.length).not.to.equal(0)
it "should not have 0 tasks after 500ms", (done) ->
Q.delay(timeperjob + 10).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).not.to.equal(0)
it "should have 0 tasks after 1500ms", (done) ->
this.timeout(3000)
Q.delay(2000).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).to.equal(0)
after (done) ->
this.timeout(20000)
request createMarathonRequestdata(1240, "/app", "app1_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app1a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app2_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app2a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app3_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app3a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app4_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app4a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app5_1", "TASK_KILLED"), (error, req, body) ->
Q.delay(10000).then ->
mconn1.kill()
webserverIsKilled(1240).then ->
done()
| true | #
# MConn Framework
# https://www.github.com/livespotting/mconn
#
# @copyright 2015 Livespotting Media GmbH
# @license Apache-2.0
#
# @author PI:NAME:<NAME>END_PI [PI:EMAIL:<EMAIL>END_PI]
# @author PI:NAME:<NAME>END_PI [PI:EMAIL:<EMAIL>END_PI]
#
bodyParser = require('body-parser')
chai = require("chai")
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
chai.use sinonChai
expect = chai.expect
express = require("express")
http = require('http')
Q = require("q")
request = require("request")
pathtomconnstartjs = if process.env.MCONN_TEST_PATH then process.env.MCONN_TEST_PATH + "/bin/start.js" else "/mconn/bin/start.js"
pathtomconn = if process.env.MCONN_TEST_PATH then process.env.MCONN_TEST_PATH else "/mconn"
modulefolderroot = if process.env.MCONN_TEST_MODULE_PATH then process.env.MCONN_TEST_MODULE_PATH else "/mconn/modules"
modulefoldername = if process.env.MCONN_TEST_MODULE_START then process.env.MCONN_TEST_MODULE_START else "HelloWorld"
moduleclassname = if process.env.MCONN_TEST_MODULE_CLASSNAME then process.env.MCONN_TEST_MODULE_CLASSNAME else "HelloWorld"
Module = require(pathtomconn + "/src/application/classes/Module")
routes = require(pathtomconn + "/src/application/webserver/routes/index")
MainApp = require(pathtomconn + "/src/application/App")
Manager = require(pathtomconn + "/test/utils/ProcessManager")
createMarathonRequestdata = require(pathtomconn + "/test/utils/Helper").createMarathonRequestdata
createPresetRequestdata = require(pathtomconn + "/test/utils/Helper").createPresetRequestdata
webserverIsStarted = require(pathtomconn + "/test/utils/Helper").webserverIsStarted
webserverIsKilled = require(pathtomconn + "/test/utils/Helper").webserverIsKilled
isEmpty = (obj) ->
for k of obj
if obj.hasOwnProperty(k)
return false
true
check = (done, f) ->
try
f()
done()
catch e
done(e)
environment = (processName, port) ->
name: processName
MCONN_HOST: "127.0.0.1"
MCONN_PORT: port
MCONN_CREDENTIALS: ""
MCONN_PATH: pathtomconn
MCONN_MODULE_PATH: modulefolderroot
MCONN_MODULE_START: modulefoldername
MCONN_MOULE_HELLOWORLD_DELAY: 250
MCONN_ZK_HOSTS: if process.env.ALIAS_PORT_2181_TCP_ADDR? then process.env.ALIAS_PORT_2181_TCP_ADDR + ":2181" else "127.0.0.1:2181"
MCONN_ZK_PATH: "/mconn-dev-module-helloworld"
mconn1 = null
describe "Module Tests", ->
describe "Unittests", ->
describe "pause", ->
it "should wait for activeTask if activeTask has not been finished", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 400
anyModule.timeout = 1000
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(false)
stubPause = sinon.stub(anyModule.queue, "pause")
anyModule.pause()
Q.delay(1000).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).to.be.at.least(2)
expect(stubPause).not.to.have.been.called
stubPause.restore()
it "should pause if activeTask has been finished", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 400
anyModule.timeout = 1000
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(true)
stubPause = sinon.stub(anyModule.queue, "pause")
anyModule.pause()
Q.delay(1000).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).equal(1)
expect(stubPause).to.have.been.called
stubPause.restore()
it "should pause if activeTask has not been finished, but gets finished after a while", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 200
anyModule.timeout = 1500
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(false)
stubPause = sinon.stub(anyModule.queue, "pause")
anyModule.pause()
Q.delay(500)
.then ->
expect(stubPause).not.to.have.been.called
stubCheckTaskHasFinishedState.returns(true) #task has noew been finished
.delay(500).then ->
check done, ->
expect(stubPause).to.have.been.called
stubPause.restore()
it "should clear checkInterval if task has been finished and queue has been paused", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 300
anyModule.timeout = 1500
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(true)
anyModule.pause()
Q.delay(1000).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).equal(1)
it "should clear checkInterval if timeout has been reached", (done)->
async = require("async")
anyModule = new Module("AnyModule")
anyModule.checkIntervalPauseQueue = 200
anyModule.timeout = 1000
stubCheckTaskHasFinishedState = sinon.stub(anyModule, "checkTaskHasFinishedState")
stubCheckTaskHasFinishedState.returns(false)
anyModule.pause()
Q.delay(1500).then ->
check done, ->
expect(stubCheckTaskHasFinishedState.callCount).to.be.at.most(5)
describe "Integrationtests", ->
before (done) ->
this.timeout(60000)
mconn1 = new Manager pathtomconnstartjs, environment("MCONN_NODE_1", 1240)
webserverIsStarted(1240).then ->
Q.delay(2000)
.then ->
done()
describe "check if the #{moduleclassname} Module has been loaded", ->
describe "GET /v1/module/list", ->
this.timeout(5000)
it "should return array of loaded modules including #{moduleclassname}", (done) ->
request.get "http://127.0.0.1:1240/v1/module/list", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname].name).equal(moduleclassname)
describe "GET /v1/module/list/#{moduleclassname}", ->
it "should return the data of the #{moduleclassname}Module", (done) ->
request.get "http://127.0.0.1:1240/v1/module/list/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body.name).equal(moduleclassname)
describe "Presets API", ->
describe "CRUD", ->
describe "- create - ", ->
it "should respond status 'ok' on POST /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled"), (error, req, body) ->
check done, ->
expect(body.status).equal("ok")
it "should respond message 'AppId for module #{moduleclassname} created: /app1' on POST /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app1", moduleclassname, "enabled"), (error, req, body) ->
check done, ->
expect(body.message).equal("AppId for module #{moduleclassname} created: /app1")
describe "- read - ", ->
it "should respond with Object including preset '/app' for module '#{moduleclassname}' on GET /v1/module/preset", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname][0].appId).equal('/app')
it "should respond with Object including preset '/app' on GET /v1/module/preset/#{moduleclassname}", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body[0].appId).equal('/app')
describe "- update - ", ->
it "should respond with status ='ok' on PUT /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled", "PUT"), (error, req, body) ->
check done, ->
expect(body.status).equal("ok")
it "should respond message 'AppId for module #{moduleclassname} modified: /app' on PUT /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled", "PUT"), (error, req, body) ->
check done, ->
expect(body.message).equal("AppId for module #{moduleclassname} modified: /app")
# PLEASE NOTE: deletion rely on successfull creations in tests above!
describe "- delete - ", ->
it "should respond with 'ok' on DELETE /v1/module/preset", (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
check done, ->
expect(body.status).equal("ok")
it "should respond with message 'AppId for module #{moduleclassname} deleted: /app1' on DELETE v1/module/preset/#{moduleclassname}", (done) ->
request createPresetRequestdata(1240, "/app1", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
check done, ->
expect(body.message).equal("AppId for module #{moduleclassname} deleted: /app1")
it "should return empty presetlist on GET v1/module/preset/#{moduleclassname}", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).equal(0)
describe "check recovering presets from zookeeper after leader-change (create 3 presets for testing)", ->
#before: create a preset
before (done) ->
this.timeout(60000)
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart1", moduleclassname, "enabled"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart2", moduleclassname, "enabled"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart3", moduleclassname, "enabled"), (error, req, body) ->
mconn1.kill()
Q.delay(5000) # zk session timeout
.then ->
mconn1 = new Manager pathtomconnstartjs, environment("MCONN_NODE_1", 1240)
webserverIsStarted(1240)
.then ->
Q.delay(2000).then ->
done()
it "should recover 3 presets after restart", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).equal(3)
for i in [1..3]
do (i) ->
it "should recover preset '/app-should-be-loaded-after-restart#{i}' after restart", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset/#{moduleclassname}", {json: true}, (error, req, body) ->
check done, ->
found = false
for preset in body
if preset.appId is "/app-should-be-loaded-after-restart#{i}"
found = true
expect(found).equal(true)
after (done) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart1", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart2", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
request createPresetRequestdata(1240, "/app-should-be-loaded-after-restart3", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
Q.delay(1500).then ->
done()
describe "check if tasks are only beeing processed if there is an assigned preset", ->
describe "POST a marathon-task to /v1/queue", ->
it "should respond with status/message 'ok'", (done) ->
request createMarathonRequestdata(1240, "/app-without-a-preset", "task_app_1234"), (error, req, body) ->
check done, ->
expect(body.taskId).equal('task_app_1234_TASK_RUNNING')
it "should return an empty queue after 300ms (working time is 250ms) on GET /v1/queue", (done) ->
Q.delay(1000).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(isEmpty(body)).equal(true, "body is " + JSON.stringify(body))
describe "preset is disabled", ->
# add disabled preset
before (done) ->
request createPresetRequestdata(1240, "/anotherapp", moduleclassname, "disabled"), (error, req, body) ->
done()
it "should write the status 'disabled' to zk-node", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname][0].status).equal('disabled')
it "should not process any jobs and quickly remove them from queue (after 300ms, normally they would last 750ms)", (done) ->
request createMarathonRequestdata(1240, "/anotherapp", "app_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app2_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app3_1"), (error, req, body) ->
Q.delay(600).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).equal(0)
describe "preset gets enabled", ->
before (done) ->
request createPresetRequestdata(1240, "/anotherapp", moduleclassname, "enabled", "PUT"), (error, req, body) ->
done()
it "should write the status 'enabled' to zk-node", (done) ->
request.get "http://127.0.0.1:1240/v1/module/preset", {json: true}, (error, req, body) ->
check done, ->
expect(body[moduleclassname][0].status).equal('enabled')
it "should process jobs now", (done) ->
request createMarathonRequestdata(1240, "/anotherapp", "app1a_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app2a_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app3a_1"), (error, req, body) ->
Q.delay(600).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).not.equal(0)
#remove preset to avoid sideeffects on other tests
after (done) ->
request createPresetRequestdata(1240, "/anotherapp", moduleclassname, "enabled", "DELETE"), (error, req, body) ->
done()
describe "POST a marathon-task to /v1/queue", ->
before (done) ->
request createPresetRequestdata(1240, "/app", moduleclassname, "enabled"), (error, req, body) ->
done()
it "should return taskId 'app_1_TASK_RUNNING'", (done) ->
request createMarathonRequestdata(1240, "/app", "app_1"), (error, req, body) ->
check done, ->
expect(body.taskId).equal('app_1_TASK_RUNNING')
describe "GET /v1/queue", ->
it "should return an empty queue", (done) ->
Q.delay(1850).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(isEmpty(body)).equal(true, "body is " + JSON.stringify(body))
describe "check processing tasks", ->
describe "POST one task for preset /app to /v1/queue", ->
it "should return with taskId 'app_1_TASK_KILLED'", (done) ->
request createMarathonRequestdata(1240, "/app", "app_1", "TASK_KILLED"), (error, req, body) ->
check done, ->
expect(body.taskId).equal('app_1_TASK_KILLED')
describe "GET /v1/module/queue/list/#{moduleclassname}", ->
responseBody = null
Q.delay(250).then ->
request createMarathonRequestdata(1240, "/app", "app_2", "TASK_RUNNING"), (error, req, body) ->
it "should return a queue with 1 task queued", (done) ->
request.get "http://127.0.0.1:1240/v1/module/queue/list/#{moduleclassname}", {json: true}, (error, req, body) ->
responseBody = body
check done, ->
expect(responseBody.length).equal(1)
describe "GET /v1/queue", ->
responseBody = null
Q.delay(250).then ->
request createMarathonRequestdata(1240, "/app", "app_2", "TASK_KILLED"), (error, req, body) ->
it "should return a queue with 1 task queued", (done) ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
responseBody = body
check done, ->
expect(responseBody.length).equal(1)
describe "the queued task", ->
Q.delay(260).then ->
request createMarathonRequestdata(1240, "/app", "app" + new Date().getTime(), "TASK_RUNNING"), (error, req, body) ->
it "should have one module registered", ->
expect(responseBody[0].moduleState.length).equal(1)
it "should have registered the module '#{moduleclassname}'", ->
expect(responseBody[0].moduleState[0].name).equal(moduleclassname)
it "should have the state 'started' for module '#{moduleclassname}'", ->
expect(responseBody[0].moduleState[0].state).equal('started')
it "should be finished after 250ms (that's the static workertime for this test)", (done) ->
Q.delay(260).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(isEmpty(body)).equal(true)
describe "POST multiple (five) tasks", ->
timeperjob = 250
before (done) ->
request createMarathonRequestdata(1240, "/app", "app1_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app2_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app3_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app4_1"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app5_1"), (error, req, body) ->
done()
describe "the queue", ->
responseBody = null
before (done) ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
responseBody = body
done()
it "should have tasks queued", ->
expect(responseBody.length).not.to.equal(0)
it "should not have 0 tasks after 500ms", (done) ->
Q.delay(timeperjob + 10).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).not.to.equal(0)
it "should have 0 tasks after 1500ms", (done) ->
this.timeout(3000)
Q.delay(2000).then ->
request.get "http://127.0.0.1:1240/v1/queue", {json: true}, (error, req, body) ->
check done, ->
expect(body.length).to.equal(0)
after (done) ->
this.timeout(20000)
request createMarathonRequestdata(1240, "/app", "app1_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app1a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app2_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app2a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app3_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app3a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app4_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/anotherapp", "app4a_1", "TASK_KILLED"), (error, req, body) ->
request createMarathonRequestdata(1240, "/app", "app5_1", "TASK_KILLED"), (error, req, body) ->
Q.delay(10000).then ->
mconn1.kill()
webserverIsKilled(1240).then ->
done()
|
[
{
"context": "tpClient) ->\n\n alert: (parameters, routingKey = 'everyone') ->\n @httpClient.request\n method: 'POST'",
"end": 289,
"score": 0.9486525058746338,
"start": 281,
"tag": "KEY",
"value": "everyone"
}
] | src/victorops.coffee | keyvanakbary/victorops | 6 | HttpClient = require './http_client'
class VictorOps
END_POINT = 'https://alert.victorops.com/integrations/generic/20131114/'
@create: (apiKey) ->
new VictorOps(apiKey, new HttpClient())
constructor: (@apiKey, @httpClient) ->
alert: (parameters, routingKey = 'everyone') ->
@httpClient.request
method: 'POST'
url: END_POINT + "alert/#{@apiKey}/#{routingKey}"
body: JSON.stringify(parameters)
headers: 'Content-Type': 'application/json'
done: failOnError
failOnError = (response) ->
data = JSON.parse(response)
throw "Error #{data.messages}" if data.result isnt 'success'
module.exports = VictorOps | 150962 | HttpClient = require './http_client'
class VictorOps
END_POINT = 'https://alert.victorops.com/integrations/generic/20131114/'
@create: (apiKey) ->
new VictorOps(apiKey, new HttpClient())
constructor: (@apiKey, @httpClient) ->
alert: (parameters, routingKey = '<KEY>') ->
@httpClient.request
method: 'POST'
url: END_POINT + "alert/#{@apiKey}/#{routingKey}"
body: JSON.stringify(parameters)
headers: 'Content-Type': 'application/json'
done: failOnError
failOnError = (response) ->
data = JSON.parse(response)
throw "Error #{data.messages}" if data.result isnt 'success'
module.exports = VictorOps | true | HttpClient = require './http_client'
class VictorOps
END_POINT = 'https://alert.victorops.com/integrations/generic/20131114/'
@create: (apiKey) ->
new VictorOps(apiKey, new HttpClient())
constructor: (@apiKey, @httpClient) ->
alert: (parameters, routingKey = 'PI:KEY:<KEY>END_PI') ->
@httpClient.request
method: 'POST'
url: END_POINT + "alert/#{@apiKey}/#{routingKey}"
body: JSON.stringify(parameters)
headers: 'Content-Type': 'application/json'
done: failOnError
failOnError = (response) ->
data = JSON.parse(response)
throw "Error #{data.messages}" if data.result isnt 'success'
module.exports = VictorOps |
[
{
"context": "USERNAME_FIELD or ndx.settings.USERNAME_FIELD or 'email'\n passwordField = process.env.PASSWORD_FIELD or ",
"end": 347,
"score": 0.8837460875511169,
"start": 342,
"tag": "USERNAME",
"value": "email"
},
{
"context": "PASSWORD_FIELD or ndx.settings.PASSWORD_FIELD or 'pas... | src/index.coffee | ndxbxrme/ndx-passport-property | 0 | 'use strict'
objtrans = require 'objtrans'
module.exports = (ndx) ->
callbacks =
login: []
logout: []
signup: []
refreshLogin: []
updatePassword: []
ndx.passport = require 'passport'
LocalStrategy = require('passport-local').Strategy
usernameField = process.env.USERNAME_FIELD or ndx.settings.USERNAME_FIELD or 'email'
passwordField = process.env.PASSWORD_FIELD or ndx.settings.PASSWORD_FIELD or 'password'
if ndx.settings.HAS_FORGOT or process.env.HAS_FORGOT
require('./forgot') ndx
if ndx.settings.HAS_INVITE or process.env.HAS_INVITE
require('./invite') ndx
syncCallback = (name, obj, cb) ->
if callbacks[name] and callbacks[name].length
for callback in callbacks[name]
callback obj
cb?()
ndx.passport.syncCallback = syncCallback
ndx.passport.on = (name, callback) ->
callbacks[name].push callback
ndx.passport.off = (name, callback) ->
callbacks[name].splice callbacks[name].indexOf(callback), 1
ndx.passport.serializeUser (user, done) ->
done null, user[ndx.settings.AUTO_ID]
ndx.passport.deserializeUser (id, done) ->
done null, id
ndx.passport.splitScopes = (scope) ->
scopes = scope.replace(/^[ ,]+/, '').replace(/[ ,]+$/, '').split(/[ ,]+/g)
if scopes.length < 2
return scopes[0]
else
return scopes
ndx.app
.use ndx.passport.initialize()
ndx.app.post '/api/refresh-login', (req, res) ->
if ndx.user
output = {}
if ndx.settings.PUBLIC_USER
output = objtrans ndx.user, ndx.settings.PUBLIC_USER
else
output = ndx.user
syncCallback 'refreshLogin', output
res.end JSON.stringify output
else
res.end ''
###
if ndx.settings.SOFT_LOGIN
res.end ''
else
throw ndx.UNAUTHORIZED
###
ndx.app.get '/api/logout', (req, res) ->
syncCallback 'logout', ndx.user
res.clearCookie 'token'
ndx.user = null
res.redirect '/'
return
ndx.app.post '/api/update-password', (req, res) ->
if ndx.user
if ndx.user.local
if ndx.validPassword req.body.oldPassword, ndx.user.local.password
where = {}
where[ndx.settings.AUTO_ID] = ndx.user[ndx.settings.AUTO_ID]
ndx.database.update ndx.settings.USER_TABLE,
local:
email: ndx.user.local.email
password: ndx.generateHash req.body.newPassword
, where, null, true
syncCallback 'updatePassword', ndx.user
res.end 'OK'
else
throw
status: 401
message: 'Invalid password'
else
throw
status: 401
message: 'No local details'
else
throw
status: 401
message: 'Not logged in'
ndx.passport.use 'local-signup', new LocalStrategy
usernameField: usernameField
passwordField: passwordField
passReqToCallback: true
, (req, email, password, done) ->
ndx.database.select ndx.settings.USER_TABLE,
where:
local:
email: email
, (users) ->
if users and users.length
ndx.passport.loginMessage = 'That email is already taken.'
return done(null, false)
else
newUser =
email: email
local:
email: email
password: ndx.generateHash password
newUser[ndx.settings.AUTO_ID] = ndx.generateID()
ndx.database.insert ndx.settings.USER_TABLE, newUser, null, true
ndx.user = newUser
if ndx.auth
ndx.auth.extendUser ndx.user
syncCallback 'signup', ndx.user
done null, ndx.user
, true
ndx.passport.use 'local-login', new LocalStrategy
usernameField: usernameField
passwordField: passwordField
passReqToCallback: true
, (req, email, password, done) ->
ndx.database.select ndx.settings.USER_TABLE,
where:
local:
email: email
, (users) ->
if users and users.length
if not ndx.validPassword password, users[0].local.password
ndx.passport.loginMessage = 'Wrong password'
return done(null, false)
ndx.user = users[0]
if ndx.auth
ndx.auth.extendUser ndx.user
syncCallback 'login', ndx.user
return done(null, users[0])
else
ndx.passport.loginMessage = 'No user found'
return done(null, false)
, true
ndx.app.post '/api/signup', ndx.passport.authenticate('local-signup', failureRedirect: '/api/badlogin')
, ndx.postAuthenticate
ndx.app.post '/api/login', ndx.passport.authenticate('local-login', failureRedirect: '/api/badlogin')
, ndx.postAuthenticate
ndx.app.get '/api/connect/local', (req, res) ->
#send flash message
return
ndx.app.post '/api/connect/local', ndx.passport.authorize('local-signup', failureRedirect: '/api/badlogin')
ndx.app.get '/api/unlink/local', (req, res) ->
user = ndx.user
user.local.email = undefined
user.local.password = undefined
user.save (err) ->
res.redirect '/profile'
return
return
ndx.app.get '/api/badlogin', (req, res) ->
throw
status: 401
message: ndx.passport.loginMessage
| 24065 | 'use strict'
objtrans = require 'objtrans'
module.exports = (ndx) ->
callbacks =
login: []
logout: []
signup: []
refreshLogin: []
updatePassword: []
ndx.passport = require 'passport'
LocalStrategy = require('passport-local').Strategy
usernameField = process.env.USERNAME_FIELD or ndx.settings.USERNAME_FIELD or 'email'
passwordField = process.env.PASSWORD_FIELD or ndx.settings.PASSWORD_FIELD or '<PASSWORD>'
if ndx.settings.HAS_FORGOT or process.env.HAS_FORGOT
require('./forgot') ndx
if ndx.settings.HAS_INVITE or process.env.HAS_INVITE
require('./invite') ndx
syncCallback = (name, obj, cb) ->
if callbacks[name] and callbacks[name].length
for callback in callbacks[name]
callback obj
cb?()
ndx.passport.syncCallback = syncCallback
ndx.passport.on = (name, callback) ->
callbacks[name].push callback
ndx.passport.off = (name, callback) ->
callbacks[name].splice callbacks[name].indexOf(callback), 1
ndx.passport.serializeUser (user, done) ->
done null, user[ndx.settings.AUTO_ID]
ndx.passport.deserializeUser (id, done) ->
done null, id
ndx.passport.splitScopes = (scope) ->
scopes = scope.replace(/^[ ,]+/, '').replace(/[ ,]+$/, '').split(/[ ,]+/g)
if scopes.length < 2
return scopes[0]
else
return scopes
ndx.app
.use ndx.passport.initialize()
ndx.app.post '/api/refresh-login', (req, res) ->
if ndx.user
output = {}
if ndx.settings.PUBLIC_USER
output = objtrans ndx.user, ndx.settings.PUBLIC_USER
else
output = ndx.user
syncCallback 'refreshLogin', output
res.end JSON.stringify output
else
res.end ''
###
if ndx.settings.SOFT_LOGIN
res.end ''
else
throw ndx.UNAUTHORIZED
###
ndx.app.get '/api/logout', (req, res) ->
syncCallback 'logout', ndx.user
res.clearCookie 'token'
ndx.user = null
res.redirect '/'
return
ndx.app.post '/api/update-password', (req, res) ->
if ndx.user
if ndx.user.local
if ndx.validPassword req.body.oldPassword, ndx.user.local.password
where = {}
where[ndx.settings.AUTO_ID] = ndx.user[ndx.settings.AUTO_ID]
ndx.database.update ndx.settings.USER_TABLE,
local:
email: ndx.user.local.email
password: <PASSWORD> <PASSWORD>
, where, null, true
syncCallback 'updatePassword', ndx.user
res.end 'OK'
else
throw
status: 401
message: 'Invalid password'
else
throw
status: 401
message: 'No local details'
else
throw
status: 401
message: 'Not logged in'
ndx.passport.use 'local-signup', new LocalStrategy
usernameField: usernameField
passwordField: <PASSWORD>
passReqToCallback: true
, (req, email, password, done) ->
ndx.database.select ndx.settings.USER_TABLE,
where:
local:
email: email
, (users) ->
if users and users.length
ndx.passport.loginMessage = 'That email is already taken.'
return done(null, false)
else
newUser =
email: email
local:
email: email
password: <PASSWORD>
newUser[ndx.settings.AUTO_ID] = ndx.generateID()
ndx.database.insert ndx.settings.USER_TABLE, newUser, null, true
ndx.user = newUser
if ndx.auth
ndx.auth.extendUser ndx.user
syncCallback 'signup', ndx.user
done null, ndx.user
, true
ndx.passport.use 'local-login', new LocalStrategy
usernameField: usernameField
passwordField: <PASSWORD>
passReqToCallback: true
, (req, email, password, done) ->
ndx.database.select ndx.settings.USER_TABLE,
where:
local:
email: email
, (users) ->
if users and users.length
if not ndx.validPassword password, users[0].local.password
ndx.passport.loginMessage = 'Wrong password'
return done(null, false)
ndx.user = users[0]
if ndx.auth
ndx.auth.extendUser ndx.user
syncCallback 'login', ndx.user
return done(null, users[0])
else
ndx.passport.loginMessage = 'No user found'
return done(null, false)
, true
ndx.app.post '/api/signup', ndx.passport.authenticate('local-signup', failureRedirect: '/api/badlogin')
, ndx.postAuthenticate
ndx.app.post '/api/login', ndx.passport.authenticate('local-login', failureRedirect: '/api/badlogin')
, ndx.postAuthenticate
ndx.app.get '/api/connect/local', (req, res) ->
#send flash message
return
ndx.app.post '/api/connect/local', ndx.passport.authorize('local-signup', failureRedirect: '/api/badlogin')
ndx.app.get '/api/unlink/local', (req, res) ->
user = ndx.user
user.local.email = undefined
user.local.password = <PASSWORD>
user.save (err) ->
res.redirect '/profile'
return
return
ndx.app.get '/api/badlogin', (req, res) ->
throw
status: 401
message: ndx.passport.loginMessage
| true | 'use strict'
objtrans = require 'objtrans'
module.exports = (ndx) ->
callbacks =
login: []
logout: []
signup: []
refreshLogin: []
updatePassword: []
ndx.passport = require 'passport'
LocalStrategy = require('passport-local').Strategy
usernameField = process.env.USERNAME_FIELD or ndx.settings.USERNAME_FIELD or 'email'
passwordField = process.env.PASSWORD_FIELD or ndx.settings.PASSWORD_FIELD or 'PI:PASSWORD:<PASSWORD>END_PI'
if ndx.settings.HAS_FORGOT or process.env.HAS_FORGOT
require('./forgot') ndx
if ndx.settings.HAS_INVITE or process.env.HAS_INVITE
require('./invite') ndx
syncCallback = (name, obj, cb) ->
if callbacks[name] and callbacks[name].length
for callback in callbacks[name]
callback obj
cb?()
ndx.passport.syncCallback = syncCallback
ndx.passport.on = (name, callback) ->
callbacks[name].push callback
ndx.passport.off = (name, callback) ->
callbacks[name].splice callbacks[name].indexOf(callback), 1
ndx.passport.serializeUser (user, done) ->
done null, user[ndx.settings.AUTO_ID]
ndx.passport.deserializeUser (id, done) ->
done null, id
ndx.passport.splitScopes = (scope) ->
scopes = scope.replace(/^[ ,]+/, '').replace(/[ ,]+$/, '').split(/[ ,]+/g)
if scopes.length < 2
return scopes[0]
else
return scopes
ndx.app
.use ndx.passport.initialize()
ndx.app.post '/api/refresh-login', (req, res) ->
if ndx.user
output = {}
if ndx.settings.PUBLIC_USER
output = objtrans ndx.user, ndx.settings.PUBLIC_USER
else
output = ndx.user
syncCallback 'refreshLogin', output
res.end JSON.stringify output
else
res.end ''
###
if ndx.settings.SOFT_LOGIN
res.end ''
else
throw ndx.UNAUTHORIZED
###
ndx.app.get '/api/logout', (req, res) ->
syncCallback 'logout', ndx.user
res.clearCookie 'token'
ndx.user = null
res.redirect '/'
return
ndx.app.post '/api/update-password', (req, res) ->
if ndx.user
if ndx.user.local
if ndx.validPassword req.body.oldPassword, ndx.user.local.password
where = {}
where[ndx.settings.AUTO_ID] = ndx.user[ndx.settings.AUTO_ID]
ndx.database.update ndx.settings.USER_TABLE,
local:
email: ndx.user.local.email
password: PI:PASSWORD:<PASSWORD>END_PI PI:PASSWORD:<PASSWORD>END_PI
, where, null, true
syncCallback 'updatePassword', ndx.user
res.end 'OK'
else
throw
status: 401
message: 'Invalid password'
else
throw
status: 401
message: 'No local details'
else
throw
status: 401
message: 'Not logged in'
ndx.passport.use 'local-signup', new LocalStrategy
usernameField: usernameField
passwordField: PI:PASSWORD:<PASSWORD>END_PI
passReqToCallback: true
, (req, email, password, done) ->
ndx.database.select ndx.settings.USER_TABLE,
where:
local:
email: email
, (users) ->
if users and users.length
ndx.passport.loginMessage = 'That email is already taken.'
return done(null, false)
else
newUser =
email: email
local:
email: email
password: PI:PASSWORD:<PASSWORD>END_PI
newUser[ndx.settings.AUTO_ID] = ndx.generateID()
ndx.database.insert ndx.settings.USER_TABLE, newUser, null, true
ndx.user = newUser
if ndx.auth
ndx.auth.extendUser ndx.user
syncCallback 'signup', ndx.user
done null, ndx.user
, true
ndx.passport.use 'local-login', new LocalStrategy
usernameField: usernameField
passwordField: PI:PASSWORD:<PASSWORD>END_PI
passReqToCallback: true
, (req, email, password, done) ->
ndx.database.select ndx.settings.USER_TABLE,
where:
local:
email: email
, (users) ->
if users and users.length
if not ndx.validPassword password, users[0].local.password
ndx.passport.loginMessage = 'Wrong password'
return done(null, false)
ndx.user = users[0]
if ndx.auth
ndx.auth.extendUser ndx.user
syncCallback 'login', ndx.user
return done(null, users[0])
else
ndx.passport.loginMessage = 'No user found'
return done(null, false)
, true
ndx.app.post '/api/signup', ndx.passport.authenticate('local-signup', failureRedirect: '/api/badlogin')
, ndx.postAuthenticate
ndx.app.post '/api/login', ndx.passport.authenticate('local-login', failureRedirect: '/api/badlogin')
, ndx.postAuthenticate
ndx.app.get '/api/connect/local', (req, res) ->
#send flash message
return
ndx.app.post '/api/connect/local', ndx.passport.authorize('local-signup', failureRedirect: '/api/badlogin')
ndx.app.get '/api/unlink/local', (req, res) ->
user = ndx.user
user.local.email = undefined
user.local.password = PI:PASSWORD:<PASSWORD>END_PI
user.save (err) ->
res.redirect '/profile'
return
return
ndx.app.get '/api/badlogin', (req, res) ->
throw
status: 401
message: ndx.passport.loginMessage
|
[
{
"context": "\t\t\tname: \"main.tex\"\n\t\t\t}]\n\t\t@members = [{\n\t\t\tuser: @owner = {\n\t\t\t\t_id: \"owner-id\"\n\t\t\t\tfirst_name : \"Ow",
"end": 979,
"score": 0.8921070098876953,
"start": 979,
"tag": "USERNAME",
"value": ""
},
{
"context": " @owner = {\n\t\t\t\t_id: \"owner... | test/UnitTests/coffee/Project/ProjectEditorHandlerTests.coffee | bowlofstew/web-sharelatex | 0 | chai = require('chai')
should = chai.should()
modulePath = "../../../../app/js/Features/Project/ProjectEditorHandler"
SandboxedModule = require('sandboxed-module')
describe "ProjectEditorHandler", ->
beforeEach ->
@project =
_id : "project-id"
name : "Project Name"
rootDoc_id : "file-id"
publicAccesLevel : "private"
deletedByExternalDataSource: false
rootFolder : [{
_id : "root-folder-id"
name : ""
docs : []
fileRefs : []
folders : [{
_id : "sub-folder-id"
name : "folder"
docs : [{
_id : "doc-id"
name : "main.tex"
lines : @lines = [
"line 1"
"line 2"
"line 3"
]
}]
fileRefs : [{
_id : "file-id"
name : "image.png"
created : new Date()
size : 1234
}]
folders : []
}]
}]
deletedDocs: [{
_id: "deleted-doc-id"
name: "main.tex"
}]
@members = [{
user: @owner = {
_id: "owner-id"
first_name : "Owner"
last_name : "ShareLaTeX"
email : "owner@sharelatex.com"
},
privilegeLevel: "owner"
},{
user: {
_id: "read-only-id"
first_name : "Read"
last_name : "Only"
email : "read-only@sharelatex.com"
},
privilegeLevel: "readOnly"
},{
user: {
_id: "read-write-id"
first_name : "Read"
last_name : "Write"
email : "read-write@sharelatex.com"
},
privilegeLevel: "readAndWrite"
}]
@handler = SandboxedModule.require modulePath
describe "buildProjectModelView", ->
describe "with owner and members included", ->
beforeEach ->
@result = @handler.buildProjectModelView @project, @members
it "should include the id", ->
should.exist @result._id
@result._id.should.equal "project-id"
it "should include the name", ->
should.exist @result.name
@result.name.should.equal "Project Name"
it "should include the root doc id", ->
should.exist @result.rootDoc_id
@result.rootDoc_id.should.equal "file-id"
it "should include the public access level", ->
should.exist @result.publicAccesLevel
@result.publicAccesLevel.should.equal "private"
it "should include the owner", ->
should.exist @result.owner
@result.owner._id.should.equal "owner-id"
@result.owner.email.should.equal "owner@sharelatex.com"
@result.owner.first_name.should.equal "Owner"
@result.owner.last_name.should.equal "ShareLaTeX"
@result.owner.privileges.should.equal "owner"
it "should include the deletedDocs", ->
should.exist @result.deletedDocs
@result.deletedDocs.should.equal @project.deletedDocs
it "should gather readOnly_refs and collaberators_refs into a list of members", ->
findMember = (id) =>
for member in @result.members
return member if member._id == id
return null
@result.members.length.should.equal 2
should.exist findMember("read-only-id")
findMember("read-only-id").privileges.should.equal "readOnly"
findMember("read-only-id").first_name.should.equal "Read"
findMember("read-only-id").last_name.should.equal "Only"
findMember("read-only-id").email.should.equal "read-only@sharelatex.com"
should.exist findMember("read-write-id")
findMember("read-write-id").privileges.should.equal "readAndWrite"
findMember("read-write-id").first_name.should.equal "Read"
findMember("read-write-id").last_name.should.equal "Write"
findMember("read-write-id").email.should.equal "read-write@sharelatex.com"
it "should include folders in the project", ->
@result.rootFolder[0]._id.should.equal "root-folder-id"
@result.rootFolder[0].name.should.equal ""
@result.rootFolder[0].folders[0]._id.should.equal "sub-folder-id"
@result.rootFolder[0].folders[0].name.should.equal "folder"
it "should not duplicate folder contents", ->
@result.rootFolder[0].docs.length.should.equal 0
@result.rootFolder[0].fileRefs.length.should.equal 0
it "should include files in the project", ->
@result.rootFolder[0].folders[0].fileRefs[0]._id.should.equal "file-id"
@result.rootFolder[0].folders[0].fileRefs[0].name.should.equal "image.png"
should.not.exist @result.rootFolder[0].folders[0].fileRefs[0].created
should.not.exist @result.rootFolder[0].folders[0].fileRefs[0].size
it "should include docs in the project but not the lines", ->
@result.rootFolder[0].folders[0].docs[0]._id.should.equal "doc-id"
@result.rootFolder[0].folders[0].docs[0].name.should.equal "main.tex"
should.not.exist @result.rootFolder[0].folders[0].docs[0].lines
describe "deletedByExternalDataSource", ->
it "should set the deletedByExternalDataSource flag to false when it is not there", ->
delete @project.deletedByExternalDataSource
result = @handler.buildProjectModelView @project, @members
result.deletedByExternalDataSource.should.equal false
it "should set the deletedByExternalDataSource flag to false when it is false", ->
result = @handler.buildProjectModelView @project, @members
result.deletedByExternalDataSource.should.equal false
it "should set the deletedByExternalDataSource flag to true when it is true", ->
@project.deletedByExternalDataSource = true
result = @handler.buildProjectModelView @project, @members
result.deletedByExternalDataSource.should.equal true
describe "features", ->
beforeEach ->
@owner.features =
versioning: true
collaborators: 3
compileGroup:"priority"
compileTimeout: 96
@result = @handler.buildProjectModelView @project, @members
it "should copy the owner features to the project", ->
@result.features.versioning.should.equal @owner.features.versioning
@result.features.collaborators.should.equal @owner.features.collaborators
@result.features.compileGroup.should.equal @owner.features.compileGroup
@result.features.compileTimeout.should.equal @owner.features.compileTimeout
| 107200 | chai = require('chai')
should = chai.should()
modulePath = "../../../../app/js/Features/Project/ProjectEditorHandler"
SandboxedModule = require('sandboxed-module')
describe "ProjectEditorHandler", ->
beforeEach ->
@project =
_id : "project-id"
name : "Project Name"
rootDoc_id : "file-id"
publicAccesLevel : "private"
deletedByExternalDataSource: false
rootFolder : [{
_id : "root-folder-id"
name : ""
docs : []
fileRefs : []
folders : [{
_id : "sub-folder-id"
name : "folder"
docs : [{
_id : "doc-id"
name : "main.tex"
lines : @lines = [
"line 1"
"line 2"
"line 3"
]
}]
fileRefs : [{
_id : "file-id"
name : "image.png"
created : new Date()
size : 1234
}]
folders : []
}]
}]
deletedDocs: [{
_id: "deleted-doc-id"
name: "main.tex"
}]
@members = [{
user: @owner = {
_id: "owner-id"
first_name : "<NAME>"
last_name : "<NAME>"
email : "<EMAIL>"
},
privilegeLevel: "owner"
},{
user: {
_id: "read-only-id"
first_name : "Read"
last_name : "Only"
email : "<EMAIL>"
},
privilegeLevel: "readOnly"
},{
user: {
_id: "read-write-id"
first_name : "<NAME>"
last_name : "<NAME>"
email : "<EMAIL>"
},
privilegeLevel: "readAndWrite"
}]
@handler = SandboxedModule.require modulePath
describe "buildProjectModelView", ->
describe "with owner and members included", ->
beforeEach ->
@result = @handler.buildProjectModelView @project, @members
it "should include the id", ->
should.exist @result._id
@result._id.should.equal "project-id"
it "should include the name", ->
should.exist @result.name
@result.name.should.equal "Project Name"
it "should include the root doc id", ->
should.exist @result.rootDoc_id
@result.rootDoc_id.should.equal "file-id"
it "should include the public access level", ->
should.exist @result.publicAccesLevel
@result.publicAccesLevel.should.equal "private"
it "should include the owner", ->
should.exist @result.owner
@result.owner._id.should.equal "owner-id"
@result.owner.email.should.equal "<EMAIL>"
@result.owner.first_name.should.equal "<NAME>"
@result.owner.last_name.should.equal "<NAME>"
@result.owner.privileges.should.equal "owner"
it "should include the deletedDocs", ->
should.exist @result.deletedDocs
@result.deletedDocs.should.equal @project.deletedDocs
it "should gather readOnly_refs and collaberators_refs into a list of members", ->
findMember = (id) =>
for member in @result.members
return member if member._id == id
return null
@result.members.length.should.equal 2
should.exist findMember("read-only-id")
findMember("read-only-id").privileges.should.equal "readOnly"
findMember("read-only-id").first_name.should.equal "<NAME>"
findMember("read-only-id").last_name.should.equal "<NAME>"
findMember("read-only-id").email.should.equal "<EMAIL>"
should.exist findMember("read-write-id")
findMember("read-write-id").privileges.should.equal "readAndWrite"
findMember("read-write-id").first_name.should.equal "<NAME>"
findMember("read-write-id").last_name.should.equal "<NAME>"
findMember("read-write-id").email.should.equal "<EMAIL>"
it "should include folders in the project", ->
@result.rootFolder[0]._id.should.equal "root-folder-id"
@result.rootFolder[0].name.should.equal ""
@result.rootFolder[0].folders[0]._id.should.equal "sub-folder-id"
@result.rootFolder[0].folders[0].name.should.equal "folder"
it "should not duplicate folder contents", ->
@result.rootFolder[0].docs.length.should.equal 0
@result.rootFolder[0].fileRefs.length.should.equal 0
it "should include files in the project", ->
@result.rootFolder[0].folders[0].fileRefs[0]._id.should.equal "file-id"
@result.rootFolder[0].folders[0].fileRefs[0].name.should.equal "image.png"
should.not.exist @result.rootFolder[0].folders[0].fileRefs[0].created
should.not.exist @result.rootFolder[0].folders[0].fileRefs[0].size
it "should include docs in the project but not the lines", ->
@result.rootFolder[0].folders[0].docs[0]._id.should.equal "doc-id"
@result.rootFolder[0].folders[0].docs[0].name.should.equal "main.tex"
should.not.exist @result.rootFolder[0].folders[0].docs[0].lines
describe "deletedByExternalDataSource", ->
it "should set the deletedByExternalDataSource flag to false when it is not there", ->
delete @project.deletedByExternalDataSource
result = @handler.buildProjectModelView @project, @members
result.deletedByExternalDataSource.should.equal false
it "should set the deletedByExternalDataSource flag to false when it is false", ->
result = @handler.buildProjectModelView @project, @members
result.deletedByExternalDataSource.should.equal false
it "should set the deletedByExternalDataSource flag to true when it is true", ->
@project.deletedByExternalDataSource = true
result = @handler.buildProjectModelView @project, @members
result.deletedByExternalDataSource.should.equal true
describe "features", ->
beforeEach ->
@owner.features =
versioning: true
collaborators: 3
compileGroup:"priority"
compileTimeout: 96
@result = @handler.buildProjectModelView @project, @members
it "should copy the owner features to the project", ->
@result.features.versioning.should.equal @owner.features.versioning
@result.features.collaborators.should.equal @owner.features.collaborators
@result.features.compileGroup.should.equal @owner.features.compileGroup
@result.features.compileTimeout.should.equal @owner.features.compileTimeout
| true | chai = require('chai')
should = chai.should()
modulePath = "../../../../app/js/Features/Project/ProjectEditorHandler"
SandboxedModule = require('sandboxed-module')
describe "ProjectEditorHandler", ->
beforeEach ->
@project =
_id : "project-id"
name : "Project Name"
rootDoc_id : "file-id"
publicAccesLevel : "private"
deletedByExternalDataSource: false
rootFolder : [{
_id : "root-folder-id"
name : ""
docs : []
fileRefs : []
folders : [{
_id : "sub-folder-id"
name : "folder"
docs : [{
_id : "doc-id"
name : "main.tex"
lines : @lines = [
"line 1"
"line 2"
"line 3"
]
}]
fileRefs : [{
_id : "file-id"
name : "image.png"
created : new Date()
size : 1234
}]
folders : []
}]
}]
deletedDocs: [{
_id: "deleted-doc-id"
name: "main.tex"
}]
@members = [{
user: @owner = {
_id: "owner-id"
first_name : "PI:NAME:<NAME>END_PI"
last_name : "PI:NAME:<NAME>END_PI"
email : "PI:EMAIL:<EMAIL>END_PI"
},
privilegeLevel: "owner"
},{
user: {
_id: "read-only-id"
first_name : "Read"
last_name : "Only"
email : "PI:EMAIL:<EMAIL>END_PI"
},
privilegeLevel: "readOnly"
},{
user: {
_id: "read-write-id"
first_name : "PI:NAME:<NAME>END_PI"
last_name : "PI:NAME:<NAME>END_PI"
email : "PI:EMAIL:<EMAIL>END_PI"
},
privilegeLevel: "readAndWrite"
}]
@handler = SandboxedModule.require modulePath
describe "buildProjectModelView", ->
describe "with owner and members included", ->
beforeEach ->
@result = @handler.buildProjectModelView @project, @members
it "should include the id", ->
should.exist @result._id
@result._id.should.equal "project-id"
it "should include the name", ->
should.exist @result.name
@result.name.should.equal "Project Name"
it "should include the root doc id", ->
should.exist @result.rootDoc_id
@result.rootDoc_id.should.equal "file-id"
it "should include the public access level", ->
should.exist @result.publicAccesLevel
@result.publicAccesLevel.should.equal "private"
it "should include the owner", ->
should.exist @result.owner
@result.owner._id.should.equal "owner-id"
@result.owner.email.should.equal "PI:EMAIL:<EMAIL>END_PI"
@result.owner.first_name.should.equal "PI:NAME:<NAME>END_PI"
@result.owner.last_name.should.equal "PI:NAME:<NAME>END_PI"
@result.owner.privileges.should.equal "owner"
it "should include the deletedDocs", ->
should.exist @result.deletedDocs
@result.deletedDocs.should.equal @project.deletedDocs
it "should gather readOnly_refs and collaberators_refs into a list of members", ->
findMember = (id) =>
for member in @result.members
return member if member._id == id
return null
@result.members.length.should.equal 2
should.exist findMember("read-only-id")
findMember("read-only-id").privileges.should.equal "readOnly"
findMember("read-only-id").first_name.should.equal "PI:NAME:<NAME>END_PI"
findMember("read-only-id").last_name.should.equal "PI:NAME:<NAME>END_PI"
findMember("read-only-id").email.should.equal "PI:EMAIL:<EMAIL>END_PI"
should.exist findMember("read-write-id")
findMember("read-write-id").privileges.should.equal "readAndWrite"
findMember("read-write-id").first_name.should.equal "PI:NAME:<NAME>END_PI"
findMember("read-write-id").last_name.should.equal "PI:NAME:<NAME>END_PI"
findMember("read-write-id").email.should.equal "PI:EMAIL:<EMAIL>END_PI"
it "should include folders in the project", ->
@result.rootFolder[0]._id.should.equal "root-folder-id"
@result.rootFolder[0].name.should.equal ""
@result.rootFolder[0].folders[0]._id.should.equal "sub-folder-id"
@result.rootFolder[0].folders[0].name.should.equal "folder"
it "should not duplicate folder contents", ->
@result.rootFolder[0].docs.length.should.equal 0
@result.rootFolder[0].fileRefs.length.should.equal 0
it "should include files in the project", ->
@result.rootFolder[0].folders[0].fileRefs[0]._id.should.equal "file-id"
@result.rootFolder[0].folders[0].fileRefs[0].name.should.equal "image.png"
should.not.exist @result.rootFolder[0].folders[0].fileRefs[0].created
should.not.exist @result.rootFolder[0].folders[0].fileRefs[0].size
it "should include docs in the project but not the lines", ->
@result.rootFolder[0].folders[0].docs[0]._id.should.equal "doc-id"
@result.rootFolder[0].folders[0].docs[0].name.should.equal "main.tex"
should.not.exist @result.rootFolder[0].folders[0].docs[0].lines
describe "deletedByExternalDataSource", ->
it "should set the deletedByExternalDataSource flag to false when it is not there", ->
delete @project.deletedByExternalDataSource
result = @handler.buildProjectModelView @project, @members
result.deletedByExternalDataSource.should.equal false
it "should set the deletedByExternalDataSource flag to false when it is false", ->
result = @handler.buildProjectModelView @project, @members
result.deletedByExternalDataSource.should.equal false
it "should set the deletedByExternalDataSource flag to true when it is true", ->
@project.deletedByExternalDataSource = true
result = @handler.buildProjectModelView @project, @members
result.deletedByExternalDataSource.should.equal true
describe "features", ->
beforeEach ->
@owner.features =
versioning: true
collaborators: 3
compileGroup:"priority"
compileTimeout: 96
@result = @handler.buildProjectModelView @project, @members
it "should copy the owner features to the project", ->
@result.features.versioning.should.equal @owner.features.versioning
@result.features.collaborators.should.equal @owner.features.collaborators
@result.features.compileGroup.should.equal @owner.features.compileGroup
@result.features.compileTimeout.should.equal @owner.features.compileTimeout
|
[
{
"context": " i in @props.project.urls\n link._key ?= Math.random()\n <AutoSave key={link._key} tag=\"tr\" ",
"end": 1001,
"score": 0.931738555431366,
"start": 990,
"tag": "KEY",
"value": "Math.random"
}
] | app/pages/lab/project-details.cjsx | camallen/Panoptes-Front-End | 0 | React = require 'react'
AutoSave = require '../../components/auto-save'
handleInputChange = require '../../lib/handle-input-change'
PromiseRenderer = require '../../components/promise-renderer'
ImageSelector = require '../../components/image-selector'
apiClient = require '../../api/client'
putFile = require '../../lib/put-file'
counterpart = require 'counterpart'
DisplayNameSlugEditor = require '../../partials/display-name-slug-editor'
TagSearch = require '../../components/tag-search'
{MarkdownEditor} = require 'markdownz'
markdownHelp = require '../../lib/markdown-help'
MAX_AVATAR_SIZE = 64000
MAX_BACKGROUND_SIZE = 256000
ExternalLinksEditor = React.createClass
displayName: 'ExternalLinksEditor'
getDefaultProps: ->
project: {}
render: ->
<div>
<table>
<thead>
<tr>
<th>Label</th>
<th>URL</th>
</tr>
</thead>
<tbody>
{for link, i in @props.project.urls
link._key ?= Math.random()
<AutoSave key={link._key} tag="tr" resource={@props.project}>
<td>
<input type="text" name="urls.#{i}.label" value={@props.project.urls[i].label} onChange={handleInputChange.bind @props.project} />
</td>
<td>
<input type="text" name="urls.#{i}.url" value={@props.project.urls[i].url} onChange={handleInputChange.bind @props.project} />
</td>
<td>
<button type="button" onClick={@handleRemoveLink.bind this, link}>
<i className="fa fa-remove"></i>
</button>
</td>
</AutoSave>}
</tbody>
</table>
<AutoSave resource={@props.project}>
<button type="button" onClick={@handleAddLink}>Add a link</button>
</AutoSave>
</div>
handleAddLink: ->
changes = {}
changes["urls.#{@props.project.urls.length}"] =
label: 'Example'
url: 'https://example.com/'
@props.project.update changes
handleRemoveLink: (linkToRemove) ->
changes =
urls: (link for link in @props.project.urls when link isnt linkToRemove)
@props.project.update changes
module.exports = React.createClass
displayName: 'EditProjectDetails'
getDefaultProps: ->
project: {}
getInitialState: ->
avatarError: null
backgroundError: null
render: ->
# Failures on media GETs are acceptable here,
# but the JSON-API lib doesn't cache failed requests,
# so do it manually:
@avatarGet ?= @props.project.get 'avatar'
.catch ->
null
@backgroundGet ?= @props.project.get 'background'
.catch ->
null
<div>
<p className="form-help">Input the basic information about your project, and set up its home page.</p>
<div className="columns-container">
<div>
Avatar<br />
<PromiseRenderer promise={@avatarGet} then={(avatar) =>
console.log 'Avatar is', avatar
placeholder = <div className="form-help content-container">Drop an avatar image here</div>
<ImageSelector maxSize={MAX_AVATAR_SIZE} ratio={1} defaultValue={avatar?.src} placeholder={placeholder} onChange={@handleMediaChange.bind this, 'avatar'} />
} />
{if @state.avatarError
<div className="form-help error">{@state.avatarError.toString()}</div>}
<p><small className="form-help">Pick a logo to represent your project. To add an image, either drag and drop or click to open your file viewer. For best results, use a square image of not more than 50 KB.</small></p>
<hr />
Background image<br />
<PromiseRenderer promise={@backgroundGet} then={(background) =>
console.log 'Background is', background
placeholder = <div className="form-help content-container">Drop a background image here</div>
<ImageSelector maxSize={MAX_BACKGROUND_SIZE} defaultValue={background?.src} placeholder={placeholder} onChange={@handleMediaChange.bind this, 'background'} />
} />
{if @state.backgroundError
<div className="form-help error">{@state.backgroundError.toString()}</div>}
<p><small className="form-help">This image will be the background for all of your project pages, including your project’s front page. To add an image, either drag and drop or left click to open your file viewer. For best results, use good quality images no more than 256 KB.</small></p>
<hr />
<p>
<AutoSave tag="label" resource={@props.project}>
{checked = @props.project.configuration?.user_chooses_workflow}
<input type="checkbox" name="configuration.user_chooses_workflow" defaultChecked={checked} defaultValue={checked} onChange={handleInputChange.bind @props.project} />{' '}
Volunteers can choose which workflow they work on
</AutoSave>
<br />
<small className="form-help">If you have multiple workflows, check this to let volunteers select which workflow they want to to work on; otherwise, they’ll be served randomly.</small>
</p>
</div>
<div className="column">
<DisplayNameSlugEditor resource={@props.project} resourceType="project" />
<p>
<AutoSave resource={@props.project}>
<span className="form-label">Description</span>
<br />
<input className="standard-input full" name="description" value={@props.project.description} onChange={handleInputChange.bind @props.project} />
</AutoSave>
<small className="form-help">This should be a one-line call to action for your project that displays on your landing page. Some volunteers will decide whether to try your project based on reading this, so try to write short text that will make people actively want to join your project.</small>
</p>
<p>
<AutoSave resource={@props.project}>
<span className="form-label">Introduction</span>
<br />
<MarkdownEditor className="full" name="introduction" rows="10" value={@props.project.introduction} project={@props.project} onChange={handleInputChange.bind @props.project} onHelp={markdownHelp}/>
</AutoSave>
<small className="form-help">Add a brief introduction to get people interested in your project. This will display on your landing page.</small>
</p>
<p>
<AutoSave resource={@props.project}>
<span className="form-label">Workflow Description</span>
<br />
<textarea className="standard-input full" name="workflow_description" value={@props.project.workflow_description} onChange={handleInputChange.bind @props.project} />
</AutoSave>
<small className="form-help">Add text here when you have multiple workflows and want to help your volunteers decide which one they should do.</small>
</p>
<div>
<AutoSave resource={@props.project}>
<span className="form-label">Tags</span>
<br />
<TagSearch name="tags" multi={true} value={@props.project.tags} onChange={@handleTagChange} />
</AutoSave>
<small className="form-help">Enter a list of tags separated by commas to help users find your project.</small>
</div>
<div>
External links<br />
<small className="form-help">Adding an external link will make it appear as a new tab alongside the science, classify, and talk tabs.</small>
<ExternalLinksEditor project={@props.project} />
</div>
</div>
</div>
</div>
handleTagChange: (value) ->
event =
target:
value: if value is '' then [] else value.split(',')
name: 'tags'
dataset: {}
handleInputChange.call @props.project, event
handleMediaChange: (type, file) ->
errorProp = "#{type}Error"
newState = {}
newState[errorProp] = null
@setState newState
apiClient.post @props.project._getURL(type), media: content_type: file.type
.then ([resource]) =>
putFile resource.src, file
.then =>
@props.project.uncacheLink type
@["#{type}Get"] = null # Uncache the local request so that rerendering makes it again.
@props.project.refresh() # Update the resource's links.
.then =>
@props.project.emit 'change' # Re-render
.catch (error) =>
newState = {}
newState[errorProp] = error
@setState newState
| 157765 | React = require 'react'
AutoSave = require '../../components/auto-save'
handleInputChange = require '../../lib/handle-input-change'
PromiseRenderer = require '../../components/promise-renderer'
ImageSelector = require '../../components/image-selector'
apiClient = require '../../api/client'
putFile = require '../../lib/put-file'
counterpart = require 'counterpart'
DisplayNameSlugEditor = require '../../partials/display-name-slug-editor'
TagSearch = require '../../components/tag-search'
{MarkdownEditor} = require 'markdownz'
markdownHelp = require '../../lib/markdown-help'
MAX_AVATAR_SIZE = 64000
MAX_BACKGROUND_SIZE = 256000
ExternalLinksEditor = React.createClass
displayName: 'ExternalLinksEditor'
getDefaultProps: ->
project: {}
render: ->
<div>
<table>
<thead>
<tr>
<th>Label</th>
<th>URL</th>
</tr>
</thead>
<tbody>
{for link, i in @props.project.urls
link._key ?= <KEY>()
<AutoSave key={link._key} tag="tr" resource={@props.project}>
<td>
<input type="text" name="urls.#{i}.label" value={@props.project.urls[i].label} onChange={handleInputChange.bind @props.project} />
</td>
<td>
<input type="text" name="urls.#{i}.url" value={@props.project.urls[i].url} onChange={handleInputChange.bind @props.project} />
</td>
<td>
<button type="button" onClick={@handleRemoveLink.bind this, link}>
<i className="fa fa-remove"></i>
</button>
</td>
</AutoSave>}
</tbody>
</table>
<AutoSave resource={@props.project}>
<button type="button" onClick={@handleAddLink}>Add a link</button>
</AutoSave>
</div>
handleAddLink: ->
changes = {}
changes["urls.#{@props.project.urls.length}"] =
label: 'Example'
url: 'https://example.com/'
@props.project.update changes
handleRemoveLink: (linkToRemove) ->
changes =
urls: (link for link in @props.project.urls when link isnt linkToRemove)
@props.project.update changes
module.exports = React.createClass
displayName: 'EditProjectDetails'
getDefaultProps: ->
project: {}
getInitialState: ->
avatarError: null
backgroundError: null
render: ->
# Failures on media GETs are acceptable here,
# but the JSON-API lib doesn't cache failed requests,
# so do it manually:
@avatarGet ?= @props.project.get 'avatar'
.catch ->
null
@backgroundGet ?= @props.project.get 'background'
.catch ->
null
<div>
<p className="form-help">Input the basic information about your project, and set up its home page.</p>
<div className="columns-container">
<div>
Avatar<br />
<PromiseRenderer promise={@avatarGet} then={(avatar) =>
console.log 'Avatar is', avatar
placeholder = <div className="form-help content-container">Drop an avatar image here</div>
<ImageSelector maxSize={MAX_AVATAR_SIZE} ratio={1} defaultValue={avatar?.src} placeholder={placeholder} onChange={@handleMediaChange.bind this, 'avatar'} />
} />
{if @state.avatarError
<div className="form-help error">{@state.avatarError.toString()}</div>}
<p><small className="form-help">Pick a logo to represent your project. To add an image, either drag and drop or click to open your file viewer. For best results, use a square image of not more than 50 KB.</small></p>
<hr />
Background image<br />
<PromiseRenderer promise={@backgroundGet} then={(background) =>
console.log 'Background is', background
placeholder = <div className="form-help content-container">Drop a background image here</div>
<ImageSelector maxSize={MAX_BACKGROUND_SIZE} defaultValue={background?.src} placeholder={placeholder} onChange={@handleMediaChange.bind this, 'background'} />
} />
{if @state.backgroundError
<div className="form-help error">{@state.backgroundError.toString()}</div>}
<p><small className="form-help">This image will be the background for all of your project pages, including your project’s front page. To add an image, either drag and drop or left click to open your file viewer. For best results, use good quality images no more than 256 KB.</small></p>
<hr />
<p>
<AutoSave tag="label" resource={@props.project}>
{checked = @props.project.configuration?.user_chooses_workflow}
<input type="checkbox" name="configuration.user_chooses_workflow" defaultChecked={checked} defaultValue={checked} onChange={handleInputChange.bind @props.project} />{' '}
Volunteers can choose which workflow they work on
</AutoSave>
<br />
<small className="form-help">If you have multiple workflows, check this to let volunteers select which workflow they want to to work on; otherwise, they’ll be served randomly.</small>
</p>
</div>
<div className="column">
<DisplayNameSlugEditor resource={@props.project} resourceType="project" />
<p>
<AutoSave resource={@props.project}>
<span className="form-label">Description</span>
<br />
<input className="standard-input full" name="description" value={@props.project.description} onChange={handleInputChange.bind @props.project} />
</AutoSave>
<small className="form-help">This should be a one-line call to action for your project that displays on your landing page. Some volunteers will decide whether to try your project based on reading this, so try to write short text that will make people actively want to join your project.</small>
</p>
<p>
<AutoSave resource={@props.project}>
<span className="form-label">Introduction</span>
<br />
<MarkdownEditor className="full" name="introduction" rows="10" value={@props.project.introduction} project={@props.project} onChange={handleInputChange.bind @props.project} onHelp={markdownHelp}/>
</AutoSave>
<small className="form-help">Add a brief introduction to get people interested in your project. This will display on your landing page.</small>
</p>
<p>
<AutoSave resource={@props.project}>
<span className="form-label">Workflow Description</span>
<br />
<textarea className="standard-input full" name="workflow_description" value={@props.project.workflow_description} onChange={handleInputChange.bind @props.project} />
</AutoSave>
<small className="form-help">Add text here when you have multiple workflows and want to help your volunteers decide which one they should do.</small>
</p>
<div>
<AutoSave resource={@props.project}>
<span className="form-label">Tags</span>
<br />
<TagSearch name="tags" multi={true} value={@props.project.tags} onChange={@handleTagChange} />
</AutoSave>
<small className="form-help">Enter a list of tags separated by commas to help users find your project.</small>
</div>
<div>
External links<br />
<small className="form-help">Adding an external link will make it appear as a new tab alongside the science, classify, and talk tabs.</small>
<ExternalLinksEditor project={@props.project} />
</div>
</div>
</div>
</div>
handleTagChange: (value) ->
event =
target:
value: if value is '' then [] else value.split(',')
name: 'tags'
dataset: {}
handleInputChange.call @props.project, event
handleMediaChange: (type, file) ->
errorProp = "#{type}Error"
newState = {}
newState[errorProp] = null
@setState newState
apiClient.post @props.project._getURL(type), media: content_type: file.type
.then ([resource]) =>
putFile resource.src, file
.then =>
@props.project.uncacheLink type
@["#{type}Get"] = null # Uncache the local request so that rerendering makes it again.
@props.project.refresh() # Update the resource's links.
.then =>
@props.project.emit 'change' # Re-render
.catch (error) =>
newState = {}
newState[errorProp] = error
@setState newState
| true | React = require 'react'
AutoSave = require '../../components/auto-save'
handleInputChange = require '../../lib/handle-input-change'
PromiseRenderer = require '../../components/promise-renderer'
ImageSelector = require '../../components/image-selector'
apiClient = require '../../api/client'
putFile = require '../../lib/put-file'
counterpart = require 'counterpart'
DisplayNameSlugEditor = require '../../partials/display-name-slug-editor'
TagSearch = require '../../components/tag-search'
{MarkdownEditor} = require 'markdownz'
markdownHelp = require '../../lib/markdown-help'
MAX_AVATAR_SIZE = 64000
MAX_BACKGROUND_SIZE = 256000
ExternalLinksEditor = React.createClass
displayName: 'ExternalLinksEditor'
getDefaultProps: ->
project: {}
render: ->
<div>
<table>
<thead>
<tr>
<th>Label</th>
<th>URL</th>
</tr>
</thead>
<tbody>
{for link, i in @props.project.urls
link._key ?= PI:KEY:<KEY>END_PI()
<AutoSave key={link._key} tag="tr" resource={@props.project}>
<td>
<input type="text" name="urls.#{i}.label" value={@props.project.urls[i].label} onChange={handleInputChange.bind @props.project} />
</td>
<td>
<input type="text" name="urls.#{i}.url" value={@props.project.urls[i].url} onChange={handleInputChange.bind @props.project} />
</td>
<td>
<button type="button" onClick={@handleRemoveLink.bind this, link}>
<i className="fa fa-remove"></i>
</button>
</td>
</AutoSave>}
</tbody>
</table>
<AutoSave resource={@props.project}>
<button type="button" onClick={@handleAddLink}>Add a link</button>
</AutoSave>
</div>
handleAddLink: ->
changes = {}
changes["urls.#{@props.project.urls.length}"] =
label: 'Example'
url: 'https://example.com/'
@props.project.update changes
handleRemoveLink: (linkToRemove) ->
changes =
urls: (link for link in @props.project.urls when link isnt linkToRemove)
@props.project.update changes
module.exports = React.createClass
displayName: 'EditProjectDetails'
getDefaultProps: ->
project: {}
getInitialState: ->
avatarError: null
backgroundError: null
render: ->
# Failures on media GETs are acceptable here,
# but the JSON-API lib doesn't cache failed requests,
# so do it manually:
@avatarGet ?= @props.project.get 'avatar'
.catch ->
null
@backgroundGet ?= @props.project.get 'background'
.catch ->
null
<div>
<p className="form-help">Input the basic information about your project, and set up its home page.</p>
<div className="columns-container">
<div>
Avatar<br />
<PromiseRenderer promise={@avatarGet} then={(avatar) =>
console.log 'Avatar is', avatar
placeholder = <div className="form-help content-container">Drop an avatar image here</div>
<ImageSelector maxSize={MAX_AVATAR_SIZE} ratio={1} defaultValue={avatar?.src} placeholder={placeholder} onChange={@handleMediaChange.bind this, 'avatar'} />
} />
{if @state.avatarError
<div className="form-help error">{@state.avatarError.toString()}</div>}
<p><small className="form-help">Pick a logo to represent your project. To add an image, either drag and drop or click to open your file viewer. For best results, use a square image of not more than 50 KB.</small></p>
<hr />
Background image<br />
<PromiseRenderer promise={@backgroundGet} then={(background) =>
console.log 'Background is', background
placeholder = <div className="form-help content-container">Drop a background image here</div>
<ImageSelector maxSize={MAX_BACKGROUND_SIZE} defaultValue={background?.src} placeholder={placeholder} onChange={@handleMediaChange.bind this, 'background'} />
} />
{if @state.backgroundError
<div className="form-help error">{@state.backgroundError.toString()}</div>}
<p><small className="form-help">This image will be the background for all of your project pages, including your project’s front page. To add an image, either drag and drop or left click to open your file viewer. For best results, use good quality images no more than 256 KB.</small></p>
<hr />
<p>
<AutoSave tag="label" resource={@props.project}>
{checked = @props.project.configuration?.user_chooses_workflow}
<input type="checkbox" name="configuration.user_chooses_workflow" defaultChecked={checked} defaultValue={checked} onChange={handleInputChange.bind @props.project} />{' '}
Volunteers can choose which workflow they work on
</AutoSave>
<br />
<small className="form-help">If you have multiple workflows, check this to let volunteers select which workflow they want to to work on; otherwise, they’ll be served randomly.</small>
</p>
</div>
<div className="column">
<DisplayNameSlugEditor resource={@props.project} resourceType="project" />
<p>
<AutoSave resource={@props.project}>
<span className="form-label">Description</span>
<br />
<input className="standard-input full" name="description" value={@props.project.description} onChange={handleInputChange.bind @props.project} />
</AutoSave>
<small className="form-help">This should be a one-line call to action for your project that displays on your landing page. Some volunteers will decide whether to try your project based on reading this, so try to write short text that will make people actively want to join your project.</small>
</p>
<p>
<AutoSave resource={@props.project}>
<span className="form-label">Introduction</span>
<br />
<MarkdownEditor className="full" name="introduction" rows="10" value={@props.project.introduction} project={@props.project} onChange={handleInputChange.bind @props.project} onHelp={markdownHelp}/>
</AutoSave>
<small className="form-help">Add a brief introduction to get people interested in your project. This will display on your landing page.</small>
</p>
<p>
<AutoSave resource={@props.project}>
<span className="form-label">Workflow Description</span>
<br />
<textarea className="standard-input full" name="workflow_description" value={@props.project.workflow_description} onChange={handleInputChange.bind @props.project} />
</AutoSave>
<small className="form-help">Add text here when you have multiple workflows and want to help your volunteers decide which one they should do.</small>
</p>
<div>
<AutoSave resource={@props.project}>
<span className="form-label">Tags</span>
<br />
<TagSearch name="tags" multi={true} value={@props.project.tags} onChange={@handleTagChange} />
</AutoSave>
<small className="form-help">Enter a list of tags separated by commas to help users find your project.</small>
</div>
<div>
External links<br />
<small className="form-help">Adding an external link will make it appear as a new tab alongside the science, classify, and talk tabs.</small>
<ExternalLinksEditor project={@props.project} />
</div>
</div>
</div>
</div>
handleTagChange: (value) ->
event =
target:
value: if value is '' then [] else value.split(',')
name: 'tags'
dataset: {}
handleInputChange.call @props.project, event
handleMediaChange: (type, file) ->
errorProp = "#{type}Error"
newState = {}
newState[errorProp] = null
@setState newState
apiClient.post @props.project._getURL(type), media: content_type: file.type
.then ([resource]) =>
putFile resource.src, file
.then =>
@props.project.uncacheLink type
@["#{type}Get"] = null # Uncache the local request so that rerendering makes it again.
@props.project.refresh() # Update the resource's links.
.then =>
@props.project.emit 'change' # Re-render
.catch (error) =>
newState = {}
newState[errorProp] = error
@setState newState
|
[
{
"context": " settings[raw_key] = raw_value if raw_key.match(@_nameRegex)\n @__retryer (ecbr) =>\n @client.put_s",
"end": 2450,
"score": 0.7474048137664795,
"start": 2439,
"tag": "KEY",
"value": "@_nameRegex"
},
{
"context": " settings[raw_key] = raw_value if raw_k... | app/src/utils/storage/synced_store.coffee | doged/ledger-wallet-doged-chrome | 1 | # A store able to synchronize with a remote crypted store. This store has an extra method in order to order a push or pull
# operations
# @event pulled Emitted once the store is pulled from the remote API
class ledger.storage.SyncedStore extends ledger.storage.SecureStore
PULL_INTERVAL_DELAY: ledger.config.syncRestClient.pullIntervalDelay || 10000
PULL_THROTTLE_DELAY: ledger.config.syncRestClient.pullThrottleDelay || 1000
PUSH_DEBOUNCE_DELAY: ledger.config.syncRestClient.pushDebounceDelay || 1000
# @param [String] name The store name
# @param [String] key The secure key used to encrypt/decrypt the store
# @param [Function] syncPushHandler A function used to perform push synchronization operations
# @param [Function] syncPullHandler A function used to perform pull synchronization operations
constructor: (name, addr, key) ->
super(name, key)
@mergeStrategy = @_overwriteStrategy
@client = ledger.api.SyncRestClient.instance(addr)
@throttled_pull = _.throttle _.bind(@._pull,@), @PULL_THROTTLE_DELAY
@debounced_push = _.debounce _.bind(@._push,@), @PUSH_DEBOUNCE_DELAY
_.defer =>
ledger.storage.wallet.get ['__last_sync_md5'], (item) =>
@lastMd5 = item.__last_sync_md5
@_initConnection()
# Stores one or many item
#
# @param [Object] items Items to store
# @param [Function] cb A callback invoked once the insertion is done
set: (items, cb) ->
super items, =>
this.debounced_push()
cb?()
# Removes one or more items from storage.
#
# @param [Array|String] key A single key to get, list of keys to get.
# @param [Function] cb A callback invoked once the removal is done.
remove: (keys, cb) ->
super keys, =>
this.debounced_push()
cb?()
clear: (cb) ->
super(cb)
@client.delete_settings()
# @return A promise
_pull: ->
@client.get_settings_md5().then( (md5) =>
return undefined if md5 == @lastMd5
@client.get_settings().then (items) =>
@mergeStrategy(items).then =>
@_setLastMd5(md5)
@emit('pulled')
items
).catch( (jqXHR) =>
# Data not synced already
return this._init() if jqXHR.status == 404
jqXHR
)
# @return A jQuery promise
_push: ->
d = Q.defer()
this._raw_get null, (raw_items) =>
settings = {}
for raw_key, raw_value of raw_items
settings[raw_key] = raw_value if raw_key.match(@_nameRegex)
@__retryer (ecbr) =>
@client.put_settings(settings).catch(ecbr).then (md5) =>
@_setLastMd5(md5)
d.resolve(md5)
, _.bind(d.reject,d)
, _.bind(d.reject,d)
d.promise
# @return A jQuery promise
_overwriteStrategy: (items) ->
d = Q.defer()
this._raw_set items, _.bind(d.resolve,d)
d.promise
# Call fct with ecbr as arg and retry it on fail.
# Wait 1 second before retry first time, double until 64 s then.
#
# @param [Function] fct A function invoked with ecbr, a retry on error callback.
# @param [Function] ecb A callback invoked when retry all fail.
__retryer: (fct, ecb, wait=1000) ->
fct (err) =>
if wait <= 64*1000
setTimeout (=> @__retryer(fct, ecb, wait*2)), wait
else
console.error(err)
ecb?(err)
_initConnection: ->
@__retryer (ecbr) =>
@_pull().then( =>
setTimeout =>
@pullTimer = setInterval(@throttled_pull, @PULL_INTERVAL_DELAY)
, @PULL_INTERVAL_DELAY
).catch (jqXHR) =>
# Data not synced already
if jqXHR.status == 404
this._init().catch(ecbr).then =>
setInterval(@throttled_pull, @PULL_INTERVAL_DELAY)
else if jqXHR.status == 400
console.error("BadRequest during SyncedStore initialization:", jqXHR)
else
ecbr(jqXHR)
ledger.app.wallet.once 'state:changed', =>
clearInterval(@pullTimer) if ledger.app.wallet._state != ledger.wallet.States.UNLOCKED
# @param [Function] cb A callback invoked once init is done. cb()
# @param [Function] ecb A callback invoked when init fail. Take $.ajax.fail args.
# @return A jQuery promise
_init: ->
d = Q.defer()
this._raw_get null, (raw_items) =>
settings = {}
for raw_key, raw_value of raw_items
settings[raw_key] = raw_value if raw_key.match(@_nameRegex)
@__retryer (ecbr) =>
@client.post_settings(settings).catch(ecbr).then (md5) =>
@_setLastMd5(md5)
d.resolve(md5)
, _.bind(d.reject,d)
, _.bind(d.reject,d)
d.promise
# Save lastMd5 in settings
_setLastMd5: (md5) ->
@lastMd5 = md5
ledger.storage.wallet.set("__last_sync_md5": md5)
| 127724 | # A store able to synchronize with a remote crypted store. This store has an extra method in order to order a push or pull
# operations
# @event pulled Emitted once the store is pulled from the remote API
class ledger.storage.SyncedStore extends ledger.storage.SecureStore
PULL_INTERVAL_DELAY: ledger.config.syncRestClient.pullIntervalDelay || 10000
PULL_THROTTLE_DELAY: ledger.config.syncRestClient.pullThrottleDelay || 1000
PUSH_DEBOUNCE_DELAY: ledger.config.syncRestClient.pushDebounceDelay || 1000
# @param [String] name The store name
# @param [String] key The secure key used to encrypt/decrypt the store
# @param [Function] syncPushHandler A function used to perform push synchronization operations
# @param [Function] syncPullHandler A function used to perform pull synchronization operations
constructor: (name, addr, key) ->
super(name, key)
@mergeStrategy = @_overwriteStrategy
@client = ledger.api.SyncRestClient.instance(addr)
@throttled_pull = _.throttle _.bind(@._pull,@), @PULL_THROTTLE_DELAY
@debounced_push = _.debounce _.bind(@._push,@), @PUSH_DEBOUNCE_DELAY
_.defer =>
ledger.storage.wallet.get ['__last_sync_md5'], (item) =>
@lastMd5 = item.__last_sync_md5
@_initConnection()
# Stores one or many item
#
# @param [Object] items Items to store
# @param [Function] cb A callback invoked once the insertion is done
set: (items, cb) ->
super items, =>
this.debounced_push()
cb?()
# Removes one or more items from storage.
#
# @param [Array|String] key A single key to get, list of keys to get.
# @param [Function] cb A callback invoked once the removal is done.
remove: (keys, cb) ->
super keys, =>
this.debounced_push()
cb?()
clear: (cb) ->
super(cb)
@client.delete_settings()
# @return A promise
_pull: ->
@client.get_settings_md5().then( (md5) =>
return undefined if md5 == @lastMd5
@client.get_settings().then (items) =>
@mergeStrategy(items).then =>
@_setLastMd5(md5)
@emit('pulled')
items
).catch( (jqXHR) =>
# Data not synced already
return this._init() if jqXHR.status == 404
jqXHR
)
# @return A jQuery promise
_push: ->
d = Q.defer()
this._raw_get null, (raw_items) =>
settings = {}
for raw_key, raw_value of raw_items
settings[raw_key] = raw_value if raw_key.match(<KEY>)
@__retryer (ecbr) =>
@client.put_settings(settings).catch(ecbr).then (md5) =>
@_setLastMd5(md5)
d.resolve(md5)
, _.bind(d.reject,d)
, _.bind(d.reject,d)
d.promise
# @return A jQuery promise
_overwriteStrategy: (items) ->
d = Q.defer()
this._raw_set items, _.bind(d.resolve,d)
d.promise
# Call fct with ecbr as arg and retry it on fail.
# Wait 1 second before retry first time, double until 64 s then.
#
# @param [Function] fct A function invoked with ecbr, a retry on error callback.
# @param [Function] ecb A callback invoked when retry all fail.
__retryer: (fct, ecb, wait=1000) ->
fct (err) =>
if wait <= 64*1000
setTimeout (=> @__retryer(fct, ecb, wait*2)), wait
else
console.error(err)
ecb?(err)
_initConnection: ->
@__retryer (ecbr) =>
@_pull().then( =>
setTimeout =>
@pullTimer = setInterval(@throttled_pull, @PULL_INTERVAL_DELAY)
, @PULL_INTERVAL_DELAY
).catch (jqXHR) =>
# Data not synced already
if jqXHR.status == 404
this._init().catch(ecbr).then =>
setInterval(@throttled_pull, @PULL_INTERVAL_DELAY)
else if jqXHR.status == 400
console.error("BadRequest during SyncedStore initialization:", jqXHR)
else
ecbr(jqXHR)
ledger.app.wallet.once 'state:changed', =>
clearInterval(@pullTimer) if ledger.app.wallet._state != ledger.wallet.States.UNLOCKED
# @param [Function] cb A callback invoked once init is done. cb()
# @param [Function] ecb A callback invoked when init fail. Take $.ajax.fail args.
# @return A jQuery promise
_init: ->
d = Q.defer()
this._raw_get null, (raw_items) =>
settings = {}
for raw_key, raw_value of raw_items
settings[raw_key] = raw_value if raw_key.match(@_nameRegex)
@__retryer (ecbr) =>
@client.post_settings(settings).catch(ecbr).then (md5) =>
@_setLastMd5(md5)
d.resolve(md5)
, _.bind(d.reject,d)
, _.bind(d.reject,d)
d.promise
# Save lastMd5 in settings
_setLastMd5: (md5) ->
@lastMd5 = md5
ledger.storage.wallet.set("__last_sync_md5": md5)
| true | # A store able to synchronize with a remote crypted store. This store has an extra method in order to order a push or pull
# operations
# @event pulled Emitted once the store is pulled from the remote API
class ledger.storage.SyncedStore extends ledger.storage.SecureStore
PULL_INTERVAL_DELAY: ledger.config.syncRestClient.pullIntervalDelay || 10000
PULL_THROTTLE_DELAY: ledger.config.syncRestClient.pullThrottleDelay || 1000
PUSH_DEBOUNCE_DELAY: ledger.config.syncRestClient.pushDebounceDelay || 1000
# @param [String] name The store name
# @param [String] key The secure key used to encrypt/decrypt the store
# @param [Function] syncPushHandler A function used to perform push synchronization operations
# @param [Function] syncPullHandler A function used to perform pull synchronization operations
constructor: (name, addr, key) ->
super(name, key)
@mergeStrategy = @_overwriteStrategy
@client = ledger.api.SyncRestClient.instance(addr)
@throttled_pull = _.throttle _.bind(@._pull,@), @PULL_THROTTLE_DELAY
@debounced_push = _.debounce _.bind(@._push,@), @PUSH_DEBOUNCE_DELAY
_.defer =>
ledger.storage.wallet.get ['__last_sync_md5'], (item) =>
@lastMd5 = item.__last_sync_md5
@_initConnection()
# Stores one or many item
#
# @param [Object] items Items to store
# @param [Function] cb A callback invoked once the insertion is done
set: (items, cb) ->
super items, =>
this.debounced_push()
cb?()
# Removes one or more items from storage.
#
# @param [Array|String] key A single key to get, list of keys to get.
# @param [Function] cb A callback invoked once the removal is done.
remove: (keys, cb) ->
super keys, =>
this.debounced_push()
cb?()
clear: (cb) ->
super(cb)
@client.delete_settings()
# @return A promise
_pull: ->
@client.get_settings_md5().then( (md5) =>
return undefined if md5 == @lastMd5
@client.get_settings().then (items) =>
@mergeStrategy(items).then =>
@_setLastMd5(md5)
@emit('pulled')
items
).catch( (jqXHR) =>
# Data not synced already
return this._init() if jqXHR.status == 404
jqXHR
)
# @return A jQuery promise
_push: ->
d = Q.defer()
this._raw_get null, (raw_items) =>
settings = {}
for raw_key, raw_value of raw_items
settings[raw_key] = raw_value if raw_key.match(PI:KEY:<KEY>END_PI)
@__retryer (ecbr) =>
@client.put_settings(settings).catch(ecbr).then (md5) =>
@_setLastMd5(md5)
d.resolve(md5)
, _.bind(d.reject,d)
, _.bind(d.reject,d)
d.promise
# @return A jQuery promise
_overwriteStrategy: (items) ->
d = Q.defer()
this._raw_set items, _.bind(d.resolve,d)
d.promise
# Call fct with ecbr as arg and retry it on fail.
# Wait 1 second before retry first time, double until 64 s then.
#
# @param [Function] fct A function invoked with ecbr, a retry on error callback.
# @param [Function] ecb A callback invoked when retry all fail.
__retryer: (fct, ecb, wait=1000) ->
fct (err) =>
if wait <= 64*1000
setTimeout (=> @__retryer(fct, ecb, wait*2)), wait
else
console.error(err)
ecb?(err)
_initConnection: ->
@__retryer (ecbr) =>
@_pull().then( =>
setTimeout =>
@pullTimer = setInterval(@throttled_pull, @PULL_INTERVAL_DELAY)
, @PULL_INTERVAL_DELAY
).catch (jqXHR) =>
# Data not synced already
if jqXHR.status == 404
this._init().catch(ecbr).then =>
setInterval(@throttled_pull, @PULL_INTERVAL_DELAY)
else if jqXHR.status == 400
console.error("BadRequest during SyncedStore initialization:", jqXHR)
else
ecbr(jqXHR)
ledger.app.wallet.once 'state:changed', =>
clearInterval(@pullTimer) if ledger.app.wallet._state != ledger.wallet.States.UNLOCKED
  # Create the remote settings document from the current local items; only
  # keys matching @_nameRegex are uploaded. Retries with backoff on failure.
  # (No cb/ecb parameters any more — callers consume the returned promise.)
  # @return A promise resolved with the md5 of the created document, rejected
  #   if reading local storage fails or retries are exhausted.
  _init: ->
    d = Q.defer()
    this._raw_get null, (raw_items) =>
      settings = {}
      # Filter local storage down to this store's namespaced keys.
      for raw_key, raw_value of raw_items
        settings[raw_key] = raw_value if raw_key.match(@_nameRegex)
      @__retryer (ecbr) =>
        @client.post_settings(settings).catch(ecbr).then (md5) =>
          @_setLastMd5(md5)
          d.resolve(md5)
      , _.bind(d.reject,d)
    , _.bind(d.reject,d)
    d.promise
  # Save lastMd5 in settings
  # Caches the md5 of the last successfully synced document in memory and
  # persists it in wallet storage under the "__last_sync_md5" key.
  _setLastMd5: (md5) ->
    @lastMd5 = md5
    ledger.storage.wallet.set("__last_sync_md5": md5)
|
[
{
"context": "Options)\n\n\ntaskQueue = new TaskQueue\n secToken: \"f0a4d82d-d2a1-4e83-9efb-84482d5806b0\"\n taskResourceUrl: \"http://localhost:3000/api/ap",
"end": 280,
"score": 0.8796785473823547,
"start": 244,
"tag": "PASSWORD",
"value": "f0a4d82d-d2a1-4e83-9efb-84482d5806b0"
}
] | example/example.coffee | ICGGroup/icg-task-queue | 0 | TaskQueue = require("../task-queue")
bunyan = require('bunyan');
# Log everything at debug level and above to stdout.
logOptions =
  name: 'example-log'
  streams: [
    stream: process.stdout
    level: "debug"
  ]
log = bunyan.createLogger(logOptions)
# SECURITY(review): secToken is hard-coded in source. Acceptable only for an
# example — real deployments should read it from the environment or a secret store.
taskQueue = new TaskQueue
  secToken: "f0a4d82d-d2a1-4e83-9efb-84482d5806b0"
  taskResourceUrl: "http://localhost:3000/api/ap/queuedTasks"
  log: log
# ask for work
processOpts =
  backoff:
    # Fibonacci backoff between polls, ratio 1, capped at 300 (units defined
    # by the task-queue library — confirm) and effectively unlimited retries.
    algorithm : 'fibonacci',
    delayRatio : 1,
    maxDelay : 300,
    maxTries: 1000000
  log: log
processQ = taskQueue.process "non.notification", processOpts, (task, cb)->
  log.debug "starting processing for task", task
| 199860 | TaskQueue = require("../task-queue")
bunyan = require('bunyan');
logOptions =
name: 'example-log'
streams: [
stream: process.stdout
level: "debug"
]
log = bunyan.createLogger(logOptions)
taskQueue = new TaskQueue
secToken: "<PASSWORD>"
taskResourceUrl: "http://localhost:3000/api/ap/queuedTasks"
log: log
# ask for work
processOpts =
backoff:
algorithm : 'fibonacci',
delayRatio : 1,
maxDelay : 300,
maxTries: 1000000
log: log
processQ = taskQueue.process "non.notification", processOpts, (task, cb)->
log.debug "starting processing for task", task
| true | TaskQueue = require("../task-queue")
bunyan = require('bunyan');
logOptions =
name: 'example-log'
streams: [
stream: process.stdout
level: "debug"
]
log = bunyan.createLogger(logOptions)
taskQueue = new TaskQueue
secToken: "PI:PASSWORD:<PASSWORD>END_PI"
taskResourceUrl: "http://localhost:3000/api/ap/queuedTasks"
log: log
# ask for work
processOpts =
backoff:
algorithm : 'fibonacci',
delayRatio : 1,
maxDelay : 300,
maxTries: 1000000
log: log
processQ = taskQueue.process "non.notification", processOpts, (task, cb)->
log.debug "starting processing for task", task
|
[
{
"context": "#\n# grunt-chalkboard\n# https://github.com/adrianlee44/grunt-chalkboard\n#\n# Copyright (c) 2013 Adrian Le",
"end": 53,
"score": 0.9995395541191101,
"start": 42,
"tag": "USERNAME",
"value": "adrianlee44"
},
{
"context": "drianlee44/grunt-chalkboard\n#\n# Copyright (c) ... | Gruntfile.coffee | gruntjs-updater/grunt-chalkboard | 0 | #
# grunt-chalkboard
# https://github.com/adrianlee44/grunt-chalkboard
#
# Copyright (c) 2013 Adrian Lee
# Licensed under the MIT license.
#
module.exports = (grunt) ->
  # Project configuration.
  grunt.initConfig
    # Lint the Gruntfile, the task sources, and the test files.
    coffeelint:
      all: ["Gruntfile.coffee", "tasks/*.coffee", "<%= nodeunit.tests %>"]
    # Before generating any new files, remove any previously-created files.
    clean:
      tests: ["tmp"]
    # Configuration to be run (and then tested).
    chalkboard:
      fixture:
        files:
          "tmp/fixture.md": ["test/fixture.coffee"]
    # Compile the CoffeeScript source into the tasks/ directory grunt loads.
    coffee:
      src:
        files:
          "tasks/chalkboard.js": "src/chalkboard.coffee"
    # Unit tests.
    nodeunit:
      tests: ["test/*_test.coffee"]
    # Re-lint, re-compile and re-test whenever the source changes.
    watch:
      src:
        files: "src/chalkboard.coffee"
        tasks: ["coffeelint", "coffee", "test"]
  # Actually load this plugin's task(s).
  grunt.loadTasks "tasks"
  # These plugins provide necessary tasks.
  grunt.loadNpmTasks "grunt-coffeelint"
  grunt.loadNpmTasks "grunt-contrib-clean"
  grunt.loadNpmTasks "grunt-contrib-nodeunit"
  grunt.loadNpmTasks "grunt-contrib-coffee"
  grunt.loadNpmTasks "grunt-contrib-watch"
  # Whenever the "test" task is run, first clean the "tmp" dir, then run this
  # plugin's task(s), then test the result.
  grunt.registerTask "test", ["clean", "coffee", "chalkboard", "nodeunit"]
  # By default, lint and run all tests.
grunt.registerTask "default", ["coffeelint", "test"] | 38018 | #
# grunt-chalkboard
# https://github.com/adrianlee44/grunt-chalkboard
#
# Copyright (c) 2013 <NAME>
# Licensed under the MIT license.
#
module.exports = (grunt) ->
# Project configuration.
grunt.initConfig
coffeelint:
all: ["Gruntfile.coffee", "tasks/*.coffee", "<%= nodeunit.tests %>"]
# Before generating any new files, remove any previously-created files.
clean:
tests: ["tmp"]
# Configuration to be run (and then tested).
chalkboard:
fixture:
files:
"tmp/fixture.md": ["test/fixture.coffee"]
coffee:
src:
files:
"tasks/chalkboard.js": "src/chalkboard.coffee"
# Unit tests.
nodeunit:
tests: ["test/*_test.coffee"]
watch:
src:
files: "src/chalkboard.coffee"
tasks: ["coffeelint", "coffee", "test"]
# Actually load this plugin's task(s).
grunt.loadTasks "tasks"
# These plugins provide necessary tasks.
grunt.loadNpmTasks "grunt-coffeelint"
grunt.loadNpmTasks "grunt-contrib-clean"
grunt.loadNpmTasks "grunt-contrib-nodeunit"
grunt.loadNpmTasks "grunt-contrib-coffee"
grunt.loadNpmTasks "grunt-contrib-watch"
# Whenever the "test" task is run, first clean the "tmp" dir, then run this
# plugin's task(s), then test the result.
grunt.registerTask "test", ["clean", "coffee", "chalkboard", "nodeunit"]
# By default, lint and run all tests.
grunt.registerTask "default", ["coffeelint", "test"] | true | #
# grunt-chalkboard
# https://github.com/adrianlee44/grunt-chalkboard
#
# Copyright (c) 2013 PI:NAME:<NAME>END_PI
# Licensed under the MIT license.
#
module.exports = (grunt) ->
# Project configuration.
grunt.initConfig
coffeelint:
all: ["Gruntfile.coffee", "tasks/*.coffee", "<%= nodeunit.tests %>"]
# Before generating any new files, remove any previously-created files.
clean:
tests: ["tmp"]
# Configuration to be run (and then tested).
chalkboard:
fixture:
files:
"tmp/fixture.md": ["test/fixture.coffee"]
coffee:
src:
files:
"tasks/chalkboard.js": "src/chalkboard.coffee"
# Unit tests.
nodeunit:
tests: ["test/*_test.coffee"]
watch:
src:
files: "src/chalkboard.coffee"
tasks: ["coffeelint", "coffee", "test"]
# Actually load this plugin's task(s).
grunt.loadTasks "tasks"
# These plugins provide necessary tasks.
grunt.loadNpmTasks "grunt-coffeelint"
grunt.loadNpmTasks "grunt-contrib-clean"
grunt.loadNpmTasks "grunt-contrib-nodeunit"
grunt.loadNpmTasks "grunt-contrib-coffee"
grunt.loadNpmTasks "grunt-contrib-watch"
# Whenever the "test" task is run, first clean the "tmp" dir, then run this
# plugin's task(s), then test the result.
grunt.registerTask "test", ["clean", "coffee", "chalkboard", "nodeunit"]
# By default, lint and run all tests.
grunt.registerTask "default", ["coffeelint", "test"] |
[
{
"context": "ormation, please see the LICENSE file\n\n@author Bryan Conrad <bkconrad@gmail.com>\n@copyright 2016 Bryan Conra",
"end": 156,
"score": 0.9998887777328491,
"start": 144,
"tag": "NAME",
"value": "Bryan Conrad"
},
{
"context": "e see the LICENSE file\n\n@author Br... | index.coffee | pieterbrandsen/screeps-grafana | 70 | ###
hopsoft\screeps-statsd
Licensed under the MIT license
For full copyright and license information, please see the LICENSE file
@author Bryan Conrad <bkconrad@gmail.com>
@copyright 2016 Bryan Conrad
@link https://github.com/hopsoft/docker-graphite-statsd
@license http://choosealicense.com/licenses/MIT MIT License
###
# Application's initialisation and startup script
ScreepsStatsd = require './src/ScreepsStatsd'
(new ScreepsStatsd).run() | 200327 | ###
hopsoft\screeps-statsd
Licensed under the MIT license
For full copyright and license information, please see the LICENSE file
@author <NAME> <<EMAIL>>
@copyright 2016 <NAME>
@link https://github.com/hopsoft/docker-graphite-statsd
@license http://choosealicense.com/licenses/MIT MIT License
###
# Application's initialisation and startup script
ScreepsStatsd = require './src/ScreepsStatsd'
(new ScreepsStatsd).run() | true | ###
hopsoft\screeps-statsd
Licensed under the MIT license
For full copyright and license information, please see the LICENSE file
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
@copyright 2016 PI:NAME:<NAME>END_PI
@link https://github.com/hopsoft/docker-graphite-statsd
@license http://choosealicense.com/licenses/MIT MIT License
###
# Application's initialisation and startup script
ScreepsStatsd = require './src/ScreepsStatsd'
(new ScreepsStatsd).run() |
[
{
"context": "ats - proves the lotto has no bias\n#\n# Author:\n# sakatam\n\n_ = require \"underscore\"\nasync = req",
"end": 452,
"score": 0.9997385740280151,
"start": 445,
"tag": "USERNAME",
"value": "sakatam"
},
{
"context": "_LOTTO_DEBUG in [\"1\", \"true\"]\n\n ST... | src/reviewer-lotto.coffee | sakatam/hubot-reviewer-lotto | 32 | # Description:
# assigns random reviewer for a pull request.
#
# Configuration:
# HUBOT_GITHUB_TOKEN (required)
# HUBOT_GITHUB_ORG (required)
# HUBOT_GITHUB_REVIEWER_TEAM (required)
# github team id. this script randomly picks a reviewer from this team members.
#
# Commands:
# hubot reviewer for <repo> <pull> - assigns random reviewer for pull request
# hubot reviewer show stats - proves the lotto has no bias
#
# Author:
# sakatam
_ = require "underscore"
async = require "async"
GitHubApi = require "github"
weighted = require "weighted"
module.exports = (robot) ->
  # Required configuration (the script refuses to load without the first three).
  ghToken = process.env.HUBOT_GITHUB_TOKEN
  ghOrg = process.env.HUBOT_GITHUB_ORG
  ghReviwerTeam = process.env.HUBOT_GITHUB_REVIEWER_TEAM
  ghWithAvatar = process.env.HUBOT_GITHUB_WITH_AVATAR in ["1", "true"]
  normalMessage = process.env.HUBOT_REVIEWER_LOTTO_MESSAGE || "Please review this."
  politeMessage = process.env.HUBOT_REVIEWER_LOTTO_POLITE_MESSAGE || "#{normalMessage} :bow::bow::bow::bow:"
  debug = process.env.HUBOT_REVIEWER_LOTTO_DEBUG in ["1", "true"]
  # Brain key under which per-reviewer assignment counts are persisted.
  STATS_KEY = 'reviewer-lotto-stats'
  # draw lotto - weighted random selection
  # Reviewers with fewer past assignments get exponentially larger weights,
  # so assignment counts even out over time.
  draw = (reviewers, stats = null) ->
    max = if stats? then (_.max _.map stats, (count) -> count) else 0
    arms = {}
    sum = 0
    for {login} in reviewers
      weight = Math.exp max - (stats?[login] || 0)
      arms[login] = weight
      sum += weight
    # normalize weights
    for login, weight of arms
      arms[login] = if sum > 0 then weight / sum else 1
    if debug
      robot.logger.info 'arms: ', arms
    selected = weighted.select arms
    _.find reviewers, ({login}) -> login == selected
  if !ghToken? or !ghOrg? or !ghReviwerTeam?
    return robot.logger.error """
      reviewer-lottery is not loaded due to missing configuration!
      #{__filename}
      HUBOT_GITHUB_TOKEN: #{ghToken}
      HUBOT_GITHUB_ORG: #{ghOrg}
      HUBOT_GITHUB_REVIEWER_TEAM: #{ghReviwerTeam}
    """
  robot.respond /reviewer reset stats/i, (msg) ->
    robot.brain.set STATS_KEY, {}
    msg.reply "Reset reviewer stats!"
  # NOTE(review): when no stats exist yet, total stays 0 and percentage
  # becomes NaN — confirm this is acceptable for the stats display.
  robot.respond /reviewer show stats$/i, (msg) ->
    stats = robot.brain.get STATS_KEY
    msgs = ["login, percentage, num assigned"]
    total = 0
    for login, count of stats
      total += count
    for login, count of stats
      percentage = Math.floor(count * 100.0 / total)
      msgs.push "#{login}, #{percentage}%, #{count}"
    msg.reply msgs.join "\n"
  robot.respond /reviewer for ([\w-\.]+) (\d+)( polite)?$/i, (msg) ->
    repo = msg.match[1]
    pr = msg.match[2]
    polite = msg.match[3]?
    prParams =
      owner: ghOrg
      repo: repo
      number: pr
    gh = new GitHubApi version: "3.0.0"
    gh.authenticate {type: "oauth", token: ghToken}
    # mock api if debug mode
    if debug
      gh.issues.createComment = (params, cb) ->
        robot.logger.info "GitHubApi - createComment is called", params
        cb null
      gh.issues.edit = (params, cb) ->
        robot.logger.info "GitHubApi - edit is called", params
        cb null
    async.waterfall [
      (cb) ->
        # get team members
        params =
          id: ghReviwerTeam
          per_page: 100
        gh.orgs.getTeamMembers params, (err, res) ->
          return cb "error on getting team members: #{err.toString()}" if err?
          cb null, {reviewers: res}
      (ctx, cb) ->
        # check if pull req exists
        gh.pullRequests.get prParams, (err, res) ->
          return cb "error on getting pull request: #{err.toString()}" if err?
          ctx['issue'] = res
          ctx['creator'] = res.user
          ctx['assignee'] = res.assignee
          cb null, ctx
      (ctx, cb) ->
        # pick reviewer
        {reviewers, creator, assignee} = ctx
        reviewers = reviewers.filter (r) -> r.login != creator.login
        # exclude current assignee from reviewer candidates
        if assignee?
          reviewers = reviewers.filter (r) -> r.login != assignee.login
        ctx['reviewer'] = draw reviewers, robot.brain.get(STATS_KEY)
        cb null, ctx
      (ctx, cb) ->
        # post a comment
        {reviewer} = ctx
        body = "@#{reviewer.login} " + if polite then politeMessage else normalMessage
        params = _.extend { body }, prParams
        gh.issues.createComment params, (err, res) -> cb err, ctx
      (ctx, cb) ->
        # change reviewers
        {reviewer} = ctx
        params = _.extend { reviewers: [reviewer.login] }, prParams
        gh.pullRequests.createReviewRequest params, (err, res) -> cb err, ctx
      (ctx, cb) ->
        {reviewer, issue} = ctx
        msg.reply "#{reviewer.login} has been assigned for #{issue.html_url} as a reviewer"
        if ghWithAvatar
          url = reviewer.avatar_url
          url = "#{url}t=#{Date.now()}" # cache buster
          url = url.replace(/(#.*|$)/, '#.png') # hipchat needs image-ish url to display inline image
          msg.send url
        # update stats
        stats = (robot.brain.get STATS_KEY) or {}
        stats[reviewer.login] or= 0
        stats[reviewer.login]++
        robot.brain.set STATS_KEY, stats
        cb null, ctx
    ], (err, res) ->
      if err?
        msg.reply "an error occured.\n#{err}"
| 95904 | # Description:
# assigns random reviewer for a pull request.
#
# Configuration:
# HUBOT_GITHUB_TOKEN (required)
# HUBOT_GITHUB_ORG (required)
# HUBOT_GITHUB_REVIEWER_TEAM (required)
# github team id. this script randomly picks a reviewer from this team members.
#
# Commands:
# hubot reviewer for <repo> <pull> - assigns random reviewer for pull request
# hubot reviewer show stats - proves the lotto has no bias
#
# Author:
# sakatam
_ = require "underscore"
async = require "async"
GitHubApi = require "github"
weighted = require "weighted"
module.exports = (robot) ->
ghToken = process.env.HUBOT_GITHUB_TOKEN
ghOrg = process.env.HUBOT_GITHUB_ORG
ghReviwerTeam = process.env.HUBOT_GITHUB_REVIEWER_TEAM
ghWithAvatar = process.env.HUBOT_GITHUB_WITH_AVATAR in ["1", "true"]
normalMessage = process.env.HUBOT_REVIEWER_LOTTO_MESSAGE || "Please review this."
politeMessage = process.env.HUBOT_REVIEWER_LOTTO_POLITE_MESSAGE || "#{normalMessage} :bow::bow::bow::bow:"
debug = process.env.HUBOT_REVIEWER_LOTTO_DEBUG in ["1", "true"]
STATS_KEY = '<KEY>'
# draw lotto - weighted random selection
draw = (reviewers, stats = null) ->
max = if stats? then (_.max _.map stats, (count) -> count) else 0
arms = {}
sum = 0
for {login} in reviewers
weight = Math.exp max - (stats?[login] || 0)
arms[login] = weight
sum += weight
# normalize weights
for login, weight of arms
arms[login] = if sum > 0 then weight / sum else 1
if debug
robot.logger.info 'arms: ', arms
selected = weighted.select arms
_.find reviewers, ({login}) -> login == selected
if !ghToken? or !ghOrg? or !ghReviwerTeam?
return robot.logger.error """
reviewer-lottery is not loaded due to missing configuration!
#{__filename}
HUBOT_GITHUB_TOKEN: #{ghToken}
HUBOT_GITHUB_ORG: #{ghOrg}
HUBOT_GITHUB_REVIEWER_TEAM: #{ghReviwerTeam}
"""
robot.respond /reviewer reset stats/i, (msg) ->
robot.brain.set STATS_KEY, {}
msg.reply "Reset reviewer stats!"
robot.respond /reviewer show stats$/i, (msg) ->
stats = robot.brain.get STATS_KEY
msgs = ["login, percentage, num assigned"]
total = 0
for login, count of stats
total += count
for login, count of stats
percentage = Math.floor(count * 100.0 / total)
msgs.push "#{login}, #{percentage}%, #{count}"
msg.reply msgs.join "\n"
robot.respond /reviewer for ([\w-\.]+) (\d+)( polite)?$/i, (msg) ->
repo = msg.match[1]
pr = msg.match[2]
polite = msg.match[3]?
prParams =
owner: ghOrg
repo: repo
number: pr
gh = new GitHubApi version: "3.0.0"
gh.authenticate {type: "oauth", token: ghToken}
# mock api if debug mode
if debug
gh.issues.createComment = (params, cb) ->
robot.logger.info "GitHubApi - createComment is called", params
cb null
gh.issues.edit = (params, cb) ->
robot.logger.info "GitHubApi - edit is called", params
cb null
async.waterfall [
(cb) ->
# get team members
params =
id: ghReviwerTeam
per_page: 100
gh.orgs.getTeamMembers params, (err, res) ->
return cb "error on getting team members: #{err.toString()}" if err?
cb null, {reviewers: res}
(ctx, cb) ->
# check if pull req exists
gh.pullRequests.get prParams, (err, res) ->
return cb "error on getting pull request: #{err.toString()}" if err?
ctx['issue'] = res
ctx['creator'] = res.user
ctx['assignee'] = res.assignee
cb null, ctx
(ctx, cb) ->
# pick reviewer
{reviewers, creator, assignee} = ctx
reviewers = reviewers.filter (r) -> r.login != creator.login
# exclude current assignee from reviewer candidates
if assignee?
reviewers = reviewers.filter (r) -> r.login != assignee.login
ctx['reviewer'] = draw reviewers, robot.brain.get(STATS_KEY)
cb null, ctx
(ctx, cb) ->
# post a comment
{reviewer} = ctx
body = "@#{reviewer.login} " + if polite then politeMessage else normalMessage
params = _.extend { body }, prParams
gh.issues.createComment params, (err, res) -> cb err, ctx
(ctx, cb) ->
# change reviewers
{reviewer} = ctx
params = _.extend { reviewers: [reviewer.login] }, prParams
gh.pullRequests.createReviewRequest params, (err, res) -> cb err, ctx
(ctx, cb) ->
{reviewer, issue} = ctx
msg.reply "#{reviewer.login} has been assigned for #{issue.html_url} as a reviewer"
if ghWithAvatar
url = reviewer.avatar_url
url = "#{url}t=#{Date.now()}" # cache buster
url = url.replace(/(#.*|$)/, '#.png') # hipchat needs image-ish url to display inline image
msg.send url
# update stats
stats = (robot.brain.get STATS_KEY) or {}
stats[reviewer.login] or= 0
stats[reviewer.login]++
robot.brain.set STATS_KEY, stats
cb null, ctx
], (err, res) ->
if err?
msg.reply "an error occured.\n#{err}"
| true | # Description:
# assigns random reviewer for a pull request.
#
# Configuration:
# HUBOT_GITHUB_TOKEN (required)
# HUBOT_GITHUB_ORG (required)
# HUBOT_GITHUB_REVIEWER_TEAM (required)
# github team id. this script randomly picks a reviewer from this team members.
#
# Commands:
# hubot reviewer for <repo> <pull> - assigns random reviewer for pull request
# hubot reviewer show stats - proves the lotto has no bias
#
# Author:
# sakatam
_ = require "underscore"
async = require "async"
GitHubApi = require "github"
weighted = require "weighted"
module.exports = (robot) ->
ghToken = process.env.HUBOT_GITHUB_TOKEN
ghOrg = process.env.HUBOT_GITHUB_ORG
ghReviwerTeam = process.env.HUBOT_GITHUB_REVIEWER_TEAM
ghWithAvatar = process.env.HUBOT_GITHUB_WITH_AVATAR in ["1", "true"]
normalMessage = process.env.HUBOT_REVIEWER_LOTTO_MESSAGE || "Please review this."
politeMessage = process.env.HUBOT_REVIEWER_LOTTO_POLITE_MESSAGE || "#{normalMessage} :bow::bow::bow::bow:"
debug = process.env.HUBOT_REVIEWER_LOTTO_DEBUG in ["1", "true"]
STATS_KEY = 'PI:KEY:<KEY>END_PI'
# draw lotto - weighted random selection
draw = (reviewers, stats = null) ->
max = if stats? then (_.max _.map stats, (count) -> count) else 0
arms = {}
sum = 0
for {login} in reviewers
weight = Math.exp max - (stats?[login] || 0)
arms[login] = weight
sum += weight
# normalize weights
for login, weight of arms
arms[login] = if sum > 0 then weight / sum else 1
if debug
robot.logger.info 'arms: ', arms
selected = weighted.select arms
_.find reviewers, ({login}) -> login == selected
if !ghToken? or !ghOrg? or !ghReviwerTeam?
return robot.logger.error """
reviewer-lottery is not loaded due to missing configuration!
#{__filename}
HUBOT_GITHUB_TOKEN: #{ghToken}
HUBOT_GITHUB_ORG: #{ghOrg}
HUBOT_GITHUB_REVIEWER_TEAM: #{ghReviwerTeam}
"""
robot.respond /reviewer reset stats/i, (msg) ->
robot.brain.set STATS_KEY, {}
msg.reply "Reset reviewer stats!"
robot.respond /reviewer show stats$/i, (msg) ->
stats = robot.brain.get STATS_KEY
msgs = ["login, percentage, num assigned"]
total = 0
for login, count of stats
total += count
for login, count of stats
percentage = Math.floor(count * 100.0 / total)
msgs.push "#{login}, #{percentage}%, #{count}"
msg.reply msgs.join "\n"
robot.respond /reviewer for ([\w-\.]+) (\d+)( polite)?$/i, (msg) ->
repo = msg.match[1]
pr = msg.match[2]
polite = msg.match[3]?
prParams =
owner: ghOrg
repo: repo
number: pr
gh = new GitHubApi version: "3.0.0"
gh.authenticate {type: "oauth", token: ghToken}
# mock api if debug mode
if debug
gh.issues.createComment = (params, cb) ->
robot.logger.info "GitHubApi - createComment is called", params
cb null
gh.issues.edit = (params, cb) ->
robot.logger.info "GitHubApi - edit is called", params
cb null
async.waterfall [
(cb) ->
# get team members
params =
id: ghReviwerTeam
per_page: 100
gh.orgs.getTeamMembers params, (err, res) ->
return cb "error on getting team members: #{err.toString()}" if err?
cb null, {reviewers: res}
(ctx, cb) ->
# check if pull req exists
gh.pullRequests.get prParams, (err, res) ->
return cb "error on getting pull request: #{err.toString()}" if err?
ctx['issue'] = res
ctx['creator'] = res.user
ctx['assignee'] = res.assignee
cb null, ctx
(ctx, cb) ->
# pick reviewer
{reviewers, creator, assignee} = ctx
reviewers = reviewers.filter (r) -> r.login != creator.login
# exclude current assignee from reviewer candidates
if assignee?
reviewers = reviewers.filter (r) -> r.login != assignee.login
ctx['reviewer'] = draw reviewers, robot.brain.get(STATS_KEY)
cb null, ctx
(ctx, cb) ->
# post a comment
{reviewer} = ctx
body = "@#{reviewer.login} " + if polite then politeMessage else normalMessage
params = _.extend { body }, prParams
gh.issues.createComment params, (err, res) -> cb err, ctx
(ctx, cb) ->
# change reviewers
{reviewer} = ctx
params = _.extend { reviewers: [reviewer.login] }, prParams
gh.pullRequests.createReviewRequest params, (err, res) -> cb err, ctx
(ctx, cb) ->
{reviewer, issue} = ctx
msg.reply "#{reviewer.login} has been assigned for #{issue.html_url} as a reviewer"
if ghWithAvatar
url = reviewer.avatar_url
url = "#{url}t=#{Date.now()}" # cache buster
url = url.replace(/(#.*|$)/, '#.png') # hipchat needs image-ish url to display inline image
msg.send url
# update stats
stats = (robot.brain.get STATS_KEY) or {}
stats[reviewer.login] or= 0
stats[reviewer.login]++
robot.brain.set STATS_KEY, stats
cb null, ctx
], (err, res) ->
if err?
msg.reply "an error occured.\n#{err}"
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.999164342880249,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-child-process-execsync.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
util = require("util")
os = require("os")
execSync = require("child_process").execSync
execFileSync = require("child_process").execFileSync
# Spawn a child that would idle ~2 s but give execSync only a 200 ms timeout:
# the call must throw ETIMEDOUT well before the child would have finished.
TIMER = 200
SLEEP = 2000
start = Date.now()
err = undefined
caught = false
try
  cmd = util.format("%s -e \"setTimeout(function(){}, %d);\"", process.execPath, SLEEP)
  ret = execSync(cmd,
    timeout: TIMER
  )
catch e
  caught = true
  assert.strictEqual e.errno, "ETIMEDOUT"
  err = e
finally
  assert.strictEqual ret, `undefined`, "we should not have a return value"
  assert.strictEqual caught, true, "execSync should throw"
end = Date.now() - start
assert end < SLEEP
# A timed-out child is killed by signal or exits with status > 128.
assert err.status > 128 or err.signal
# A nonexistent command must surface as a thrown "Command failed" error.
assert.throws (->
  execSync "iamabadcommand"
  return
), /Command failed: iamabadcommand/
msg = "foobar"
# NOTE(review): `new Buffer` is deprecated in modern Node (use Buffer.from);
# kept as-is since this test targets an older runtime.
msgBuf = new Buffer(msg + "\n")
# console.log ends every line with just '\n', even on Windows.
cmd = util.format("%s -e \"console.log('%s');\"", process.execPath, msg)
ret = execSync(cmd)
assert.strictEqual ret.length, msgBuf.length
assert.deepEqual ret, msgBuf, "execSync result buffer should match"
# With an encoding option, execSync returns a string instead of a Buffer.
ret = execSync(cmd,
  encoding: "utf8"
)
assert.strictEqual ret, msg + "\n", "execSync encoding result should match"
args = [
  "-e"
  util.format("console.log(\"%s\");", msg)
]
ret = execFileSync(process.execPath, args)
assert.deepEqual ret, msgBuf
ret = execFileSync(process.execPath, args,
  encoding: "utf8"
)
assert.strictEqual ret, msg + "\n", "execFileSync encoding result should match"
# Verify that the cwd option works - GH #7824
(->
  response = undefined
  cwd = undefined
  if process.platform is "win32"
    cwd = "c:\\"
    response = execSync("echo %cd%",
      cwd: cwd
    )
  else
    cwd = "/"
    response = execSync("pwd",
      cwd: cwd
    )
  # The child's reported working directory must match the requested cwd.
  assert.strictEqual response.toString().trim(), cwd
  return
)()
# Verify that stderr is not accessed when stdio = 'ignore' - GH #7966
(->
  assert.throws (->
    execSync "exit -1",
      stdio: "ignore"
    return
  ), /Command failed: exit -1/
  return
)()
| 70924 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
util = require("util")
os = require("os")
execSync = require("child_process").execSync
execFileSync = require("child_process").execFileSync
TIMER = 200
SLEEP = 2000
start = Date.now()
err = undefined
caught = false
try
cmd = util.format("%s -e \"setTimeout(function(){}, %d);\"", process.execPath, SLEEP)
ret = execSync(cmd,
timeout: TIMER
)
catch e
caught = true
assert.strictEqual e.errno, "ETIMEDOUT"
err = e
finally
assert.strictEqual ret, `undefined`, "we should not have a return value"
assert.strictEqual caught, true, "execSync should throw"
end = Date.now() - start
assert end < SLEEP
assert err.status > 128 or err.signal
assert.throws (->
execSync "iamabadcommand"
return
), /Command failed: iamabadcommand/
msg = "foobar"
msgBuf = new Buffer(msg + "\n")
# console.log ends every line with just '\n', even on Windows.
cmd = util.format("%s -e \"console.log('%s');\"", process.execPath, msg)
ret = execSync(cmd)
assert.strictEqual ret.length, msgBuf.length
assert.deepEqual ret, msgBuf, "execSync result buffer should match"
ret = execSync(cmd,
encoding: "utf8"
)
assert.strictEqual ret, msg + "\n", "execSync encoding result should match"
args = [
"-e"
util.format("console.log(\"%s\");", msg)
]
ret = execFileSync(process.execPath, args)
assert.deepEqual ret, msgBuf
ret = execFileSync(process.execPath, args,
encoding: "utf8"
)
assert.strictEqual ret, msg + "\n", "execFileSync encoding result should match"
# Verify that the cwd option works - GH #7824
(->
response = undefined
cwd = undefined
if process.platform is "win32"
cwd = "c:\\"
response = execSync("echo %cd%",
cwd: cwd
)
else
cwd = "/"
response = execSync("pwd",
cwd: cwd
)
assert.strictEqual response.toString().trim(), cwd
return
)()
# Verify that stderr is not accessed when stdio = 'ignore' - GH #7966
(->
assert.throws (->
execSync "exit -1",
stdio: "ignore"
return
), /Command failed: exit -1/
return
)()
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
util = require("util")
os = require("os")
execSync = require("child_process").execSync
execFileSync = require("child_process").execFileSync
TIMER = 200
SLEEP = 2000
start = Date.now()
err = undefined
caught = false
try
cmd = util.format("%s -e \"setTimeout(function(){}, %d);\"", process.execPath, SLEEP)
ret = execSync(cmd,
timeout: TIMER
)
catch e
caught = true
assert.strictEqual e.errno, "ETIMEDOUT"
err = e
finally
assert.strictEqual ret, `undefined`, "we should not have a return value"
assert.strictEqual caught, true, "execSync should throw"
end = Date.now() - start
assert end < SLEEP
assert err.status > 128 or err.signal
assert.throws (->
execSync "iamabadcommand"
return
), /Command failed: iamabadcommand/
msg = "foobar"
msgBuf = new Buffer(msg + "\n")
# console.log ends every line with just '\n', even on Windows.
cmd = util.format("%s -e \"console.log('%s');\"", process.execPath, msg)
ret = execSync(cmd)
assert.strictEqual ret.length, msgBuf.length
assert.deepEqual ret, msgBuf, "execSync result buffer should match"
ret = execSync(cmd,
encoding: "utf8"
)
assert.strictEqual ret, msg + "\n", "execSync encoding result should match"
args = [
"-e"
util.format("console.log(\"%s\");", msg)
]
ret = execFileSync(process.execPath, args)
assert.deepEqual ret, msgBuf
ret = execFileSync(process.execPath, args,
encoding: "utf8"
)
assert.strictEqual ret, msg + "\n", "execFileSync encoding result should match"
# Verify that the cwd option works - GH #7824
(->
response = undefined
cwd = undefined
if process.platform is "win32"
cwd = "c:\\"
response = execSync("echo %cd%",
cwd: cwd
)
else
cwd = "/"
response = execSync("pwd",
cwd: cwd
)
assert.strictEqual response.toString().trim(), cwd
return
)()
# Verify that stderr is not accessed when stdio = 'ignore' - GH #7966
(->
assert.throws (->
execSync "exit -1",
stdio: "ignore"
return
), /Command failed: exit -1/
return
)()
|
[
{
"context": "es = []\nclientKey = undefined\ndefaultClientKey = 'client-id'\nqueryParamPattern = '([-A-z0-9\\/?@:%$_&=\\+.~#])*",
"end": 140,
"score": 0.9986392855644226,
"start": 131,
"tag": "KEY",
"value": "client-id"
}
] | src/index.coffee | elyku/express-client-validator | 4 | _ = require('underscore')
schema = require './schema'
Ajv = require 'ajv'
# Module state: the configured restricted routes (installed by configure())
# and the request-header name carrying the client id.
appRoutes = []
clientKey = undefined
defaultClientKey = 'client-id'
# Regex fragments: an optional query-string tail and a single path segment
# (used to expand "?" wildcards in configured route URLs).
queryParamPattern = '([-A-z0-9\/?@:%$_&=\+.~#])*'
matchPattern = '([-A-z0-9@:%$_\+.~#])+'
# Find the configured restricted route matching the given URL and method.
# Route URLs may contain "?" wildcards (one path segment each), and a query
# string suffix is tolerated. When nothing matches, the URL is truncated one
# path segment at a time and retried, so "/a/b/c" can match a rule for "/a".
# Returns undefined when no route restricts the URL.
findMatchingRoute = (url, method)->
  matchedRoute = appRoutes.find (route)->
    pattern = route.url.replace(/\?/g,matchPattern) + queryParamPattern
    new RegExp(pattern).test(url) and method in route.methods
  if not matchedRoute
    matchUrl = if url.lastIndexOf('/') > 0 then url.slice(0,url.lastIndexOf('/')) else "/"
    if url isnt matchUrl then findMatchingRoute(matchUrl, method) else matchedRoute
  else
    matchedRoute
clientIdHasAccess = (clientId, originalUrl, method = 'POST') ->
route = findMatchingRoute(originalUrl, method)
not route or route.clientIds.length is 0 or clientId in route.clientIds
validator = (req,res,next)->
if clientIdHasAccess(req.headers[clientKey], req.originalUrl, req.method)
next()
else
res.status(403)
res.send("Invalid Client")
validateSchema = (restrictedRoutes)->
if not restrictedRoutes
throw new Error('restricted routes list need to be provided')
ajv = Ajv({allErrors: true, jsonPointers: true, missingRefs: false })
validate = ajv.compile(schema)
data = validate(restrictedRoutes)
if validate.errors
messages = validate.errors.map (error)->"#{error.dataPath} #{error.message}".trim()
throw new Error(messages)
configure = ({headerClientKey,routes})->
clientKey = if headerClientKey then headerClientKey else defaultClientKey
try
validateSchema(routes)
appRoutes = _(routes).sortBy('url').reverse()
Promise.resolve("routes configured")
catch errors
Promise.reject(errors)
module.exports = {validator,configure,clientIdHasAccess}
| 188296 | _ = require('underscore')
schema = require './schema'
Ajv = require 'ajv'
appRoutes = []
clientKey = undefined
defaultClientKey = '<KEY>'
queryParamPattern = '([-A-z0-9\/?@:%$_&=\+.~#])*'
matchPattern = '([-A-z0-9@:%$_\+.~#])+'
findMatchingRoute = (url, method)->
matchedRoute = appRoutes.find (route)->
pattern = route.url.replace(/\?/g,matchPattern) + queryParamPattern
new RegExp(pattern).test(url) and method in route.methods
if not matchedRoute
matchUrl = if url.lastIndexOf('/') > 0 then url.slice(0,url.lastIndexOf('/')) else "/"
if url isnt matchUrl then findMatchingRoute(matchUrl, method) else matchedRoute
else
matchedRoute
clientIdHasAccess = (clientId, originalUrl, method = 'POST') ->
route = findMatchingRoute(originalUrl, method)
not route or route.clientIds.length is 0 or clientId in route.clientIds
validator = (req,res,next)->
if clientIdHasAccess(req.headers[clientKey], req.originalUrl, req.method)
next()
else
res.status(403)
res.send("Invalid Client")
validateSchema = (restrictedRoutes)->
if not restrictedRoutes
throw new Error('restricted routes list need to be provided')
ajv = Ajv({allErrors: true, jsonPointers: true, missingRefs: false })
validate = ajv.compile(schema)
data = validate(restrictedRoutes)
if validate.errors
messages = validate.errors.map (error)->"#{error.dataPath} #{error.message}".trim()
throw new Error(messages)
configure = ({headerClientKey,routes})->
clientKey = if headerClientKey then headerClientKey else defaultClientKey
try
validateSchema(routes)
appRoutes = _(routes).sortBy('url').reverse()
Promise.resolve("routes configured")
catch errors
Promise.reject(errors)
module.exports = {validator,configure,clientIdHasAccess}
| true | _ = require('underscore')
schema = require './schema'
Ajv = require 'ajv'
appRoutes = []
clientKey = undefined
defaultClientKey = 'PI:KEY:<KEY>END_PI'
queryParamPattern = '([-A-z0-9\/?@:%$_&=\+.~#])*'
matchPattern = '([-A-z0-9@:%$_\+.~#])+'
findMatchingRoute = (url, method)->
matchedRoute = appRoutes.find (route)->
pattern = route.url.replace(/\?/g,matchPattern) + queryParamPattern
new RegExp(pattern).test(url) and method in route.methods
if not matchedRoute
matchUrl = if url.lastIndexOf('/') > 0 then url.slice(0,url.lastIndexOf('/')) else "/"
if url isnt matchUrl then findMatchingRoute(matchUrl, method) else matchedRoute
else
matchedRoute
clientIdHasAccess = (clientId, originalUrl, method = 'POST') ->
route = findMatchingRoute(originalUrl, method)
not route or route.clientIds.length is 0 or clientId in route.clientIds
validator = (req,res,next)->
if clientIdHasAccess(req.headers[clientKey], req.originalUrl, req.method)
next()
else
res.status(403)
res.send("Invalid Client")
validateSchema = (restrictedRoutes)->
if not restrictedRoutes
throw new Error('restricted routes list need to be provided')
ajv = Ajv({allErrors: true, jsonPointers: true, missingRefs: false })
validate = ajv.compile(schema)
data = validate(restrictedRoutes)
if validate.errors
messages = validate.errors.map (error)->"#{error.dataPath} #{error.message}".trim()
throw new Error(messages)
configure = ({headerClientKey,routes})->
clientKey = if headerClientKey then headerClientKey else defaultClientKey
try
validateSchema(routes)
appRoutes = _(routes).sortBy('url').reverse()
Promise.resolve("routes configured")
catch errors
Promise.reject(errors)
module.exports = {validator,configure,clientIdHasAccess}
|
[
{
"context": "d\n scope = $rootScope.$new()\n scope.name = 'saberma'\n scope.phone = '13928452888'\n $httpBackend",
"end": 200,
"score": 0.9889549612998962,
"start": 193,
"tag": "USERNAME",
"value": "saberma"
},
{
"context": " ($rootScope) -> $rootScope.user = {id: 1, nam... | spec/javascripts/events/orders.js.coffee | richard-ma/19wu | 252 | describe "orders", ->
scope = $httpBackend = null
controller = ($rootScope, $injector, $http, $window, $controller) -> # disabled was inited
scope = $rootScope.$new()
scope.name = 'saberma'
scope.phone = '13928452888'
$httpBackend = $injector.get('$httpBackend')
$controller(OrdersCtrl, {$scope: scope, $http: $http, $window: $window})
describe "create", ->
describe 'when user logined', ->
beforeEach -> inject ($rootScope) -> $rootScope.user = {id: 1, name: '张三', phone: '13928452888'}
describe 'when event is not start', ->
beforeEach ->
inject ($rootScope, $injector, $http, $window, $controller) ->
$rootScope.event = {id: 1, started: false}
controller $rootScope, $injector, $http, $window, $controller
it "should not be diabled", ->
expect(scope.disabled).toBe(false)
describe 'with tickets', ->
beforeEach ->
scope.tickets = [{"id":1,"name":"个人票","price":0.01,"require_invoice":false,"description":"","quantity":1}]
$httpBackend.when('POST', '/events/1/orders').respond(200, {result: 'ok', id: 1, link: 'https://alipay.com'})
scope.create()
it "should be success", ->
$httpBackend.flush()
expect(scope.id).toBe(1)
expect(scope.pay_url).toBe('https://alipay.com')
describe 'without tickets', ->
beforeEach ->
scope.tickets = [{"id":1,"name":"个人票","price":0.01,"require_invoice":false,"description":"","quantity":0}]
scope.create()
it "should be fail", ->
expect(scope.errors['tickets']).toBe(true)
describe 'when event is started', ->
beforeEach ->
inject ($rootScope, $injector, $http, $window, $controller) ->
$rootScope.event = {id: 1, started: true}
controller $rootScope, $injector, $http, $window, $controller
scope.create()
it "should be diabled", ->
expect(scope.disabled).toBe(true)
expect(scope.errors['tickets']).toBeUndefined()
| 145093 | describe "orders", ->
scope = $httpBackend = null
controller = ($rootScope, $injector, $http, $window, $controller) -> # disabled was inited
scope = $rootScope.$new()
scope.name = 'saberma'
scope.phone = '13928452888'
$httpBackend = $injector.get('$httpBackend')
$controller(OrdersCtrl, {$scope: scope, $http: $http, $window: $window})
describe "create", ->
describe 'when user logined', ->
beforeEach -> inject ($rootScope) -> $rootScope.user = {id: 1, name: '<NAME>', phone: '13928452888'}
describe 'when event is not start', ->
beforeEach ->
inject ($rootScope, $injector, $http, $window, $controller) ->
$rootScope.event = {id: 1, started: false}
controller $rootScope, $injector, $http, $window, $controller
it "should not be diabled", ->
expect(scope.disabled).toBe(false)
describe 'with tickets', ->
beforeEach ->
scope.tickets = [{"id":1,"name":"个人票","price":0.01,"require_invoice":false,"description":"","quantity":1}]
$httpBackend.when('POST', '/events/1/orders').respond(200, {result: 'ok', id: 1, link: 'https://alipay.com'})
scope.create()
it "should be success", ->
$httpBackend.flush()
expect(scope.id).toBe(1)
expect(scope.pay_url).toBe('https://alipay.com')
describe 'without tickets', ->
beforeEach ->
scope.tickets = [{"id":1,"name":"个人票","price":0.01,"require_invoice":false,"description":"","quantity":0}]
scope.create()
it "should be fail", ->
expect(scope.errors['tickets']).toBe(true)
describe 'when event is started', ->
beforeEach ->
inject ($rootScope, $injector, $http, $window, $controller) ->
$rootScope.event = {id: 1, started: true}
controller $rootScope, $injector, $http, $window, $controller
scope.create()
it "should be diabled", ->
expect(scope.disabled).toBe(true)
expect(scope.errors['tickets']).toBeUndefined()
| true | describe "orders", ->
scope = $httpBackend = null
controller = ($rootScope, $injector, $http, $window, $controller) -> # disabled was inited
scope = $rootScope.$new()
scope.name = 'saberma'
scope.phone = '13928452888'
$httpBackend = $injector.get('$httpBackend')
$controller(OrdersCtrl, {$scope: scope, $http: $http, $window: $window})
describe "create", ->
describe 'when user logined', ->
beforeEach -> inject ($rootScope) -> $rootScope.user = {id: 1, name: 'PI:NAME:<NAME>END_PI', phone: '13928452888'}
describe 'when event is not start', ->
beforeEach ->
inject ($rootScope, $injector, $http, $window, $controller) ->
$rootScope.event = {id: 1, started: false}
controller $rootScope, $injector, $http, $window, $controller
it "should not be diabled", ->
expect(scope.disabled).toBe(false)
describe 'with tickets', ->
beforeEach ->
scope.tickets = [{"id":1,"name":"个人票","price":0.01,"require_invoice":false,"description":"","quantity":1}]
$httpBackend.when('POST', '/events/1/orders').respond(200, {result: 'ok', id: 1, link: 'https://alipay.com'})
scope.create()
it "should be success", ->
$httpBackend.flush()
expect(scope.id).toBe(1)
expect(scope.pay_url).toBe('https://alipay.com')
describe 'without tickets', ->
beforeEach ->
scope.tickets = [{"id":1,"name":"个人票","price":0.01,"require_invoice":false,"description":"","quantity":0}]
scope.create()
it "should be fail", ->
expect(scope.errors['tickets']).toBe(true)
describe 'when event is started', ->
beforeEach ->
inject ($rootScope, $injector, $http, $window, $controller) ->
$rootScope.event = {id: 1, started: true}
controller $rootScope, $injector, $http, $window, $controller
scope.create()
it "should be diabled", ->
expect(scope.disabled).toBe(true)
expect(scope.errors['tickets']).toBeUndefined()
|
[
{
"context": " z '.image.austin'\n z '.name', 'Austin'\n z '.text',\n @mo",
"end": 4002,
"score": 0.999771773815155,
"start": 3996,
"tag": "NAME",
"value": "Austin"
},
{
"context": " z '.image.rachel'\n z '.name', 'R... | src/components/about/index.coffee | FreeRoamApp/free-roam | 14 | z = require 'zorium'
Icon = require '../icon'
PrimaryButton = require '../primary_button'
SecondaryButton = require '../secondary_button'
Tabs = require '../tabs'
colors = require '../../colors'
config = require '../../config'
if window?
require './index.styl'
module.exports = class About
constructor: ({@model, @router}) ->
me = @model.user.getMe()
@$learnMoreButton = new SecondaryButton()
@$donateButton = new SecondaryButton()
@$shareButton = new PrimaryButton()
@$reviewButton = new PrimaryButton()
@$feedbackButton = new PrimaryButton()
@$irsButton = new SecondaryButton()
@$tabs = new Tabs {@model}
@state = z.state
windowSize: @model.window.getSize()
render: =>
{windowSize} = @state.getValue()
z '.z-about',
z '.mission',
z '.content',
z 'h1.title', @model.l.get 'about.missionTitle'
z '.text', @model.l.get 'about.mission'
z '.actions',
z '.action',
z @$learnMoreButton,
text: @model.l.get 'drawer.roamWithCare'
isOutline: true
onclick: =>
@router.go 'roamWithCare'
z '.action',
z @$donateButton,
text: @model.l.get 'general.donate'
onclick: =>
@router.go 'donate'
z '.roadmap',
z '.info',
z '.title', @model.l.get 'about.roadmapTitle'
z '.description', @model.l.get 'about.roadmapDescription'
z @$tabs,
isBarFixed: false
isBarArrow: true
tabs: [
{
$menuText: @model.l.get 'about.phase1'
$el:
z '.z-about_roadmap-phase.phase-1',
z '.image'
z '.phase', @model.l.get('about.phase1')+ ':'
z '.title', @model.l.get 'about.phase1Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase1Bullet1'
z 'li', @model.l.get 'about.phase1Bullet2'
z 'li', @model.l.get 'about.phase1Bullet3'
}
{
$menuText: @model.l.get 'about.phase2'
$el:
z '.z-about_roadmap-phase.phase-2',
z '.image'
z '.phase', @model.l.get('about.phase2')+ ':'
z '.title', @model.l.get 'about.phase2Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase2Bullet1'
z 'li', @model.l.get 'about.phase2Bullet2'
# z 'li', @model.l.get 'about.phase2Bullet3'
}
{
$menuText: @model.l.get 'about.phase3'
$el:
z '.z-about_roadmap-phase.phase-3',
z '.image'
z '.phase', @model.l.get('about.phase3')+ ':'
z '.title', @model.l.get 'about.phase3Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase3Bullet1'
z 'li', @model.l.get 'about.phase3Bullet2'
z 'li', @model.l.get 'about.phase3Bullet3'
}
{
$menuText: @model.l.get 'about.phase4'
$el:
z '.z-about_roadmap-phase.phase-4',
z '.image'
z '.phase', @model.l.get('about.phase4')+ ':'
z '.title', @model.l.get 'about.phase4Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase4Bullet1'
# z 'li', @model.l.get 'about.phase4Bullet2'
# z 'li', @model.l.get 'about.phase4Bullet3'
}
]
z '.meet',
z '.g-grid',
z 'h1.title', @model.l.get 'about.meetTitle'
z '.g-grid',
z '.g-cols',
z '.g-col.g-xs-12.g-md-6',
z '.image.austin'
z '.name', 'Austin'
z '.text',
@model.l.get 'about.meetAustinText'
z '.g-col.g-xs-12.g-md-6',
z '.image.rachel'
z '.name', 'Rachel'
z '.text',
@model.l.get 'about.meetRachelText'
z '.help',
z '.g-grid',
z 'h1.title', @model.l.get 'about.helpTitle'
z '.text', @model.l.get 'about.help'
z '.g-grid',
z '.g-cols',
z '.g-col.g-xs-12.g-md-4',
z '.image.share'
z '.title', @model.l.get 'about.helpShareTitle'
z '.description', @model.l.get 'about.helpShare'
z '.button',
z @$shareButton,
text: @model.l.get 'about.helpShareButton'
onclick: =>
@model.portal.call 'share.any', {
text: 'FreeRoam'
path: ''
url: "https://#{config.HOST}"
}
z '.g-col.g-xs-12.g-md-4',
z '.image.review'
z '.title', @model.l.get 'about.helpReviewTitle'
z '.description', @model.l.get 'about.helpReview'
z '.button',
z @$reviewButton,
text: @model.l.get 'about.helpReviewButton'
onclick: =>
@router.go 'home'
colors:
c200: colors.$skyBlue200
c500: colors.$skyBlue500
c600: colors.$skyBlue600
c700: colors.$skyBlue700
ink: colors.$white
z '.g-col.g-xs-12.g-md-4',
z '.image.feedback'
z '.title', @model.l.get 'about.helpFeedbackTitle'
z '.description', @model.l.get 'about.helpFeedback'
z '.button',
z @$feedbackButton,
text: @model.l.get 'about.helpFeedbackButton'
onclick: =>
@router.go 'groupChat', {
groupId: 'freeroam'
}
colors:
c200: colors.$yellow200
c500: colors.$yellow500
c600: colors.$yellow600
c700: colors.$yellow700
ink: colors.$white
z '.transparency',
z '.g-grid',
z 'h1.title', @model.l.get 'about.transparencyTitle'
z '.text',
@model.l.get 'about.transparency1'
@router.link z 'a', {
href: 'http://github.com/freeroamapp'
}, @model.l.get 'general.here'
' '
@model.l.get 'about.transparency2'
z '.button',
z @$irsButton,
text: @model.l.get 'about.irsDetermination'
isOutline: true
# isFullWidth: false
onclick: =>
@router.openLink(
'https://fdn.uno/d/documents/irs-determination.pdf'
)
# z 'p.disclaimer', @model.l.get 'about.amazon'
z '.disclaimer',
z '.g-grid',
@model.l.get 'about.opencellid'
| 15956 | z = require 'zorium'
Icon = require '../icon'
PrimaryButton = require '../primary_button'
SecondaryButton = require '../secondary_button'
Tabs = require '../tabs'
colors = require '../../colors'
config = require '../../config'
if window?
require './index.styl'
module.exports = class About
constructor: ({@model, @router}) ->
me = @model.user.getMe()
@$learnMoreButton = new SecondaryButton()
@$donateButton = new SecondaryButton()
@$shareButton = new PrimaryButton()
@$reviewButton = new PrimaryButton()
@$feedbackButton = new PrimaryButton()
@$irsButton = new SecondaryButton()
@$tabs = new Tabs {@model}
@state = z.state
windowSize: @model.window.getSize()
render: =>
{windowSize} = @state.getValue()
z '.z-about',
z '.mission',
z '.content',
z 'h1.title', @model.l.get 'about.missionTitle'
z '.text', @model.l.get 'about.mission'
z '.actions',
z '.action',
z @$learnMoreButton,
text: @model.l.get 'drawer.roamWithCare'
isOutline: true
onclick: =>
@router.go 'roamWithCare'
z '.action',
z @$donateButton,
text: @model.l.get 'general.donate'
onclick: =>
@router.go 'donate'
z '.roadmap',
z '.info',
z '.title', @model.l.get 'about.roadmapTitle'
z '.description', @model.l.get 'about.roadmapDescription'
z @$tabs,
isBarFixed: false
isBarArrow: true
tabs: [
{
$menuText: @model.l.get 'about.phase1'
$el:
z '.z-about_roadmap-phase.phase-1',
z '.image'
z '.phase', @model.l.get('about.phase1')+ ':'
z '.title', @model.l.get 'about.phase1Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase1Bullet1'
z 'li', @model.l.get 'about.phase1Bullet2'
z 'li', @model.l.get 'about.phase1Bullet3'
}
{
$menuText: @model.l.get 'about.phase2'
$el:
z '.z-about_roadmap-phase.phase-2',
z '.image'
z '.phase', @model.l.get('about.phase2')+ ':'
z '.title', @model.l.get 'about.phase2Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase2Bullet1'
z 'li', @model.l.get 'about.phase2Bullet2'
# z 'li', @model.l.get 'about.phase2Bullet3'
}
{
$menuText: @model.l.get 'about.phase3'
$el:
z '.z-about_roadmap-phase.phase-3',
z '.image'
z '.phase', @model.l.get('about.phase3')+ ':'
z '.title', @model.l.get 'about.phase3Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase3Bullet1'
z 'li', @model.l.get 'about.phase3Bullet2'
z 'li', @model.l.get 'about.phase3Bullet3'
}
{
$menuText: @model.l.get 'about.phase4'
$el:
z '.z-about_roadmap-phase.phase-4',
z '.image'
z '.phase', @model.l.get('about.phase4')+ ':'
z '.title', @model.l.get 'about.phase4Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase4Bullet1'
# z 'li', @model.l.get 'about.phase4Bullet2'
# z 'li', @model.l.get 'about.phase4Bullet3'
}
]
z '.meet',
z '.g-grid',
z 'h1.title', @model.l.get 'about.meetTitle'
z '.g-grid',
z '.g-cols',
z '.g-col.g-xs-12.g-md-6',
z '.image.austin'
z '.name', '<NAME>'
z '.text',
@model.l.get 'about.meetAustinText'
z '.g-col.g-xs-12.g-md-6',
z '.image.rachel'
z '.name', '<NAME>'
z '.text',
@model.l.get 'about.meetRachelText'
z '.help',
z '.g-grid',
z 'h1.title', @model.l.get 'about.helpTitle'
z '.text', @model.l.get 'about.help'
z '.g-grid',
z '.g-cols',
z '.g-col.g-xs-12.g-md-4',
z '.image.share'
z '.title', @model.l.get 'about.helpShareTitle'
z '.description', @model.l.get 'about.helpShare'
z '.button',
z @$shareButton,
text: @model.l.get 'about.helpShareButton'
onclick: =>
@model.portal.call 'share.any', {
text: 'FreeRoam'
path: ''
url: "https://#{config.HOST}"
}
z '.g-col.g-xs-12.g-md-4',
z '.image.review'
z '.title', @model.l.get 'about.helpReviewTitle'
z '.description', @model.l.get 'about.helpReview'
z '.button',
z @$reviewButton,
text: @model.l.get 'about.helpReviewButton'
onclick: =>
@router.go 'home'
colors:
c200: colors.$skyBlue200
c500: colors.$skyBlue500
c600: colors.$skyBlue600
c700: colors.$skyBlue700
ink: colors.$white
z '.g-col.g-xs-12.g-md-4',
z '.image.feedback'
z '.title', @model.l.get 'about.helpFeedbackTitle'
z '.description', @model.l.get 'about.helpFeedback'
z '.button',
z @$feedbackButton,
text: @model.l.get 'about.helpFeedbackButton'
onclick: =>
@router.go 'groupChat', {
groupId: 'freeroam'
}
colors:
c200: colors.$yellow200
c500: colors.$yellow500
c600: colors.$yellow600
c700: colors.$yellow700
ink: colors.$white
z '.transparency',
z '.g-grid',
z 'h1.title', @model.l.get 'about.transparencyTitle'
z '.text',
@model.l.get 'about.transparency1'
@router.link z 'a', {
href: 'http://github.com/freeroamapp'
}, @model.l.get 'general.here'
' '
@model.l.get 'about.transparency2'
z '.button',
z @$irsButton,
text: @model.l.get 'about.irsDetermination'
isOutline: true
# isFullWidth: false
onclick: =>
@router.openLink(
'https://fdn.uno/d/documents/irs-determination.pdf'
)
# z 'p.disclaimer', @model.l.get 'about.amazon'
z '.disclaimer',
z '.g-grid',
@model.l.get 'about.opencellid'
| true | z = require 'zorium'
Icon = require '../icon'
PrimaryButton = require '../primary_button'
SecondaryButton = require '../secondary_button'
Tabs = require '../tabs'
colors = require '../../colors'
config = require '../../config'
if window?
require './index.styl'
module.exports = class About
constructor: ({@model, @router}) ->
me = @model.user.getMe()
@$learnMoreButton = new SecondaryButton()
@$donateButton = new SecondaryButton()
@$shareButton = new PrimaryButton()
@$reviewButton = new PrimaryButton()
@$feedbackButton = new PrimaryButton()
@$irsButton = new SecondaryButton()
@$tabs = new Tabs {@model}
@state = z.state
windowSize: @model.window.getSize()
render: =>
{windowSize} = @state.getValue()
z '.z-about',
z '.mission',
z '.content',
z 'h1.title', @model.l.get 'about.missionTitle'
z '.text', @model.l.get 'about.mission'
z '.actions',
z '.action',
z @$learnMoreButton,
text: @model.l.get 'drawer.roamWithCare'
isOutline: true
onclick: =>
@router.go 'roamWithCare'
z '.action',
z @$donateButton,
text: @model.l.get 'general.donate'
onclick: =>
@router.go 'donate'
z '.roadmap',
z '.info',
z '.title', @model.l.get 'about.roadmapTitle'
z '.description', @model.l.get 'about.roadmapDescription'
z @$tabs,
isBarFixed: false
isBarArrow: true
tabs: [
{
$menuText: @model.l.get 'about.phase1'
$el:
z '.z-about_roadmap-phase.phase-1',
z '.image'
z '.phase', @model.l.get('about.phase1')+ ':'
z '.title', @model.l.get 'about.phase1Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase1Bullet1'
z 'li', @model.l.get 'about.phase1Bullet2'
z 'li', @model.l.get 'about.phase1Bullet3'
}
{
$menuText: @model.l.get 'about.phase2'
$el:
z '.z-about_roadmap-phase.phase-2',
z '.image'
z '.phase', @model.l.get('about.phase2')+ ':'
z '.title', @model.l.get 'about.phase2Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase2Bullet1'
z 'li', @model.l.get 'about.phase2Bullet2'
# z 'li', @model.l.get 'about.phase2Bullet3'
}
{
$menuText: @model.l.get 'about.phase3'
$el:
z '.z-about_roadmap-phase.phase-3',
z '.image'
z '.phase', @model.l.get('about.phase3')+ ':'
z '.title', @model.l.get 'about.phase3Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase3Bullet1'
z 'li', @model.l.get 'about.phase3Bullet2'
z 'li', @model.l.get 'about.phase3Bullet3'
}
{
$menuText: @model.l.get 'about.phase4'
$el:
z '.z-about_roadmap-phase.phase-4',
z '.image'
z '.phase', @model.l.get('about.phase4')+ ':'
z '.title', @model.l.get 'about.phase4Title'
z 'ul.bullets',
z 'li', @model.l.get 'about.phase4Bullet1'
# z 'li', @model.l.get 'about.phase4Bullet2'
# z 'li', @model.l.get 'about.phase4Bullet3'
}
]
z '.meet',
z '.g-grid',
z 'h1.title', @model.l.get 'about.meetTitle'
z '.g-grid',
z '.g-cols',
z '.g-col.g-xs-12.g-md-6',
z '.image.austin'
z '.name', 'PI:NAME:<NAME>END_PI'
z '.text',
@model.l.get 'about.meetAustinText'
z '.g-col.g-xs-12.g-md-6',
z '.image.rachel'
z '.name', 'PI:NAME:<NAME>END_PI'
z '.text',
@model.l.get 'about.meetRachelText'
z '.help',
z '.g-grid',
z 'h1.title', @model.l.get 'about.helpTitle'
z '.text', @model.l.get 'about.help'
z '.g-grid',
z '.g-cols',
z '.g-col.g-xs-12.g-md-4',
z '.image.share'
z '.title', @model.l.get 'about.helpShareTitle'
z '.description', @model.l.get 'about.helpShare'
z '.button',
z @$shareButton,
text: @model.l.get 'about.helpShareButton'
onclick: =>
@model.portal.call 'share.any', {
text: 'FreeRoam'
path: ''
url: "https://#{config.HOST}"
}
z '.g-col.g-xs-12.g-md-4',
z '.image.review'
z '.title', @model.l.get 'about.helpReviewTitle'
z '.description', @model.l.get 'about.helpReview'
z '.button',
z @$reviewButton,
text: @model.l.get 'about.helpReviewButton'
onclick: =>
@router.go 'home'
colors:
c200: colors.$skyBlue200
c500: colors.$skyBlue500
c600: colors.$skyBlue600
c700: colors.$skyBlue700
ink: colors.$white
z '.g-col.g-xs-12.g-md-4',
z '.image.feedback'
z '.title', @model.l.get 'about.helpFeedbackTitle'
z '.description', @model.l.get 'about.helpFeedback'
z '.button',
z @$feedbackButton,
text: @model.l.get 'about.helpFeedbackButton'
onclick: =>
@router.go 'groupChat', {
groupId: 'freeroam'
}
colors:
c200: colors.$yellow200
c500: colors.$yellow500
c600: colors.$yellow600
c700: colors.$yellow700
ink: colors.$white
z '.transparency',
z '.g-grid',
z 'h1.title', @model.l.get 'about.transparencyTitle'
z '.text',
@model.l.get 'about.transparency1'
@router.link z 'a', {
href: 'http://github.com/freeroamapp'
}, @model.l.get 'general.here'
' '
@model.l.get 'about.transparency2'
z '.button',
z @$irsButton,
text: @model.l.get 'about.irsDetermination'
isOutline: true
# isFullWidth: false
onclick: =>
@router.openLink(
'https://fdn.uno/d/documents/irs-determination.pdf'
)
# z 'p.disclaimer', @model.l.get 'about.amazon'
z '.disclaimer',
z '.g-grid',
@model.l.get 'about.opencellid'
|
[
{
"context": "* 999999999)\n user_id: localStorage['purpleUserId']\n account_id: fleetLocationId\n vin",
"end": 4498,
"score": 0.8199235200881958,
"start": 4492,
"tag": "USERNAME",
"value": "UserId"
},
{
"context": "l.VERSION_NUMBER\n user_id: localStora... | src/app/controller/Fleet.coffee | Purple-Services/app | 2 | Ext.define 'Purple.controller.Fleet',
extend: 'Ext.app.Controller'
config:
refs:
fleet: 'fleet'
addFleetOrderFormHeading: '[ctype=addFleetOrderFormHeading]'
addFleetOrderButtonContainer: '[ctype=addFleetOrderButtonContainer]'
cancelEditFleetOrderButtonContainer: '[ctype=cancelEditFleetOrderButtonContainer]'
sendSavedDeliveriesButtonContainer: '[ctype=sendSavedDeliveriesButtonContainer]'
scanVinBarcodeButtonContainer: '[ctype=scanVinBarcodeButtonContainer]'
fleetAccountSelectField: '[ctype=fleetAccountSelectField]'
fleetVinField: '[ctype=fleetVinField]'
fleetLicensePlateField: '[ctype=fleetLicensePlateField]'
fleetGallonsField: '[ctype=fleetGallonsField]'
fleetGasTypeSelectField: '[ctype=fleetGasTypeSelectField]'
fleetIsTopTierField: '[ctype=fleetIsTopTierField]'
deliveriesList: '[ctype=deliveriesList]'
control:
fleet:
initialize: 'doInitialize'
addFleetOrderButtonContainer:
addFleetOrder: 'addFleetOrder'
cancelEditFleetOrderButtonContainer:
cancelEditFleetOrder: 'cancelEditFleetOrder'
scanVinBarcodeButtonContainer:
scanBarcode: 'scanBarcode'
fleetAccountSelectField:
fleetAccountSelectFieldChange: 'fleetAccountSelectFieldChange'
launch: ->
doInitialize: ->
@getAccounts()
getAccounts: ->
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/get-accounts"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name # just an additional info
lat: util.ctl('Main').lat
lng: util.ctl('Main').lng
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
localStorage['purpleFleetAccounts'] = JSON.stringify response.accounts
localStorage['purpleDefaultFleetAccount'] = response.default_account_id
@initFleetAccountSelectField()
else
@initFleetAccountSelectField()
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
@initFleetAccountSelectField()
initFleetAccountSelectField: ->
localStorage['purpleFleetAccounts'] ?= "[]"
localStorage['purpleDefaultFleetAccount'] ?= ""
accounts = JSON.parse localStorage['purpleFleetAccounts']
@currShowingFleetLocations = accounts
opts = []
for b,a of accounts
opts.push
text: "#{a.name}"
value: "#{a.id}"
@getFleetAccountSelectField().setOptions opts
@getFleetAccountSelectField().setValue(
localStorage['purpleDefaultFleetAccount']
)
@getFleetAccountSelectField().setDisabled no
fleetAccountSelectFieldChange: ->
if not @editingId?
@loadDeliveriesList()
getFleetLocationObjectById: (fleetLocationId) ->
@currShowingFleetLocations.filter(
(x) -> (x.id is fleetLocationId)
)[0]
addFleetOrder: (bypassLocationCheck = false) ->
values = @getFleet().getValues()
fleetLocationId = @getFleetAccountSelectField().getValue()
fleetLocationObject = @getFleetLocationObjectById fleetLocationId
if not bypassLocationCheck and
fleetLocationObject.lat and
util.ctl('Main').lat and
not util.withinRadius(
fleetLocationObject.lat,
fleetLocationObject.lng,
1500
)
util.confirm(
"You aren't near #{fleetLocationObject.name}. Are you sure this is the right location?",
"Confirm",
(=> @addFleetOrder true),
(->)
)
# refresh gps location to make sure this doesn't get too annoying
util.ctl('Main').updateLatlng()
else if values['gallons'] is "" or values['gallons'] <= 0
util.alert "'Gallons' must be a number greater than 0.", "Error", (->)
else if values['vin'] is "" and values['license_plate'] is ""
util.alert "You must enter either a VIN or License Plate / Stock #.", "Error", (->)
else
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
formData =
id: "local" + Math.floor(Math.random() * 999999999)
user_id: localStorage['purpleUserId']
account_id: fleetLocationId
vin: values['vin']
license_plate: values['license_plate'].toUpperCase()
gallons: values['gallons']
gas_type: values['gas_type']
is_top_tier: values['is_top_tier']
timestamp_recorded: Math.floor(Date.now() / 1000)
if @editingId
@saveFleetDelivery formData, @editingId
else
params = JSON.parse JSON.stringify(formData) # copy
params.version = util.VERSION_NUMBER
params.token = localStorage['purpleToken']
params.os = Ext.os.name
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/add-delivery"
params: Ext.JSON.encode params
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@getFleetVinField().reset()
@getFleetLicensePlateField().reset()
@getFleetGallonsField().reset()
util.alert "Fleet Delivery Added!", "Success", (->)
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.confirm(
"Save delivery details for later?",
"Unable to Connect",
(=>
localStorage['purpleSavedFleetDeliveries'] ?= "[]"
savedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
formData.savedLocally = true
savedDeliveries.push formData
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify savedDeliveries
@getFleetVinField().reset()
@getFleetLicensePlateField().reset()
@getFleetGallonsField().reset()
@renderDeliveriesList()))
sendSavedDeliveries: ->
fleetLocationId = @getFleetAccountSelectField().getValue()
localStorage['purpleSavedFleetDeliveries'] ?= "[]"
allSavedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
savedDeliveries = allSavedDeliveries.filter(
(x) => (x.account_id is fleetLocationId)
)
if savedDeliveries.length
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/add-deliveries"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name
fleet_location_id: fleetLocationId
deliveries: savedDeliveries
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
util.alert "#{savedDeliveries.length} fleet deliveries added!", "Success", (->)
remainingSavedDeliveries = allSavedDeliveries.filter(
(x) => (x.account_id isnt fleetLocationId)
)
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify remainingSavedDeliveries
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.alert "Saved deliveries not sent. Still saved for when you have a connection.", "Unable to Connect", (->)
else
util.alert "No saved deliveries.", "Error", (->)
scanBarcode: ->
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
cordova.plugins.barcodeScanner.scan ((result) =>
# alert("We got a barcode\n" +
# "Result: " + result.text + "\n" +
# "Format: " + result.format + "\n" +
# "Cancelled: " + result.cancelled);
if not result.cancelled
@getFleetVinField().setValue result.text.substr(-17)
else
alert "Not sure if actually a VIN number."
Ext.Viewport.setMasked false
), ((error) =>
alert 'Scanning failed: ' + error
Ext.Viewport.setMasked false
), {
'preferFrontCamera': false
'showFlipCameraButton': false
'prompt': 'Place a barcode inside the scan area'
#'formats': 'CODE_128'
'formats': 'DATA_MATRIX,CODE_128,CODE_39,QR_CODE'
'orientation': 'portrait' # todo: only works on Android, problematic in iOS because you often have to tilt the app to see inside of car door
}
loadDeliveriesList: ->
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
# recent remote orders
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/get-deliveries"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name # just an additional info
fleet_location_id: @getFleetAccountSelectField().getValue()
headers:
'Content-Type': 'application/json'
timeout: 7000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
@renderDeliveriesList()
renderDeliveriesList: (deliveries) ->
# prepend with locally stored deliveries
localStorage['purpleSavedFleetDeliveries'] ?= "[]"
allSavedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
savedDeliveries = allSavedDeliveries.filter(
(x) => (x.account_id is @getFleetAccountSelectField().getValue())
).sort (a, b) -> b.timestamp_recorded - a.timestamp_recorded
if deliveries?
onlyShowLocal = false
deliveries = savedDeliveries.concat deliveries
else
onlyShowLocal = true
deliveries = savedDeliveries
@currShowingDeliveries = deliveries
list = @getDeliveriesList()
if not list?
return
list.removeAll yes, yes
if deliveries.length is 0
list.add
xtype: 'component'
flex: 0
html: """
No deliveries at this location.
"""
cls: "loose-text"
style: "text-align: center;"
else
if savedDeliveries.length
list.add
xtype: 'container'
ctype: 'sendSavedDeliveriesButtonContainer'
flex: 0
height: 70
width: '100%'
padding: '0 0 15 0'
layout:
type: 'vbox'
pack: 'center'
align: 'center'
cls: 'smaller-button-pop'
items: [
{
xtype: 'button'
ui: 'action'
cls: [
'button-pop'
'button-pop-orange'
]
text: 'Send Saved Deliveries'
flex: 0
handler: => @sendSavedDeliveries()
}
]
list.add
xtype: 'component'
flex: 0
style:
color: '#555555'
fontSize: '14px'
html: 'Tap on a delivery to edit or delete.'
for o in deliveries
cls = [
'bottom-margin'
'order-list-item'
]
if o.savedLocally
cls.push 'highlighted'
list.add
xtype: 'textfield'
id: "oid_#{o.id}"
flex: 0
label: """
<span>
#{Ext.util.Format.date(
new Date(
if o.timestamp_recorded?
o.timestamp_recorded * 1000
else
o.timestamp_created
),
"n/j g:i a"
)} - #{o.license_plate} - $#{util.centsToDollars o.total_price}
</span>
<br /><span class="subtext">
#{o.gallons} gal #{if o.vin then "- " + o.vin else ""}
</span>
"""
labelWidth: '100%'
cls: cls
disabled: yes
listeners:
initialize: (field) =>
field.element.on 'tap', =>
oid = field.getId().split('_')[1]
field.addCls 'order-edit-mode'
setTimeout (=>
delivery = @getDeliveryObject oid
util.confirmDialog "",
((index) => switch index
when 1
@askDeleteFleetDelivery oid, (oid.substring(0, 5) is "local")
field.removeCls 'order-edit-mode'
when 2
@editFleetDelivery oid, (oid.substring(0, 5) is "local")
field.removeCls 'order-edit-mode'
else
field.removeCls 'order-edit-mode'
),
Ext.util.Format.date(
new Date(
if delivery.timestamp_recorded?
delivery.timestamp_recorded * 1000
else
delivery.timestamp_created
),
"n/j g:i a"
),
["Delete Delivery",
"Edit Delivery",
"Cancel"]
), 100 # wait for UI to update
# get the delivery details of an order that is currently showing in the deliveries list
# can be local or remote
getDeliveryObject: (id) ->
@currShowingDeliveries.filter(
(x) -> (x.id is id)
)[0]
editFleetDelivery: (id, isLocal) ->
@editingId = id
delivery = @getDeliveryObject id
# change title
@defaultAddFleetOrderFormHeading = @getAddFleetOrderFormHeading().getHtml()
@getAddFleetOrderFormHeading().setHtml "Edit Fleet Delivery"
# change submit button text
@defaultAddFleetOrderButtonContainer =
@getAddFleetOrderButtonContainer().getAt(0).getText()
@getAddFleetOrderButtonContainer().getAt(0).setText "Save Changes"
@getCancelEditFleetOrderButtonContainer().setHidden false
@getFleet().getScrollable().getScroller().scrollTo 'top', 0
@getDeliveriesList().setHidden true
# populate form fields
@getFleetVinField().setValue delivery['vin']
@getFleetLicensePlateField().setValue delivery['license_plate']
@getFleetGallonsField().setValue delivery['gallons']
@getFleetGasTypeSelectField().setValue delivery['gas_type']
@getFleetIsTopTierField().setValue delivery['is_top_tier']
# add logic branch in addFleetOrder for @isEditing
#
#
exitEditMode: ->
@editingId = null
@getAddFleetOrderFormHeading().setHtml @defaultAddFleetOrderFormHeading
@getAddFleetOrderButtonContainer().getAt(0).setText(
@defaultAddFleetOrderButtonContainer
)
@getFleetVinField().reset()
@getFleetLicensePlateField().reset()
@getFleetGallonsField().reset()
@getCancelEditFleetOrderButtonContainer().setHidden true
@getDeliveriesList().setHidden false
saveFleetDelivery: (formData, id) ->
delivery = @getDeliveryObject id
# keep old id and timestamp_recorded
formData.id = id
formData.timestamp_recorded = delivery.timestamp_recorded
if delivery.savedLocally
console.log "update fleet delivery detials locally"
savedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
# savedDeliveriesWithThisOneRemoved
sdwtor = savedDeliveries.filter(
(x) -> (x.id isnt id)
)
formData.savedLocally = true
sdwtor.push formData
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify sdwtor
@exitEditMode()
@renderDeliveriesList() # consider doing a loadOrderslist here instead, if online
Ext.Viewport.setMasked false
util.alert "Fleet Delivery changes saved!", "Success", (->)
else
console.log "update fleet delivery detials remotely"
params = JSON.parse JSON.stringify(formData) # copy
params.version = util.VERSION_NUMBER
params.token = localStorage['purpleToken']
params.os = Ext.os.name
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/edit-delivery"
params: Ext.JSON.encode params
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@exitEditMode()
util.alert "Fleet Delivery changes saved!", "Success", (->)
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.alert "No internet connection.", "Unable to Connect", (->)
cancelEditFleetOrder: ->
@exitEditMode()
askDeleteFleetDelivery: (id, isLocal) ->
util.confirm(
"Are you sure you want to delete this delivery permanently?",
'Confirm',
(=> @doDeleteFleetDelivery id, isLocal),
null,
'Yes',
'No'
)
doDeleteFleetDelivery: (id, isLocal) ->
if isLocal
allSavedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
savedDeliveries = allSavedDeliveries.filter(
(x) => (x.id isnt id)
)
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify savedDeliveries
@renderDeliveriesList()
else
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/delete-delivery"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name # just an additional info
fleet_location_id: @getFleetAccountSelectField().getValue()
delivery_id: id
headers:
'Content-Type': 'application/json'
timeout: 30000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.alert "No internet connection.", "Unable to Connect", (->)
# response = Ext.JSON.decode response_obj.responseText
# console.log response
| 92105 | Ext.define 'Purple.controller.Fleet',
extend: 'Ext.app.Controller'
config:
refs:
fleet: 'fleet'
addFleetOrderFormHeading: '[ctype=addFleetOrderFormHeading]'
addFleetOrderButtonContainer: '[ctype=addFleetOrderButtonContainer]'
cancelEditFleetOrderButtonContainer: '[ctype=cancelEditFleetOrderButtonContainer]'
sendSavedDeliveriesButtonContainer: '[ctype=sendSavedDeliveriesButtonContainer]'
scanVinBarcodeButtonContainer: '[ctype=scanVinBarcodeButtonContainer]'
fleetAccountSelectField: '[ctype=fleetAccountSelectField]'
fleetVinField: '[ctype=fleetVinField]'
fleetLicensePlateField: '[ctype=fleetLicensePlateField]'
fleetGallonsField: '[ctype=fleetGallonsField]'
fleetGasTypeSelectField: '[ctype=fleetGasTypeSelectField]'
fleetIsTopTierField: '[ctype=fleetIsTopTierField]'
deliveriesList: '[ctype=deliveriesList]'
control:
fleet:
initialize: 'doInitialize'
addFleetOrderButtonContainer:
addFleetOrder: 'addFleetOrder'
cancelEditFleetOrderButtonContainer:
cancelEditFleetOrder: 'cancelEditFleetOrder'
scanVinBarcodeButtonContainer:
scanBarcode: 'scanBarcode'
fleetAccountSelectField:
fleetAccountSelectFieldChange: 'fleetAccountSelectFieldChange'
launch: ->
doInitialize: ->
@getAccounts()
getAccounts: ->
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/get-accounts"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name # just an additional info
lat: util.ctl('Main').lat
lng: util.ctl('Main').lng
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
localStorage['purpleFleetAccounts'] = JSON.stringify response.accounts
localStorage['purpleDefaultFleetAccount'] = response.default_account_id
@initFleetAccountSelectField()
else
@initFleetAccountSelectField()
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
@initFleetAccountSelectField()
initFleetAccountSelectField: ->
localStorage['purpleFleetAccounts'] ?= "[]"
localStorage['purpleDefaultFleetAccount'] ?= ""
accounts = JSON.parse localStorage['purpleFleetAccounts']
@currShowingFleetLocations = accounts
opts = []
for b,a of accounts
opts.push
text: "#{a.name}"
value: "#{a.id}"
@getFleetAccountSelectField().setOptions opts
@getFleetAccountSelectField().setValue(
localStorage['purpleDefaultFleetAccount']
)
@getFleetAccountSelectField().setDisabled no
fleetAccountSelectFieldChange: ->
if not @editingId?
@loadDeliveriesList()
getFleetLocationObjectById: (fleetLocationId) ->
@currShowingFleetLocations.filter(
(x) -> (x.id is fleetLocationId)
)[0]
addFleetOrder: (bypassLocationCheck = false) ->
values = @getFleet().getValues()
fleetLocationId = @getFleetAccountSelectField().getValue()
fleetLocationObject = @getFleetLocationObjectById fleetLocationId
if not bypassLocationCheck and
fleetLocationObject.lat and
util.ctl('Main').lat and
not util.withinRadius(
fleetLocationObject.lat,
fleetLocationObject.lng,
1500
)
util.confirm(
"You aren't near #{fleetLocationObject.name}. Are you sure this is the right location?",
"Confirm",
(=> @addFleetOrder true),
(->)
)
# refresh gps location to make sure this doesn't get too annoying
util.ctl('Main').updateLatlng()
else if values['gallons'] is "" or values['gallons'] <= 0
util.alert "'Gallons' must be a number greater than 0.", "Error", (->)
else if values['vin'] is "" and values['license_plate'] is ""
util.alert "You must enter either a VIN or License Plate / Stock #.", "Error", (->)
else
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
formData =
id: "local" + Math.floor(Math.random() * 999999999)
user_id: localStorage['purpleUserId']
account_id: fleetLocationId
vin: values['vin']
license_plate: values['license_plate'].toUpperCase()
gallons: values['gallons']
gas_type: values['gas_type']
is_top_tier: values['is_top_tier']
timestamp_recorded: Math.floor(Date.now() / 1000)
if @editingId
@saveFleetDelivery formData, @editingId
else
params = JSON.parse JSON.stringify(formData) # copy
params.version = util.VERSION_NUMBER
params.token = localStorage['purpleToken']
params.os = Ext.os.name
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/add-delivery"
params: Ext.JSON.encode params
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@getFleetVinField().reset()
@getFleetLicensePlateField().reset()
@getFleetGallonsField().reset()
util.alert "Fleet Delivery Added!", "Success", (->)
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.confirm(
"Save delivery details for later?",
"Unable to Connect",
(=>
localStorage['purpleSavedFleetDeliveries'] ?= "[]"
savedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
formData.savedLocally = true
savedDeliveries.push formData
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify savedDeliveries
@getFleetVinField().reset()
@getFleetLicensePlateField().reset()
@getFleetGallonsField().reset()
@renderDeliveriesList()))
sendSavedDeliveries: ->
fleetLocationId = @getFleetAccountSelectField().getValue()
localStorage['purpleSavedFleetDeliveries'] ?= "[]"
allSavedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
savedDeliveries = allSavedDeliveries.filter(
(x) => (x.account_id is fleetLocationId)
)
if savedDeliveries.length
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/add-deliveries"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name
fleet_location_id: fleetLocationId
deliveries: savedDeliveries
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
util.alert "#{savedDeliveries.length} fleet deliveries added!", "Success", (->)
remainingSavedDeliveries = allSavedDeliveries.filter(
(x) => (x.account_id isnt fleetLocationId)
)
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify remainingSavedDeliveries
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.alert "Saved deliveries not sent. Still saved for when you have a connection.", "Unable to Connect", (->)
else
util.alert "No saved deliveries.", "Error", (->)
scanBarcode: ->
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
cordova.plugins.barcodeScanner.scan ((result) =>
# alert("We got a barcode\n" +
# "Result: " + result.text + "\n" +
# "Format: " + result.format + "\n" +
# "Cancelled: " + result.cancelled);
if not result.cancelled
@getFleetVinField().setValue result.text.substr(-17)
else
alert "Not sure if actually a VIN number."
Ext.Viewport.setMasked false
), ((error) =>
alert 'Scanning failed: ' + error
Ext.Viewport.setMasked false
), {
'preferFrontCamera': false
'showFlipCameraButton': false
'prompt': 'Place a barcode inside the scan area'
#'formats': 'CODE_128'
'formats': 'DATA_MATRIX,CODE_128,CODE_39,QR_CODE'
'orientation': 'portrait' # todo: only works on Android, problematic in iOS because you often have to tilt the app to see inside of car door
}
loadDeliveriesList: ->
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
# recent remote orders
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/get-deliveries"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name # just an additional info
fleet_location_id: @getFleetAccountSelectField().getValue()
headers:
'Content-Type': 'application/json'
timeout: 7000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
@renderDeliveriesList()
renderDeliveriesList: (deliveries) ->
# prepend with locally stored deliveries
localStorage['purpleSavedFleetDeliveries'] ?= "[]"
allSavedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
savedDeliveries = allSavedDeliveries.filter(
(x) => (x.account_id is @getFleetAccountSelectField().getValue())
).sort (a, b) -> b.timestamp_recorded - a.timestamp_recorded
if deliveries?
onlyShowLocal = false
deliveries = savedDeliveries.concat deliveries
else
onlyShowLocal = true
deliveries = savedDeliveries
@currShowingDeliveries = deliveries
list = @getDeliveriesList()
if not list?
return
list.removeAll yes, yes
if deliveries.length is 0
list.add
xtype: 'component'
flex: 0
html: """
No deliveries at this location.
"""
cls: "loose-text"
style: "text-align: center;"
else
if savedDeliveries.length
list.add
xtype: 'container'
ctype: 'sendSavedDeliveriesButtonContainer'
flex: 0
height: 70
width: '100%'
padding: '0 0 15 0'
layout:
type: 'vbox'
pack: 'center'
align: 'center'
cls: 'smaller-button-pop'
items: [
{
xtype: 'button'
ui: 'action'
cls: [
'button-pop'
'button-pop-orange'
]
text: 'Send Saved Deliveries'
flex: 0
handler: => @sendSavedDeliveries()
}
]
list.add
xtype: 'component'
flex: 0
style:
color: '#555555'
fontSize: '14px'
html: 'Tap on a delivery to edit or delete.'
for o in deliveries
cls = [
'bottom-margin'
'order-list-item'
]
if o.savedLocally
cls.push 'highlighted'
list.add
xtype: 'textfield'
id: "oid_#{o.id}"
flex: 0
label: """
<span>
#{Ext.util.Format.date(
new Date(
if o.timestamp_recorded?
o.timestamp_recorded * 1000
else
o.timestamp_created
),
"n/j g:i a"
)} - #{o.license_plate} - $#{util.centsToDollars o.total_price}
</span>
<br /><span class="subtext">
#{o.gallons} gal #{if o.vin then "- " + o.vin else ""}
</span>
"""
labelWidth: '100%'
cls: cls
disabled: yes
listeners:
initialize: (field) =>
field.element.on 'tap', =>
oid = field.getId().split('_')[1]
field.addCls 'order-edit-mode'
setTimeout (=>
delivery = @getDeliveryObject oid
util.confirmDialog "",
((index) => switch index
when 1
@askDeleteFleetDelivery oid, (oid.substring(0, 5) is "local")
field.removeCls 'order-edit-mode'
when 2
@editFleetDelivery oid, (oid.substring(0, 5) is "local")
field.removeCls 'order-edit-mode'
else
field.removeCls 'order-edit-mode'
),
Ext.util.Format.date(
new Date(
if delivery.timestamp_recorded?
delivery.timestamp_recorded * 1000
else
delivery.timestamp_created
),
"n/j g:i a"
),
["Delete Delivery",
"Edit Delivery",
"Cancel"]
), 100 # wait for UI to update
# get the delivery details of an order that is currently showing in the deliveries list
# can be local or remote
getDeliveryObject: (id) ->
@currShowingDeliveries.filter(
(x) -> (x.id is id)
)[0]
editFleetDelivery: (id, isLocal) ->
@editingId = id
delivery = @getDeliveryObject id
# change title
@defaultAddFleetOrderFormHeading = @getAddFleetOrderFormHeading().getHtml()
@getAddFleetOrderFormHeading().setHtml "Edit Fleet Delivery"
# change submit button text
@defaultAddFleetOrderButtonContainer =
@getAddFleetOrderButtonContainer().getAt(0).getText()
@getAddFleetOrderButtonContainer().getAt(0).setText "Save Changes"
@getCancelEditFleetOrderButtonContainer().setHidden false
@getFleet().getScrollable().getScroller().scrollTo 'top', 0
@getDeliveriesList().setHidden true
# populate form fields
@getFleetVinField().setValue delivery['vin']
@getFleetLicensePlateField().setValue delivery['license_plate']
@getFleetGallonsField().setValue delivery['gallons']
@getFleetGasTypeSelectField().setValue delivery['gas_type']
@getFleetIsTopTierField().setValue delivery['is_top_tier']
# add logic branch in addFleetOrder for @isEditing
#
#
exitEditMode: ->
@editingId = null
@getAddFleetOrderFormHeading().setHtml @defaultAddFleetOrderFormHeading
@getAddFleetOrderButtonContainer().getAt(0).setText(
@defaultAddFleetOrderButtonContainer
)
@getFleetVinField().reset()
@getFleetLicensePlateField().reset()
@getFleetGallonsField().reset()
@getCancelEditFleetOrderButtonContainer().setHidden true
@getDeliveriesList().setHidden false
saveFleetDelivery: (formData, id) ->
delivery = @getDeliveryObject id
# keep old id and timestamp_recorded
formData.id = id
formData.timestamp_recorded = delivery.timestamp_recorded
if delivery.savedLocally
console.log "update fleet delivery detials locally"
savedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
# savedDeliveriesWithThisOneRemoved
sdwtor = savedDeliveries.filter(
(x) -> (x.id isnt id)
)
formData.savedLocally = true
sdwtor.push formData
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify sdwtor
@exitEditMode()
@renderDeliveriesList() # consider doing a loadOrderslist here instead, if online
Ext.Viewport.setMasked false
util.alert "Fleet Delivery changes saved!", "Success", (->)
else
console.log "update fleet delivery detials remotely"
params = JSON.parse JSON.stringify(formData) # copy
params.version = util.VERSION_NUMBER
params.token = localStorage['<KEY>']
params.os = Ext.os.name
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/edit-delivery"
params: Ext.JSON.encode params
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@exitEditMode()
util.alert "Fleet Delivery changes saved!", "Success", (->)
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.alert "No internet connection.", "Unable to Connect", (->)
cancelEditFleetOrder: ->
@exitEditMode()
askDeleteFleetDelivery: (id, isLocal) ->
util.confirm(
"Are you sure you want to delete this delivery permanently?",
'Confirm',
(=> @doDeleteFleetDelivery id, isLocal),
null,
'Yes',
'No'
)
doDeleteFleetDelivery: (id, isLocal) ->
if isLocal
allSavedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
savedDeliveries = allSavedDeliveries.filter(
(x) => (x.id isnt id)
)
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify savedDeliveries
@renderDeliveriesList()
else
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/delete-delivery"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name # just an additional info
fleet_location_id: @getFleetAccountSelectField().getValue()
delivery_id: id
headers:
'Content-Type': 'application/json'
timeout: 30000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.alert "No internet connection.", "Unable to Connect", (->)
# response = Ext.JSON.decode response_obj.responseText
# console.log response
| true | Ext.define 'Purple.controller.Fleet',
extend: 'Ext.app.Controller'
config:
refs:
fleet: 'fleet'
addFleetOrderFormHeading: '[ctype=addFleetOrderFormHeading]'
addFleetOrderButtonContainer: '[ctype=addFleetOrderButtonContainer]'
cancelEditFleetOrderButtonContainer: '[ctype=cancelEditFleetOrderButtonContainer]'
sendSavedDeliveriesButtonContainer: '[ctype=sendSavedDeliveriesButtonContainer]'
scanVinBarcodeButtonContainer: '[ctype=scanVinBarcodeButtonContainer]'
fleetAccountSelectField: '[ctype=fleetAccountSelectField]'
fleetVinField: '[ctype=fleetVinField]'
fleetLicensePlateField: '[ctype=fleetLicensePlateField]'
fleetGallonsField: '[ctype=fleetGallonsField]'
fleetGasTypeSelectField: '[ctype=fleetGasTypeSelectField]'
fleetIsTopTierField: '[ctype=fleetIsTopTierField]'
deliveriesList: '[ctype=deliveriesList]'
control:
fleet:
initialize: 'doInitialize'
addFleetOrderButtonContainer:
addFleetOrder: 'addFleetOrder'
cancelEditFleetOrderButtonContainer:
cancelEditFleetOrder: 'cancelEditFleetOrder'
scanVinBarcodeButtonContainer:
scanBarcode: 'scanBarcode'
fleetAccountSelectField:
fleetAccountSelectFieldChange: 'fleetAccountSelectFieldChange'
launch: ->
doInitialize: ->
@getAccounts()
getAccounts: ->
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/get-accounts"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name # just an additional info
lat: util.ctl('Main').lat
lng: util.ctl('Main').lng
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
localStorage['purpleFleetAccounts'] = JSON.stringify response.accounts
localStorage['purpleDefaultFleetAccount'] = response.default_account_id
@initFleetAccountSelectField()
else
@initFleetAccountSelectField()
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
@initFleetAccountSelectField()
initFleetAccountSelectField: ->
localStorage['purpleFleetAccounts'] ?= "[]"
localStorage['purpleDefaultFleetAccount'] ?= ""
accounts = JSON.parse localStorage['purpleFleetAccounts']
@currShowingFleetLocations = accounts
opts = []
for b,a of accounts
opts.push
text: "#{a.name}"
value: "#{a.id}"
@getFleetAccountSelectField().setOptions opts
@getFleetAccountSelectField().setValue(
localStorage['purpleDefaultFleetAccount']
)
@getFleetAccountSelectField().setDisabled no
fleetAccountSelectFieldChange: ->
if not @editingId?
@loadDeliveriesList()
getFleetLocationObjectById: (fleetLocationId) ->
@currShowingFleetLocations.filter(
(x) -> (x.id is fleetLocationId)
)[0]
addFleetOrder: (bypassLocationCheck = false) ->
values = @getFleet().getValues()
fleetLocationId = @getFleetAccountSelectField().getValue()
fleetLocationObject = @getFleetLocationObjectById fleetLocationId
if not bypassLocationCheck and
fleetLocationObject.lat and
util.ctl('Main').lat and
not util.withinRadius(
fleetLocationObject.lat,
fleetLocationObject.lng,
1500
)
util.confirm(
"You aren't near #{fleetLocationObject.name}. Are you sure this is the right location?",
"Confirm",
(=> @addFleetOrder true),
(->)
)
# refresh gps location to make sure this doesn't get too annoying
util.ctl('Main').updateLatlng()
else if values['gallons'] is "" or values['gallons'] <= 0
util.alert "'Gallons' must be a number greater than 0.", "Error", (->)
else if values['vin'] is "" and values['license_plate'] is ""
util.alert "You must enter either a VIN or License Plate / Stock #.", "Error", (->)
else
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
formData =
id: "local" + Math.floor(Math.random() * 999999999)
user_id: localStorage['purpleUserId']
account_id: fleetLocationId
vin: values['vin']
license_plate: values['license_plate'].toUpperCase()
gallons: values['gallons']
gas_type: values['gas_type']
is_top_tier: values['is_top_tier']
timestamp_recorded: Math.floor(Date.now() / 1000)
if @editingId
@saveFleetDelivery formData, @editingId
else
params = JSON.parse JSON.stringify(formData) # copy
params.version = util.VERSION_NUMBER
params.token = localStorage['purpleToken']
params.os = Ext.os.name
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/add-delivery"
params: Ext.JSON.encode params
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@getFleetVinField().reset()
@getFleetLicensePlateField().reset()
@getFleetGallonsField().reset()
util.alert "Fleet Delivery Added!", "Success", (->)
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.confirm(
"Save delivery details for later?",
"Unable to Connect",
(=>
localStorage['purpleSavedFleetDeliveries'] ?= "[]"
savedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
formData.savedLocally = true
savedDeliveries.push formData
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify savedDeliveries
@getFleetVinField().reset()
@getFleetLicensePlateField().reset()
@getFleetGallonsField().reset()
@renderDeliveriesList()))
sendSavedDeliveries: ->
fleetLocationId = @getFleetAccountSelectField().getValue()
localStorage['purpleSavedFleetDeliveries'] ?= "[]"
allSavedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
savedDeliveries = allSavedDeliveries.filter(
(x) => (x.account_id is fleetLocationId)
)
if savedDeliveries.length
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/add-deliveries"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name
fleet_location_id: fleetLocationId
deliveries: savedDeliveries
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
util.alert "#{savedDeliveries.length} fleet deliveries added!", "Success", (->)
remainingSavedDeliveries = allSavedDeliveries.filter(
(x) => (x.account_id isnt fleetLocationId)
)
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify remainingSavedDeliveries
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.alert "Saved deliveries not sent. Still saved for when you have a connection.", "Unable to Connect", (->)
else
util.alert "No saved deliveries.", "Error", (->)
scanBarcode: ->
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
cordova.plugins.barcodeScanner.scan ((result) =>
# alert("We got a barcode\n" +
# "Result: " + result.text + "\n" +
# "Format: " + result.format + "\n" +
# "Cancelled: " + result.cancelled);
if not result.cancelled
@getFleetVinField().setValue result.text.substr(-17)
else
alert "Not sure if actually a VIN number."
Ext.Viewport.setMasked false
), ((error) =>
alert 'Scanning failed: ' + error
Ext.Viewport.setMasked false
), {
'preferFrontCamera': false
'showFlipCameraButton': false
'prompt': 'Place a barcode inside the scan area'
#'formats': 'CODE_128'
'formats': 'DATA_MATRIX,CODE_128,CODE_39,QR_CODE'
'orientation': 'portrait' # todo: only works on Android, problematic in iOS because you often have to tilt the app to see inside of car door
}
loadDeliveriesList: ->
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
# recent remote orders
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/get-deliveries"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name # just an additional info
fleet_location_id: @getFleetAccountSelectField().getValue()
headers:
'Content-Type': 'application/json'
timeout: 7000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
@renderDeliveriesList()
renderDeliveriesList: (deliveries) ->
# prepend with locally stored deliveries
localStorage['purpleSavedFleetDeliveries'] ?= "[]"
allSavedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
savedDeliveries = allSavedDeliveries.filter(
(x) => (x.account_id is @getFleetAccountSelectField().getValue())
).sort (a, b) -> b.timestamp_recorded - a.timestamp_recorded
if deliveries?
onlyShowLocal = false
deliveries = savedDeliveries.concat deliveries
else
onlyShowLocal = true
deliveries = savedDeliveries
@currShowingDeliveries = deliveries
list = @getDeliveriesList()
if not list?
return
list.removeAll yes, yes
if deliveries.length is 0
list.add
xtype: 'component'
flex: 0
html: """
No deliveries at this location.
"""
cls: "loose-text"
style: "text-align: center;"
else
if savedDeliveries.length
list.add
xtype: 'container'
ctype: 'sendSavedDeliveriesButtonContainer'
flex: 0
height: 70
width: '100%'
padding: '0 0 15 0'
layout:
type: 'vbox'
pack: 'center'
align: 'center'
cls: 'smaller-button-pop'
items: [
{
xtype: 'button'
ui: 'action'
cls: [
'button-pop'
'button-pop-orange'
]
text: 'Send Saved Deliveries'
flex: 0
handler: => @sendSavedDeliveries()
}
]
list.add
xtype: 'component'
flex: 0
style:
color: '#555555'
fontSize: '14px'
html: 'Tap on a delivery to edit or delete.'
for o in deliveries
cls = [
'bottom-margin'
'order-list-item'
]
if o.savedLocally
cls.push 'highlighted'
list.add
xtype: 'textfield'
id: "oid_#{o.id}"
flex: 0
label: """
<span>
#{Ext.util.Format.date(
new Date(
if o.timestamp_recorded?
o.timestamp_recorded * 1000
else
o.timestamp_created
),
"n/j g:i a"
)} - #{o.license_plate} - $#{util.centsToDollars o.total_price}
</span>
<br /><span class="subtext">
#{o.gallons} gal #{if o.vin then "- " + o.vin else ""}
</span>
"""
labelWidth: '100%'
cls: cls
disabled: yes
listeners:
initialize: (field) =>
field.element.on 'tap', =>
oid = field.getId().split('_')[1]
field.addCls 'order-edit-mode'
setTimeout (=>
delivery = @getDeliveryObject oid
util.confirmDialog "",
((index) => switch index
when 1
@askDeleteFleetDelivery oid, (oid.substring(0, 5) is "local")
field.removeCls 'order-edit-mode'
when 2
@editFleetDelivery oid, (oid.substring(0, 5) is "local")
field.removeCls 'order-edit-mode'
else
field.removeCls 'order-edit-mode'
),
Ext.util.Format.date(
new Date(
if delivery.timestamp_recorded?
delivery.timestamp_recorded * 1000
else
delivery.timestamp_created
),
"n/j g:i a"
),
["Delete Delivery",
"Edit Delivery",
"Cancel"]
), 100 # wait for UI to update
# get the delivery details of an order that is currently showing in the deliveries list
# can be local or remote
getDeliveryObject: (id) ->
@currShowingDeliveries.filter(
(x) -> (x.id is id)
)[0]
editFleetDelivery: (id, isLocal) ->
@editingId = id
delivery = @getDeliveryObject id
# change title
@defaultAddFleetOrderFormHeading = @getAddFleetOrderFormHeading().getHtml()
@getAddFleetOrderFormHeading().setHtml "Edit Fleet Delivery"
# change submit button text
@defaultAddFleetOrderButtonContainer =
@getAddFleetOrderButtonContainer().getAt(0).getText()
@getAddFleetOrderButtonContainer().getAt(0).setText "Save Changes"
@getCancelEditFleetOrderButtonContainer().setHidden false
@getFleet().getScrollable().getScroller().scrollTo 'top', 0
@getDeliveriesList().setHidden true
# populate form fields
@getFleetVinField().setValue delivery['vin']
@getFleetLicensePlateField().setValue delivery['license_plate']
@getFleetGallonsField().setValue delivery['gallons']
@getFleetGasTypeSelectField().setValue delivery['gas_type']
@getFleetIsTopTierField().setValue delivery['is_top_tier']
# add logic branch in addFleetOrder for @isEditing
#
#
exitEditMode: ->
@editingId = null
@getAddFleetOrderFormHeading().setHtml @defaultAddFleetOrderFormHeading
@getAddFleetOrderButtonContainer().getAt(0).setText(
@defaultAddFleetOrderButtonContainer
)
@getFleetVinField().reset()
@getFleetLicensePlateField().reset()
@getFleetGallonsField().reset()
@getCancelEditFleetOrderButtonContainer().setHidden true
@getDeliveriesList().setHidden false
saveFleetDelivery: (formData, id) ->
delivery = @getDeliveryObject id
# keep old id and timestamp_recorded
formData.id = id
formData.timestamp_recorded = delivery.timestamp_recorded
if delivery.savedLocally
console.log "update fleet delivery detials locally"
savedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
# savedDeliveriesWithThisOneRemoved
sdwtor = savedDeliveries.filter(
(x) -> (x.id isnt id)
)
formData.savedLocally = true
sdwtor.push formData
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify sdwtor
@exitEditMode()
@renderDeliveriesList() # consider doing a loadOrderslist here instead, if online
Ext.Viewport.setMasked false
util.alert "Fleet Delivery changes saved!", "Success", (->)
else
console.log "update fleet delivery detials remotely"
params = JSON.parse JSON.stringify(formData) # copy
params.version = util.VERSION_NUMBER
params.token = localStorage['PI:KEY:<KEY>END_PI']
params.os = Ext.os.name
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/edit-delivery"
params: Ext.JSON.encode params
headers:
'Content-Type': 'application/json'
timeout: 15000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@exitEditMode()
util.alert "Fleet Delivery changes saved!", "Success", (->)
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.alert "No internet connection.", "Unable to Connect", (->)
cancelEditFleetOrder: ->
@exitEditMode()
askDeleteFleetDelivery: (id, isLocal) ->
util.confirm(
"Are you sure you want to delete this delivery permanently?",
'Confirm',
(=> @doDeleteFleetDelivery id, isLocal),
null,
'Yes',
'No'
)
doDeleteFleetDelivery: (id, isLocal) ->
if isLocal
allSavedDeliveries = JSON.parse localStorage['purpleSavedFleetDeliveries']
savedDeliveries = allSavedDeliveries.filter(
(x) => (x.id isnt id)
)
localStorage['purpleSavedFleetDeliveries'] = JSON.stringify savedDeliveries
@renderDeliveriesList()
else
Ext.Viewport.setMasked
xtype: 'loadmask'
message: ''
Ext.Ajax.request
url: "#{util.WEB_SERVICE_BASE_URL}fleet/delete-delivery"
params: Ext.JSON.encode
version: util.VERSION_NUMBER
user_id: localStorage['purpleUserId']
token: localStorage['purpleToken']
os: Ext.os.name # just an additional info
fleet_location_id: @getFleetAccountSelectField().getValue()
delivery_id: id
headers:
'Content-Type': 'application/json'
timeout: 30000
method: 'POST'
scope: this
success: (response_obj) ->
Ext.Viewport.setMasked false
response = Ext.JSON.decode response_obj.responseText
if response.success
@renderDeliveriesList response.deliveries
else
util.alert response.message, "Error", (->)
failure: (response_obj) ->
Ext.Viewport.setMasked false
util.alert "No internet connection.", "Unable to Connect", (->)
# response = Ext.JSON.decode response_obj.responseText
# console.log response
|
[
{
"context": "###\n* @author Andrew D.Laptev <a.d.laptev@gmail.com>\n###\n\n###global describe, b",
"end": 29,
"score": 0.9998908638954163,
"start": 14,
"tag": "NAME",
"value": "Andrew D.Laptev"
},
{
"context": "###\n* @author Andrew D.Laptev <a.d.laptev@gmail.com>\n###\n\n###global ... | test/test.get.coffee | agsh/boobst | 16 | ###
* @author Andrew D.Laptev <a.d.laptev@gmail.com>
###
###global describe, beforeEach, afterEach, it###
'use strict'
assert = require 'assert'
boobst = require '../boobst'
BoobstSocket = boobst.BoobstSocket
GLOBAL = '^testObject';
describe 'get', () ->
this.timeout 1000
bs = new BoobstSocket(require './test.config')
# bs.on('debug', console.log); # uncomment for debug messages
beforeEach (done) ->
bs.connect (err) ->
throw err if err
bs.kill GLOBAL, (err) ->
throw err if err
done()
afterEach (done) ->
bs.kill GLOBAL, (err) ->
throw err if err
bs.disconnect () ->
done()
describe '#get', () ->
object = {
"array": ["a", "ab", "a\"bc"]
"object":
"a": "a"
"b": 2
"boolean": true
"number": 42
"quotes": 'some"thing'
}
subscript = ['a', 'b']
nodeData = 'node data'
fulfill = (nd, callback, sub) ->
bs.set GLOBAL, sub or [], object, (err) ->
assert.equal err, null
if nd
bs.set GLOBAL, sub or [], nodeData, (err) ->
assert.equal err, null
callback()
else
callback()
it 'sould return error if we don\'t have data in global', (done) ->
bs.get GLOBAL, [], (err, data) ->
assert.notEqual err, null
assert.equal data, undefined
done()
it 'should return node data if we have $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, [], (err, data) ->
assert.equal err, null
assert.equal data, nodeData
fulfill true, () ->
bs.get GLOBAL, (err, data) ->
assert.equal err, null
assert.equal data, nodeData
done()
it 'should return json if we have $data(node)=10', (done) ->
fulfill false, () ->
bs.get GLOBAL, [], (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
fulfill false, () ->
bs.get GLOBAL, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
it 'should return json if we have forceJSON flag and $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, [], true, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
fulfill true, () ->
bs.get GLOBAL, [], true, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
it '(with subscripts) should return node data if we have $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, subscript, (err, data) ->
assert.equal err, null
assert.equal data, nodeData
done()
, subscript
it '(with subscripts) should return json if we have $data(node)=10', (done) ->
fulfill false, () ->
bs.get GLOBAL, subscript, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
, subscript
it '(with subscripts) should return json if we have forceJSON flag and $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, subscript, true, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
, subscript | 34982 | ###
* @author <NAME> <<EMAIL>>
###
###global describe, beforeEach, afterEach, it###
'use strict'
assert = require 'assert'
boobst = require '../boobst'
BoobstSocket = boobst.BoobstSocket
GLOBAL = '^testObject';
describe 'get', () ->
this.timeout 1000
bs = new BoobstSocket(require './test.config')
# bs.on('debug', console.log); # uncomment for debug messages
beforeEach (done) ->
bs.connect (err) ->
throw err if err
bs.kill GLOBAL, (err) ->
throw err if err
done()
afterEach (done) ->
bs.kill GLOBAL, (err) ->
throw err if err
bs.disconnect () ->
done()
describe '#get', () ->
object = {
"array": ["a", "ab", "a\"bc"]
"object":
"a": "a"
"b": 2
"boolean": true
"number": 42
"quotes": 'some"thing'
}
subscript = ['a', 'b']
nodeData = 'node data'
fulfill = (nd, callback, sub) ->
bs.set GLOBAL, sub or [], object, (err) ->
assert.equal err, null
if nd
bs.set GLOBAL, sub or [], nodeData, (err) ->
assert.equal err, null
callback()
else
callback()
it 'sould return error if we don\'t have data in global', (done) ->
bs.get GLOBAL, [], (err, data) ->
assert.notEqual err, null
assert.equal data, undefined
done()
it 'should return node data if we have $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, [], (err, data) ->
assert.equal err, null
assert.equal data, nodeData
fulfill true, () ->
bs.get GLOBAL, (err, data) ->
assert.equal err, null
assert.equal data, nodeData
done()
it 'should return json if we have $data(node)=10', (done) ->
fulfill false, () ->
bs.get GLOBAL, [], (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
fulfill false, () ->
bs.get GLOBAL, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
it 'should return json if we have forceJSON flag and $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, [], true, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
fulfill true, () ->
bs.get GLOBAL, [], true, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
it '(with subscripts) should return node data if we have $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, subscript, (err, data) ->
assert.equal err, null
assert.equal data, nodeData
done()
, subscript
it '(with subscripts) should return json if we have $data(node)=10', (done) ->
fulfill false, () ->
bs.get GLOBAL, subscript, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
, subscript
it '(with subscripts) should return json if we have forceJSON flag and $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, subscript, true, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
, subscript | true | ###
* @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
###global describe, beforeEach, afterEach, it###
'use strict'
assert = require 'assert'
boobst = require '../boobst'
BoobstSocket = boobst.BoobstSocket
GLOBAL = '^testObject';
describe 'get', () ->
this.timeout 1000
bs = new BoobstSocket(require './test.config')
# bs.on('debug', console.log); # uncomment for debug messages
beforeEach (done) ->
bs.connect (err) ->
throw err if err
bs.kill GLOBAL, (err) ->
throw err if err
done()
afterEach (done) ->
bs.kill GLOBAL, (err) ->
throw err if err
bs.disconnect () ->
done()
describe '#get', () ->
object = {
"array": ["a", "ab", "a\"bc"]
"object":
"a": "a"
"b": 2
"boolean": true
"number": 42
"quotes": 'some"thing'
}
subscript = ['a', 'b']
nodeData = 'node data'
fulfill = (nd, callback, sub) ->
bs.set GLOBAL, sub or [], object, (err) ->
assert.equal err, null
if nd
bs.set GLOBAL, sub or [], nodeData, (err) ->
assert.equal err, null
callback()
else
callback()
it 'sould return error if we don\'t have data in global', (done) ->
bs.get GLOBAL, [], (err, data) ->
assert.notEqual err, null
assert.equal data, undefined
done()
it 'should return node data if we have $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, [], (err, data) ->
assert.equal err, null
assert.equal data, nodeData
fulfill true, () ->
bs.get GLOBAL, (err, data) ->
assert.equal err, null
assert.equal data, nodeData
done()
it 'should return json if we have $data(node)=10', (done) ->
fulfill false, () ->
bs.get GLOBAL, [], (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
fulfill false, () ->
bs.get GLOBAL, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
it 'should return json if we have forceJSON flag and $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, [], true, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
fulfill true, () ->
bs.get GLOBAL, [], true, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
it '(with subscripts) should return node data if we have $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, subscript, (err, data) ->
assert.equal err, null
assert.equal data, nodeData
done()
, subscript
it '(with subscripts) should return json if we have $data(node)=10', (done) ->
fulfill false, () ->
bs.get GLOBAL, subscript, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
, subscript
it '(with subscripts) should return json if we have forceJSON flag and $data(node)=11', (done) ->
fulfill true, () ->
bs.get GLOBAL, subscript, true, (err, data) ->
assert.equal err, null
assert.deepEqual JSON.parse(data), object
done()
, subscript |
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9993944764137268,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/pummel/test-net-pause.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
net = require("net")
N = 200
recv = ""
chars_recved = 0
server = net.createServer((connection) ->
write = (j) ->
if j >= N
connection.end()
return
setTimeout (->
connection.write "C"
write j + 1
return
), 10
return
write 0
return
)
server.on "listening", ->
client = net.createConnection(common.PORT)
client.setEncoding "ascii"
client.on "data", (d) ->
common.print d
recv += d
return
setTimeout (->
chars_recved = recv.length
console.log "pause at: " + chars_recved
assert.equal true, chars_recved > 1
client.pause()
setTimeout (->
console.log "resume at: " + chars_recved
assert.equal chars_recved, recv.length
client.resume()
setTimeout (->
chars_recved = recv.length
console.log "pause at: " + chars_recved
client.pause()
setTimeout (->
console.log "resume at: " + chars_recved
assert.equal chars_recved, recv.length
client.resume()
return
), 500
return
), 500
return
), 500
return
), 500
client.on "end", ->
server.close()
client.end()
return
return
server.listen common.PORT
process.on "exit", ->
assert.equal N, recv.length
common.debug "Exit"
return
| 107113 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
net = require("net")
N = 200
recv = ""
chars_recved = 0
server = net.createServer((connection) ->
write = (j) ->
if j >= N
connection.end()
return
setTimeout (->
connection.write "C"
write j + 1
return
), 10
return
write 0
return
)
server.on "listening", ->
client = net.createConnection(common.PORT)
client.setEncoding "ascii"
client.on "data", (d) ->
common.print d
recv += d
return
setTimeout (->
chars_recved = recv.length
console.log "pause at: " + chars_recved
assert.equal true, chars_recved > 1
client.pause()
setTimeout (->
console.log "resume at: " + chars_recved
assert.equal chars_recved, recv.length
client.resume()
setTimeout (->
chars_recved = recv.length
console.log "pause at: " + chars_recved
client.pause()
setTimeout (->
console.log "resume at: " + chars_recved
assert.equal chars_recved, recv.length
client.resume()
return
), 500
return
), 500
return
), 500
return
), 500
client.on "end", ->
server.close()
client.end()
return
return
server.listen common.PORT
process.on "exit", ->
assert.equal N, recv.length
common.debug "Exit"
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
net = require("net")
N = 200
recv = ""
chars_recved = 0
server = net.createServer((connection) ->
write = (j) ->
if j >= N
connection.end()
return
setTimeout (->
connection.write "C"
write j + 1
return
), 10
return
write 0
return
)
server.on "listening", ->
client = net.createConnection(common.PORT)
client.setEncoding "ascii"
client.on "data", (d) ->
common.print d
recv += d
return
setTimeout (->
chars_recved = recv.length
console.log "pause at: " + chars_recved
assert.equal true, chars_recved > 1
client.pause()
setTimeout (->
console.log "resume at: " + chars_recved
assert.equal chars_recved, recv.length
client.resume()
setTimeout (->
chars_recved = recv.length
console.log "pause at: " + chars_recved
client.pause()
setTimeout (->
console.log "resume at: " + chars_recved
assert.equal chars_recved, recv.length
client.resume()
return
), 500
return
), 500
return
), 500
return
), 500
client.on "end", ->
server.close()
client.end()
return
return
server.listen common.PORT
process.on "exit", ->
assert.equal N, recv.length
common.debug "Exit"
return
|
[
{
"context": " url:[]\n id:[]\n locale:'es-mx'\n name:\"Boilerplate\"\n userInfo:null\n\n constructor:(@callback=nu",
"end": 109,
"score": 0.9527460932731628,
"start": 98,
"tag": "NAME",
"value": "Boilerplate"
},
{
"context": "son\"\n data:\n ... | src/Data.coffee | amorino/bones | 0 | Requester = require "lib/requester"
class Data
url:[]
id:[]
locale:'es-mx'
name:"Boilerplate"
userInfo:null
constructor:(@callback=null)->
@init()
# @getUserInfo()
init:=>
if App.LIVE
@url.endPoint = ""
@id.FB = ""
@id.GA = ""
else
@url.endPoint = ""
@id.FB = ""
@id.GA = ""
# @getUserInfo()
@callback?()
null
getUserInfo:=>
Requester.request
type: 'POST'
url: @url.endPoint + "/gallery"
dataType: "json"
data:
"email" : "foo@bar.com"
"raceNumber": "12345"
done: (e)=>
@isRequestUserInfo = true
@onRequestDone(e,"userInfo")
fail: (e)=>
@isRequestingSuggestion = false
@onRequestError(e)
null
onRequestDone:(e, type=null) =>
if type is "userInfo"
@userInfo = e
null
onRequestError:()->
console.log "something is wrong"
module.exports = Data
| 55146 | Requester = require "lib/requester"
class Data
url:[]
id:[]
locale:'es-mx'
name:"<NAME>"
userInfo:null
constructor:(@callback=null)->
@init()
# @getUserInfo()
init:=>
if App.LIVE
@url.endPoint = ""
@id.FB = ""
@id.GA = ""
else
@url.endPoint = ""
@id.FB = ""
@id.GA = ""
# @getUserInfo()
@callback?()
null
getUserInfo:=>
Requester.request
type: 'POST'
url: @url.endPoint + "/gallery"
dataType: "json"
data:
"email" : "<EMAIL>"
"raceNumber": "12345"
done: (e)=>
@isRequestUserInfo = true
@onRequestDone(e,"userInfo")
fail: (e)=>
@isRequestingSuggestion = false
@onRequestError(e)
null
onRequestDone:(e, type=null) =>
if type is "userInfo"
@userInfo = e
null
onRequestError:()->
console.log "something is wrong"
module.exports = Data
| true | Requester = require "lib/requester"
class Data
url:[]
id:[]
locale:'es-mx'
name:"PI:NAME:<NAME>END_PI"
userInfo:null
constructor:(@callback=null)->
@init()
# @getUserInfo()
init:=>
if App.LIVE
@url.endPoint = ""
@id.FB = ""
@id.GA = ""
else
@url.endPoint = ""
@id.FB = ""
@id.GA = ""
# @getUserInfo()
@callback?()
null
getUserInfo:=>
Requester.request
type: 'POST'
url: @url.endPoint + "/gallery"
dataType: "json"
data:
"email" : "PI:EMAIL:<EMAIL>END_PI"
"raceNumber": "12345"
done: (e)=>
@isRequestUserInfo = true
@onRequestDone(e,"userInfo")
fail: (e)=>
@isRequestingSuggestion = false
@onRequestError(e)
null
onRequestDone:(e, type=null) =>
if type is "userInfo"
@userInfo = e
null
onRequestError:()->
console.log "something is wrong"
module.exports = Data
|
[
{
"context": "\n email: $scope.createEmail\n password: $scope.createPassword\n",
"end": 458,
"score": 0.9505672454833984,
"start": 438,
"tag": "PASSWORD",
"value": "scope.createPassword"
}
] | www/client/user/controllers/login.coffee | paulmolin42/online-swiss-knife | 0 | angular.module 'online-swiss-knife.user'
.controller 'login', ($scope, $state, ImprovedUser) ->
$scope.login = ->
ImprovedUser.login
email: $scope.loginEmail
password: $scope.loginPassword
.$promise
.then () ->
$state.go 'home'
$scope.createUser = ->
ImprovedUser.create
firstName: $scope.createFirstName
lastName: $scope.createLastName
email: $scope.createEmail
password: $scope.createPassword
| 116259 | angular.module 'online-swiss-knife.user'
.controller 'login', ($scope, $state, ImprovedUser) ->
$scope.login = ->
ImprovedUser.login
email: $scope.loginEmail
password: $scope.loginPassword
.$promise
.then () ->
$state.go 'home'
$scope.createUser = ->
ImprovedUser.create
firstName: $scope.createFirstName
lastName: $scope.createLastName
email: $scope.createEmail
password: $<PASSWORD>
| true | angular.module 'online-swiss-knife.user'
.controller 'login', ($scope, $state, ImprovedUser) ->
$scope.login = ->
ImprovedUser.login
email: $scope.loginEmail
password: $scope.loginPassword
.$promise
.then () ->
$state.go 'home'
$scope.createUser = ->
ImprovedUser.create
firstName: $scope.createFirstName
lastName: $scope.createLastName
email: $scope.createEmail
password: $PI:PASSWORD:<PASSWORD>END_PI
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9993476271629333,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-net-server-max-connections.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# This test creates 200 connections to a server and sets the server's
# maxConnections property to 100. The first 100 connections make it through
# and the last 100 connections are rejected.
# TODO: test that the server can accept more connections after it reaches
# its maximum and some are closed.
makeConnection = (index) ->
c = net.createConnection(common.PORT)
gotData = false
c.on "connect", ->
makeConnection index + 1 if index + 1 < N
return
c.on "end", ->
c.end()
return
c.on "data", (b) ->
gotData = true
assert.ok 0 < b.length
return
c.on "error", (e) ->
console.error "error %d: %s", index, e
return
c.on "close", ->
console.error "closed %d", index
closes++
assert.ok server.maxConnections <= index, index + " was one of the first closed connections " + "but shouldnt have been" if closes < N / 2
if closes is N / 2
cb = undefined
console.error "calling wait callback."
cb() while cb = waits.shift()
server.close()
if index < server.maxConnections
assert.equal true, gotData, index + " didn't get data, but should have"
else
assert.equal false, gotData, index + " got data, but shouldn't have"
return
return
common = require("../common")
assert = require("assert")
net = require("net")
N = 200
count = 0
closes = 0
waits = []
server = net.createServer((connection) ->
console.error "connect %d", count++
connection.write "hello"
waits.push ->
connection.end()
return
return
)
server.listen common.PORT, ->
makeConnection 0
return
server.maxConnections = N / 2
console.error "server.maxConnections = %d", server.maxConnections
process.on "exit", ->
assert.equal N, closes
return
| 47760 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# This test creates 200 connections to a server and sets the server's
# maxConnections property to 100. The first 100 connections make it through
# and the last 100 connections are rejected.
# TODO: test that the server can accept more connections after it reaches
# its maximum and some are closed.
makeConnection = (index) ->
c = net.createConnection(common.PORT)
gotData = false
c.on "connect", ->
makeConnection index + 1 if index + 1 < N
return
c.on "end", ->
c.end()
return
c.on "data", (b) ->
gotData = true
assert.ok 0 < b.length
return
c.on "error", (e) ->
console.error "error %d: %s", index, e
return
c.on "close", ->
console.error "closed %d", index
closes++
assert.ok server.maxConnections <= index, index + " was one of the first closed connections " + "but shouldnt have been" if closes < N / 2
if closes is N / 2
cb = undefined
console.error "calling wait callback."
cb() while cb = waits.shift()
server.close()
if index < server.maxConnections
assert.equal true, gotData, index + " didn't get data, but should have"
else
assert.equal false, gotData, index + " got data, but shouldn't have"
return
return
common = require("../common")
assert = require("assert")
net = require("net")
N = 200
count = 0
closes = 0
waits = []
server = net.createServer((connection) ->
console.error "connect %d", count++
connection.write "hello"
waits.push ->
connection.end()
return
return
)
server.listen common.PORT, ->
makeConnection 0
return
server.maxConnections = N / 2
console.error "server.maxConnections = %d", server.maxConnections
process.on "exit", ->
assert.equal N, closes
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# This test creates 200 connections to a server and sets the server's
# maxConnections property to 100. The first 100 connections make it through
# and the last 100 connections are rejected.
# TODO: test that the server can accept more connections after it reaches
# its maximum and some are closed.
makeConnection = (index) ->
c = net.createConnection(common.PORT)
gotData = false
c.on "connect", ->
makeConnection index + 1 if index + 1 < N
return
c.on "end", ->
c.end()
return
c.on "data", (b) ->
gotData = true
assert.ok 0 < b.length
return
c.on "error", (e) ->
console.error "error %d: %s", index, e
return
c.on "close", ->
console.error "closed %d", index
closes++
assert.ok server.maxConnections <= index, index + " was one of the first closed connections " + "but shouldnt have been" if closes < N / 2
if closes is N / 2
cb = undefined
console.error "calling wait callback."
cb() while cb = waits.shift()
server.close()
if index < server.maxConnections
assert.equal true, gotData, index + " didn't get data, but should have"
else
assert.equal false, gotData, index + " got data, but shouldn't have"
return
return
common = require("../common")
assert = require("assert")
net = require("net")
N = 200
count = 0
closes = 0
waits = []
server = net.createServer((connection) ->
console.error "connect %d", count++
connection.write "hello"
waits.push ->
connection.end()
return
return
)
server.listen common.PORT, ->
makeConnection 0
return
server.maxConnections = N / 2
console.error "server.maxConnections = %d", server.maxConnections
process.on "exit", ->
assert.equal N, closes
return
|
[
{
"context": "###\n# @author Jinzulen\n# @license Apache 2.0\n# @copyright Copyright 2020",
"end": 22,
"score": 0.9998749494552612,
"start": 14,
"tag": "NAME",
"value": "Jinzulen"
},
{
"context": "\n# @license Apache 2.0\n# @copyright Copyright 2020 Khalil G. <https://github.com/Jinz... | src/Methods/Emotes/Author.coffee | DiaxManPl/DEmojiJS | 24 | ###
# @author Jinzulen
# @license Apache 2.0
# @copyright Copyright 2020 Khalil G. <https://github.com/Jinzulen>
###
API = require "../../API/EmojiAPI"
module.exports = class GrabAuthor
constructor: (Author, GIF, Callback) ->
return new Promise (Resolve, Reject) ->
if !Author then Reject "# [DEmojiJS] Please enter an emote uplaoder to continue."
if typeof Author != "string"
Reject "# [DEmojiJS] Emote author has to be a string."
API.contactAPI "https://emoji.gg/api/", (Error, Data) ->
if Error then Reject Error
Store = []
if GIF then Type = ".gif" else Type = "."
for e in Data
e["submitted_by"] = e["submitted_by"].toLowerCase()
if e["submitted_by"] == Author.toLowerCase() && e["image"].includes Type
Store.push e
Resolve Store | 114646 | ###
# @author <NAME>
# @license Apache 2.0
# @copyright Copyright 2020 <NAME>. <https://github.com/Jinzulen>
###
API = require "../../API/EmojiAPI"
module.exports = class GrabAuthor
constructor: (Author, GIF, Callback) ->
return new Promise (Resolve, Reject) ->
if !Author then Reject "# [DEmojiJS] Please enter an emote uplaoder to continue."
if typeof Author != "string"
Reject "# [DEmojiJS] Emote author has to be a string."
API.contactAPI "https://emoji.gg/api/", (Error, Data) ->
if Error then Reject Error
Store = []
if GIF then Type = ".gif" else Type = "."
for e in Data
e["submitted_by"] = e["submitted_by"].toLowerCase()
if e["submitted_by"] == Author.toLowerCase() && e["image"].includes Type
Store.push e
Resolve Store | true | ###
# @author PI:NAME:<NAME>END_PI
# @license Apache 2.0
# @copyright Copyright 2020 PI:NAME:<NAME>END_PI. <https://github.com/Jinzulen>
###
API = require "../../API/EmojiAPI"
module.exports = class GrabAuthor
constructor: (Author, GIF, Callback) ->
return new Promise (Resolve, Reject) ->
if !Author then Reject "# [DEmojiJS] Please enter an emote uplaoder to continue."
if typeof Author != "string"
Reject "# [DEmojiJS] Emote author has to be a string."
API.contactAPI "https://emoji.gg/api/", (Error, Data) ->
if Error then Reject Error
Store = []
if GIF then Type = ".gif" else Type = "."
for e in Data
e["submitted_by"] = e["submitted_by"].toLowerCase()
if e["submitted_by"] == Author.toLowerCase() && e["image"].includes Type
Store.push e
Resolve Store |
[
{
"context": "../src/index\"\n\ndescribe \"Mailer\", ->\n emailTo = 'toto@toto.com'\n emailFrom = 'no-reply@toto.com'\n locale = 'en",
"end": 186,
"score": 0.9999133348464966,
"start": 173,
"tag": "EMAIL",
"value": "toto@toto.com"
},
{
"context": "r\", ->\n emailTo = 'toto@toto.c... | test/unit/index.coffee | daemon1981/node-service-mailer | 0 | assert = require 'assert'
should = require 'should'
sinon = require 'sinon'
Mailer = require "../../src/index"
describe "Mailer", ->
emailTo = 'toto@toto.com'
emailFrom = 'no-reply@toto.com'
locale = 'en'
mailer = {}
createMailer = (templateRootDir) ->
mailer = new Mailer("Sendmail", templateRootDir)
mailer.doSendMail = sinon.stub(mailer, 'doSendMail', (mailOptions, callback) -> return callback(null, {}))
return mailer
checkSendMailArgs = (args, textVariables) ->
objectData = args[0][0]
assert.equal emailFrom, objectData.from
assert.equal emailTo, objectData.to
for variable in textVariables
assert new RegExp(variable).test(objectData.text), variable + ' should be in text body'
for variable in textVariables
assert new RegExp(variable).test(objectData.html), variable + ' should be in html body'
describe "getTemplatesDir", ->
it "should get write path if last slash is not present", () ->
templateRootDir = require('path').join(__dirname, './templates')
mailer = createMailer templateRootDir
assert.equal(mailer.getTemplatesDir(locale), require('path').join(__dirname, './templates') + '/' + locale)
it "should get write path if last slash is present", () ->
templateRootDir = require('path').join(__dirname, './templates/')
mailer = createMailer templateRootDir
assert.equal(mailer.getTemplatesDir(locale), require('path').join(__dirname, './templates') + '/' + locale)
describe "sendMail", ->
it "should send email and insert email body data", (done) ->
templateRootDir = require('path').join(__dirname, './templates/')
mailer = createMailer templateRootDir
bodyData =
name: 'dummy name'
url: 'http://dummy-url'
mailer.sendMail locale, "testTemplateName", 'dummy-subject', emailFrom, emailTo, bodyData, (err, response) ->
should.not.exists(err)
assert(mailer.doSendMail.called)
checkSendMailArgs(mailer.doSendMail.args, bodyData)
done()
| 65382 | assert = require 'assert'
should = require 'should'
sinon = require 'sinon'
Mailer = require "../../src/index"
describe "Mailer", ->
emailTo = '<EMAIL>'
emailFrom = '<EMAIL>'
locale = 'en'
mailer = {}
createMailer = (templateRootDir) ->
mailer = new Mailer("Sendmail", templateRootDir)
mailer.doSendMail = sinon.stub(mailer, 'doSendMail', (mailOptions, callback) -> return callback(null, {}))
return mailer
checkSendMailArgs = (args, textVariables) ->
objectData = args[0][0]
assert.equal emailFrom, objectData.from
assert.equal emailTo, objectData.to
for variable in textVariables
assert new RegExp(variable).test(objectData.text), variable + ' should be in text body'
for variable in textVariables
assert new RegExp(variable).test(objectData.html), variable + ' should be in html body'
describe "getTemplatesDir", ->
it "should get write path if last slash is not present", () ->
templateRootDir = require('path').join(__dirname, './templates')
mailer = createMailer templateRootDir
assert.equal(mailer.getTemplatesDir(locale), require('path').join(__dirname, './templates') + '/' + locale)
it "should get write path if last slash is present", () ->
templateRootDir = require('path').join(__dirname, './templates/')
mailer = createMailer templateRootDir
assert.equal(mailer.getTemplatesDir(locale), require('path').join(__dirname, './templates') + '/' + locale)
describe "sendMail", ->
it "should send email and insert email body data", (done) ->
templateRootDir = require('path').join(__dirname, './templates/')
mailer = createMailer templateRootDir
bodyData =
name: '<NAME>'
url: 'http://dummy-url'
mailer.sendMail locale, "testTemplateName", 'dummy-subject', emailFrom, emailTo, bodyData, (err, response) ->
should.not.exists(err)
assert(mailer.doSendMail.called)
checkSendMailArgs(mailer.doSendMail.args, bodyData)
done()
| true | assert = require 'assert'
should = require 'should'
sinon = require 'sinon'
Mailer = require "../../src/index"
describe "Mailer", ->
emailTo = 'PI:EMAIL:<EMAIL>END_PI'
emailFrom = 'PI:EMAIL:<EMAIL>END_PI'
locale = 'en'
mailer = {}
createMailer = (templateRootDir) ->
mailer = new Mailer("Sendmail", templateRootDir)
mailer.doSendMail = sinon.stub(mailer, 'doSendMail', (mailOptions, callback) -> return callback(null, {}))
return mailer
checkSendMailArgs = (args, textVariables) ->
objectData = args[0][0]
assert.equal emailFrom, objectData.from
assert.equal emailTo, objectData.to
for variable in textVariables
assert new RegExp(variable).test(objectData.text), variable + ' should be in text body'
for variable in textVariables
assert new RegExp(variable).test(objectData.html), variable + ' should be in html body'
describe "getTemplatesDir", ->
it "should get write path if last slash is not present", () ->
templateRootDir = require('path').join(__dirname, './templates')
mailer = createMailer templateRootDir
assert.equal(mailer.getTemplatesDir(locale), require('path').join(__dirname, './templates') + '/' + locale)
it "should get write path if last slash is present", () ->
templateRootDir = require('path').join(__dirname, './templates/')
mailer = createMailer templateRootDir
assert.equal(mailer.getTemplatesDir(locale), require('path').join(__dirname, './templates') + '/' + locale)
describe "sendMail", ->
it "should send email and insert email body data", (done) ->
templateRootDir = require('path').join(__dirname, './templates/')
mailer = createMailer templateRootDir
bodyData =
name: 'PI:NAME:<NAME>END_PI'
url: 'http://dummy-url'
mailer.sendMail locale, "testTemplateName", 'dummy-subject', emailFrom, emailTo, bodyData, (err, response) ->
should.not.exists(err)
assert(mailer.doSendMail.called)
checkSendMailArgs(mailer.doSendMail.args, bodyData)
done()
|
[
{
"context": ",\n \"status\": 200\n \"data\":\n \"login\": \"ntkme\",\n \"id\": 899645,\n \"avatar_url\": \"https:",
"end": 12680,
"score": 0.9989709854125977,
"start": 12675,
"tag": "USERNAME",
"value": "ntkme"
},
{
"context": "\": \"\",\n \"url\": \"http... | test/browser/src/core.coffee | DimensionDataResearch/buttons | 0 | describe 'Element', ->
describe '#constructor()', ->
it 'should use element when element is given', ->
element = document.createElement "a"
expect new Element(element).$
.to.equal element
it 'should create new element when tag name is given', ->
expect new Element("i").$.nodeType
.to.equal 1
it 'should callback with this', ->
_this = null
_ = new Element "em", (element) -> _this = @
expect _this
.to.equal _
it 'should callback with argument element', (done) ->
b = document.createElement "b"
new Element b, (element) ->
expect element
.to.equal b
done()
describe '#on()', ->
input = null
beforeEach ->
input = new Element "input", (element) -> document.body.appendChild element
afterEach ->
document.body.removeChild input.$
it 'should call the function on single event type', ->
spy = sinon.spy()
input.on "click", spy
input.$.click()
expect spy
.to.have.been.calledOnce
input.$.click()
expect spy
.to.have.been.calledTwice
it 'should call the function on multiple event types', ->
spy = sinon.spy()
input.on "focus", "blur", "click", spy
input.$.focus()
expect spy
.to.have.been.calledOnce
input.$.blur()
expect spy
.to.have.been.calledTwice
input.$.click()
expect spy
.to.have.been.calledThrice
it 'should call the function with this', (done) ->
a = document.createElement "a"
_this = new Element a
_this.on "click", ->
expect @
.to.equal _this
done()
a.click()
it 'should call the function with event', (done) ->
b = document.createElement "b"
new Element b
.on "click", (event) ->
expect event.type
.to.equal "click"
done()
b.click()
describe '#once()', ->
input = null
beforeEach ->
input = new Element "input", (element) -> document.body.appendChild element
afterEach ->
document.body.removeChild input.$
it 'should call the function on single event type only once', ->
spy = sinon.spy()
input.once "click", spy
input.$.click()
expect spy
.to.have.been.calledOnce
input.$.click()
input.$.click()
expect spy
.to.have.been.calledOnce
it 'should call the function on multiple event types only once', ->
spy = sinon.spy()
input.once "focus", "blur", spy
input.$.focus()
expect spy
.to.have.been.calledOnce
input.$.blur()
input.$.focus()
expect spy
.to.have.been.calledOnce
it 'should call the function with this', (done) ->
a = document.createElement "a"
_this = new Element a
_this.once "click", ->
expect @
.to.equal _this
done()
a.click()
it 'should call the function with event', (done) ->
b = document.createElement "b"
new Element b
.once "click", (event) ->
expect event.type
.to.equal "click"
done()
b.click()
describe '#addClass()', ->
it 'should add class to element', ->
element = document.createElement "a"
element.className = "hello"
a = new Element element
a.addClass "world"
expect a.$.className
.to.equal "hello world"
a.addClass "world"
expect a.$.className
.to.equal "hello world"
describe '#removeClass()', ->
it 'should remove class from element', ->
element = document.createElement "a"
element.className = "hello world"
a = new Element element
a.removeClass "hello"
expect a.$.className
.to.equal "world"
a.removeClass "hello"
expect a.$.className
.to.equal "world"
describe '#hasClass()', ->
it 'should return whether element has class', ->
element = document.createElement "a"
element.className = "world"
a = new Element element
expect a.hasClass "hello"
.to.be.false
expect a.hasClass "world"
.to.be.true
describe 'Frame', ->
frame = null
html = \
"""
<!DOCTYPE html>
<html lang="ja">
<head>
<meta charset="utf-8">
<title></title>
</head>
<body style="margin: 0;">
<div style="width: 200.5px; height: 100px;"></div>
</body>
</html>
"""
beforeEach ->
frame = new Frame (iframe) -> document.body.appendChild iframe
afterEach ->
document.body.removeChild frame.$
describe '#constructor()', ->
it 'should callback with the new iframe', ->
expect frame.$.nodeType
.to.equal 1
expect frame.$.tagName
.to.equal "IFRAME"
describe '#html()', ->
it 'should write html when iframe is in same-origin', (done) ->
frame.on "load", ->
expect frame.$.contentWindow.document.documentElement.getAttribute "lang"
.to.equal "ja"
done()
frame.html html
describe '#load()', ->
it 'should load the src url', ->
frame.load "../../buttons.html"
expect frame.$.src
.to.match /buttons\.html$/
describe '#size()', ->
it 'should return the iframe content size', (done) ->
frame.on "load", ->
switch window.devicePixelRatio
when 2
expect @size()
.to.deep.equal
width: "200.5px"
height: "100px"
when 3
expect @size()
.to.deep.equal
width: "201px"
height: "100px"
done()
frame.html html
describe '#resize()', ->
it 'should resize the iframe', (done) ->
frame.resize
width: "20px"
height: "10px"
expect frame.$.style.width
.to.equal "20px"
expect frame.$.style.height
.to.equal "10px"
done()
describe 'ButtonAnchor', ->
a = null
beforeEach ->
a = document.createElement "a"
describe '.parse()', ->
it 'should parse the anchor without attribute', ->
expect ButtonAnchor.parse a
.to.deep.equal
href: ""
text: ""
data:
count:
api: ""
href: ""
aria:
label: ""
style: ""
icon: ""
aria:
label: ""
it 'should parse the attribute href', ->
a.href = "https://buttons.github.io/"
expect ButtonAnchor.parse a
.to.have.property "href"
.and.equal a.href
it 'should parse the attribute data-text', ->
text = "test"
a.setAttribute "data-text", text
expect ButtonAnchor.parse a
.to.have.property "text"
.and.equal text
it 'should parse the text content', ->
text = "something"
a.appendChild document.createTextNode text
expect ButtonAnchor.parse a
.to.have.property "text"
.and.equal text
it 'should ignore the text content when the attribute data-text is given', ->
text = "something"
a.setAttribute "data-text", text
a.appendChild document.createTextNode "something else"
expect ButtonAnchor.parse a
.to.have.property "text"
.and.equal text
it 'should parse the attribute data-count-api', ->
api = "/repos/:user/:repo#item"
a.setAttribute "data-count-api", api
expect ButtonAnchor.parse a
.to.have.deep.property "data.count.api"
.and.equal api
it 'should parse the attribute data-count-href', ->
href = "https://github.com/"
a.setAttribute "data-count-href", href
expect ButtonAnchor.parse a
.to.have.deep.property "data.count.href"
.and.equal href
it 'should fallback data.cout.href to the attribute href when the attribute data-count-href is not given', ->
a.href = "https://github.com/"
expect ButtonAnchor.parse a
.to.have.deep.property "data.count.href"
.and.equal a.href
it 'should parse the attribute data-style', ->
style = "mega"
a.setAttribute "data-style", style
expect ButtonAnchor.parse a
.to.have.deep.property "data.style"
.and.equal style
it 'should parse the attribute data-icon', ->
icon = "octicon"
a.setAttribute "data-icon", icon
expect ButtonAnchor.parse a
.to.have.deep.property "data.icon"
.and.equal icon
describe 'ButtonFrame', ->
describe '#constructor()', ->
hash = Hash.encode ButtonAnchor.parse document.createElement "a"
it 'should callback with this twice', (done) ->
_this = null
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
_this = @
, (iframe) ->
expect _this
.to.equal @
done()
it 'should callback with the iframe as argument twice', (done) ->
frame = null
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
frame = iframe
expect iframe.tagName
.to.equal "IFRAME"
, (iframe) ->
expect iframe
.to.equal frame
done()
it 'should load the iframe twice after insert it into DOM', (done) ->
spy = sinon.spy()
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
@on "load", -> spy()
, (iframe) ->
@once "load", ->
expect spy
.to.have.been.calledTwice
iframe.parentNode.removeChild iframe
done()
document.body.appendChild iframe
it 'should load the iframe the first time by writing html', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
sinon.spy @, "html"
, (iframe) ->
expect @html
.to.have.been.calledOnce
@html.restore()
done()
it 'should set document.location.hash when load the first time by writing html', (done) ->
_hash = null
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
@once "load", ->
_hash = iframe.contentWindow.document.location.hash
, (iframe) ->
expect _hash
.to.equal hash
done()
it 'should load the iframe the second time by setting the src attribute', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
sinon.spy @, "html"
sinon.spy @, "load"
, (iframe) ->
expect @load
.to.have.been.calledOnce
expect @load
.to.have.been.calledAfter @html
@html.restore()
@load.restore()
done()
it 'should set document.location.href when load the second time by setting the src attribute', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
, (iframe) ->
@once "load", ->
expect iframe.contentWindow.document.location.hash
.to.equal hash
iframe.parentNode.removeChild iframe
done()
document.body.appendChild iframe
it 'should resize the iframe after the second load', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
sinon.spy @, "html"
sinon.spy @, "load"
sinon.spy @, "size"
sinon.spy @, "resize"
, (iframe) ->
expect @size
.to.have.been.calledOnce
expect @size
.to.have.been.calledAfter @html
@once "load", ->
expect @resize
.to.have.been.calledOnce
expect @resize
.to.have.been.calledAfter @load
expect @resize.args[0][0]
.to.deep.equal @size.returnValues[0]
expect iframe.style.width
.to.equal @size.returnValues[0].width
expect iframe.style.height
.to.equal @size.returnValues[0].height
@html.restore()
@load.restore()
@size.restore()
@resize.restore()
iframe.parentNode.removeChild iframe
done()
document.body.appendChild iframe
describe 'ButtonFrameContent', ->
head = document.getElementsByTagName("head")[0]
base = null
bodyClassName = null
data =
"meta":
"X-RateLimit-Limit": "60",
"X-RateLimit-Remaining": "59",
"X-RateLimit-Reset": "1423391706",
"Cache-Control": "public, max-age=60, s-maxage=60",
"Last-Modified": "Sun, 08 Feb 2015 07:39:11 GMT",
"Vary": "Accept",
"X-GitHub-Media-Type": "github.v3",
"status": 200
"data":
"login": "ntkme",
"id": 899645,
"avatar_url": "https://avatars.githubusercontent.com/u/899645?v=3",
"gravatar_id": "",
"url": "https://api.github.com/users/ntkme",
"html_url": "https://github.com/ntkme",
"followers_url": "https://api.github.com/users/ntkme/followers",
"following_url": "https://api.github.com/users/ntkme/following{/other_user}",
"gists_url": "https://api.github.com/users/ntkme/gists{/gist_id}",
"starred_url": "https://api.github.com/users/ntkme/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ntkme/subscriptions",
"organizations_url": "https://api.github.com/users/ntkme/orgs",
"repos_url": "https://api.github.com/users/ntkme/repos",
"events_url": "https://api.github.com/users/ntkme/events{/privacy}",
"received_events_url": "https://api.github.com/users/ntkme/received_events",
"type": "User",
"site_admin": false,
"name": "なつき",
"company": "",
"blog": "https://ntk.me",
"location": "California",
"email": "i@ntk.me",
"hireable": true,
"bio": null,
"public_repos": 10,
"public_gists": 0,
"followers": 26,
"following": 0,
"created_at": "2011-07-07T03:26:58Z",
"updated_at": "2015-02-08T07:39:11Z"
javascript_protocals = [
"javascript:"
"JAVASCRIPT:"
"JavaScript:"
" javascript:"
" javascript:"
"\tjavascript:"
"\njavascript:"
"\rjavascript:"
"\fjavascript:"
]
beforeEach ->
bodyClassName= document.body.getAttribute "class"
base = document.getElementsByTagName("base")[0]
sinon.stub document.body, "appendChild"
afterEach ->
if bodyClassName
document.body.className = bodyClassName
else
document.body.removeAttribute "class"
document.body.appendChild.restore()
describe '#constructor()', ->
it 'should do nothing when options are missing', ->
new ButtonFrameContent()
expect base.getAttribute "href"
.to.be.null
expect document.body.appendChild
.to.have.not.been.called
it 'should not set base.href', ->
options =
href: "https://github.com/"
data: {}
aria: {}
new ButtonFrameContent options
expect base.getAttribute "href"
.to.be.null
it 'should set document.body.className when a style is given', ->
options =
data: style: "mega"
aria: {}
new ButtonFrameContent options
expect document.body.className
.to.equal options.data.style
it 'should append the button to document.body when the necessary options are given', ->
options =
data: {}
aria: {}
new ButtonFrameContent options
expect document.body.appendChild
.to.be.calledOnce
button = document.body.appendChild.args[0][0]
expect button
.to.have.property "className"
.and.equal "button"
it 'should append the button with given href', ->
options =
href: "https://ntkme.github.com/"
data: {}
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect button.getAttribute "href"
.to.equal options.href
it 'should filter javascript in the href', ->
for href, i in javascript_protocals
options =
href: href
data: count: href: href
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[i][0]
if button.protocol
expect button.protocol
.to.not.equal "javascript:"
else
expect button.href
.to.not.match /^javascript:/i
it 'should append the button with the default icon', ->
options =
data: {}
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect " #{button.firstChild.className} ".indexOf " #{CONFIG_ICON_DEFAULT} "
.to.be.at.least 0
it 'should append the button with given icon', ->
options =
data: icon: "octicon-star"
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect " #{button.firstChild.className} ".indexOf " #{options.data.icon} "
.to.be.at.least 0
it 'should append the button with given text', ->
options =
text: "Follow"
data: {}
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect button.lastChild.innerHTML
.to.equal options.text
it 'should append the button with given aria label', ->
options =
data: {}
aria: label: "GitHub"
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect button.getAttribute "aria-label"
.to.equal options.aria.label
it 'should append the count to document.body when the necessary options are given', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
expect document.body.appendChild
.to.be.calledTwice
count = document.body.appendChild.args[1][0]
expect count
.to.have.property "className"
.and.equal "count"
head.appendChild.restore()
it 'should append the count with given data.count.href', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
href: "https://gist.github.com/"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.getAttribute "href"
.to.equal options.data.count.href
head.appendChild.restore()
it 'should append the count with #entry from api response', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "26"
head.appendChild.restore()
it 'should append the count with #entry from api response by prepending missing / to api', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "26"
head.appendChild.restore()
it 'should append the count with large number split by comma', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#id"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "899,645"
head.appendChild.restore()
it 'should append the count with given aria label', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
aria: label: "# followers on GitHub"
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.getAttribute "aria-label"
.to.equal "26 followers on GitHub"
head.appendChild.restore()
it 'should append the count with text undefined when missing # in api', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "undefined"
head.appendChild.restore()
it 'should append the count with text undefined when api #entry does not exist', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#fail"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "undefined"
head.appendChild.restore()
it 'should not append the count when it fails to pull api data', ->
sinon.stub head, "appendChild", -> window.callback meta: status: 404
options =
data: count:
api: "/dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
expect document.body.appendChild
.to.be.calledOnce
button = document.body.appendChild.args[0][0]
expect button
.to.have.property "className"
.and.equal "button"
head.appendChild.restore()
| 220896 | describe 'Element', ->
describe '#constructor()', ->
it 'should use element when element is given', ->
element = document.createElement "a"
expect new Element(element).$
.to.equal element
it 'should create new element when tag name is given', ->
expect new Element("i").$.nodeType
.to.equal 1
it 'should callback with this', ->
_this = null
_ = new Element "em", (element) -> _this = @
expect _this
.to.equal _
it 'should callback with argument element', (done) ->
b = document.createElement "b"
new Element b, (element) ->
expect element
.to.equal b
done()
describe '#on()', ->
input = null
beforeEach ->
input = new Element "input", (element) -> document.body.appendChild element
afterEach ->
document.body.removeChild input.$
it 'should call the function on single event type', ->
spy = sinon.spy()
input.on "click", spy
input.$.click()
expect spy
.to.have.been.calledOnce
input.$.click()
expect spy
.to.have.been.calledTwice
it 'should call the function on multiple event types', ->
spy = sinon.spy()
input.on "focus", "blur", "click", spy
input.$.focus()
expect spy
.to.have.been.calledOnce
input.$.blur()
expect spy
.to.have.been.calledTwice
input.$.click()
expect spy
.to.have.been.calledThrice
it 'should call the function with this', (done) ->
a = document.createElement "a"
_this = new Element a
_this.on "click", ->
expect @
.to.equal _this
done()
a.click()
it 'should call the function with event', (done) ->
b = document.createElement "b"
new Element b
.on "click", (event) ->
expect event.type
.to.equal "click"
done()
b.click()
describe '#once()', ->
input = null
beforeEach ->
input = new Element "input", (element) -> document.body.appendChild element
afterEach ->
document.body.removeChild input.$
it 'should call the function on single event type only once', ->
spy = sinon.spy()
input.once "click", spy
input.$.click()
expect spy
.to.have.been.calledOnce
input.$.click()
input.$.click()
expect spy
.to.have.been.calledOnce
it 'should call the function on multiple event types only once', ->
spy = sinon.spy()
input.once "focus", "blur", spy
input.$.focus()
expect spy
.to.have.been.calledOnce
input.$.blur()
input.$.focus()
expect spy
.to.have.been.calledOnce
it 'should call the function with this', (done) ->
a = document.createElement "a"
_this = new Element a
_this.once "click", ->
expect @
.to.equal _this
done()
a.click()
it 'should call the function with event', (done) ->
b = document.createElement "b"
new Element b
.once "click", (event) ->
expect event.type
.to.equal "click"
done()
b.click()
describe '#addClass()', ->
it 'should add class to element', ->
element = document.createElement "a"
element.className = "hello"
a = new Element element
a.addClass "world"
expect a.$.className
.to.equal "hello world"
a.addClass "world"
expect a.$.className
.to.equal "hello world"
describe '#removeClass()', ->
it 'should remove class from element', ->
element = document.createElement "a"
element.className = "hello world"
a = new Element element
a.removeClass "hello"
expect a.$.className
.to.equal "world"
a.removeClass "hello"
expect a.$.className
.to.equal "world"
describe '#hasClass()', ->
it 'should return whether element has class', ->
element = document.createElement "a"
element.className = "world"
a = new Element element
expect a.hasClass "hello"
.to.be.false
expect a.hasClass "world"
.to.be.true
describe 'Frame', ->
frame = null
html = \
"""
<!DOCTYPE html>
<html lang="ja">
<head>
<meta charset="utf-8">
<title></title>
</head>
<body style="margin: 0;">
<div style="width: 200.5px; height: 100px;"></div>
</body>
</html>
"""
beforeEach ->
frame = new Frame (iframe) -> document.body.appendChild iframe
afterEach ->
document.body.removeChild frame.$
describe '#constructor()', ->
it 'should callback with the new iframe', ->
expect frame.$.nodeType
.to.equal 1
expect frame.$.tagName
.to.equal "IFRAME"
describe '#html()', ->
it 'should write html when iframe is in same-origin', (done) ->
frame.on "load", ->
expect frame.$.contentWindow.document.documentElement.getAttribute "lang"
.to.equal "ja"
done()
frame.html html
describe '#load()', ->
it 'should load the src url', ->
frame.load "../../buttons.html"
expect frame.$.src
.to.match /buttons\.html$/
describe '#size()', ->
it 'should return the iframe content size', (done) ->
frame.on "load", ->
switch window.devicePixelRatio
when 2
expect @size()
.to.deep.equal
width: "200.5px"
height: "100px"
when 3
expect @size()
.to.deep.equal
width: "201px"
height: "100px"
done()
frame.html html
describe '#resize()', ->
it 'should resize the iframe', (done) ->
frame.resize
width: "20px"
height: "10px"
expect frame.$.style.width
.to.equal "20px"
expect frame.$.style.height
.to.equal "10px"
done()
describe 'ButtonAnchor', ->
a = null
beforeEach ->
a = document.createElement "a"
describe '.parse()', ->
it 'should parse the anchor without attribute', ->
expect ButtonAnchor.parse a
.to.deep.equal
href: ""
text: ""
data:
count:
api: ""
href: ""
aria:
label: ""
style: ""
icon: ""
aria:
label: ""
it 'should parse the attribute href', ->
a.href = "https://buttons.github.io/"
expect ButtonAnchor.parse a
.to.have.property "href"
.and.equal a.href
it 'should parse the attribute data-text', ->
text = "test"
a.setAttribute "data-text", text
expect ButtonAnchor.parse a
.to.have.property "text"
.and.equal text
it 'should parse the text content', ->
text = "something"
a.appendChild document.createTextNode text
expect ButtonAnchor.parse a
.to.have.property "text"
.and.equal text
it 'should ignore the text content when the attribute data-text is given', ->
text = "something"
a.setAttribute "data-text", text
a.appendChild document.createTextNode "something else"
expect ButtonAnchor.parse a
.to.have.property "text"
.and.equal text
it 'should parse the attribute data-count-api', ->
api = "/repos/:user/:repo#item"
a.setAttribute "data-count-api", api
expect ButtonAnchor.parse a
.to.have.deep.property "data.count.api"
.and.equal api
it 'should parse the attribute data-count-href', ->
href = "https://github.com/"
a.setAttribute "data-count-href", href
expect ButtonAnchor.parse a
.to.have.deep.property "data.count.href"
.and.equal href
it 'should fallback data.cout.href to the attribute href when the attribute data-count-href is not given', ->
a.href = "https://github.com/"
expect ButtonAnchor.parse a
.to.have.deep.property "data.count.href"
.and.equal a.href
it 'should parse the attribute data-style', ->
style = "mega"
a.setAttribute "data-style", style
expect ButtonAnchor.parse a
.to.have.deep.property "data.style"
.and.equal style
it 'should parse the attribute data-icon', ->
icon = "octicon"
a.setAttribute "data-icon", icon
expect ButtonAnchor.parse a
.to.have.deep.property "data.icon"
.and.equal icon
describe 'ButtonFrame', ->
describe '#constructor()', ->
hash = Hash.encode ButtonAnchor.parse document.createElement "a"
it 'should callback with this twice', (done) ->
_this = null
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
_this = @
, (iframe) ->
expect _this
.to.equal @
done()
it 'should callback with the iframe as argument twice', (done) ->
frame = null
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
frame = iframe
expect iframe.tagName
.to.equal "IFRAME"
, (iframe) ->
expect iframe
.to.equal frame
done()
it 'should load the iframe twice after insert it into DOM', (done) ->
spy = sinon.spy()
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
@on "load", -> spy()
, (iframe) ->
@once "load", ->
expect spy
.to.have.been.calledTwice
iframe.parentNode.removeChild iframe
done()
document.body.appendChild iframe
it 'should load the iframe the first time by writing html', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
sinon.spy @, "html"
, (iframe) ->
expect @html
.to.have.been.calledOnce
@html.restore()
done()
it 'should set document.location.hash when load the first time by writing html', (done) ->
_hash = null
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
@once "load", ->
_hash = iframe.contentWindow.document.location.hash
, (iframe) ->
expect _hash
.to.equal hash
done()
it 'should load the iframe the second time by setting the src attribute', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
sinon.spy @, "html"
sinon.spy @, "load"
, (iframe) ->
expect @load
.to.have.been.calledOnce
expect @load
.to.have.been.calledAfter @html
@html.restore()
@load.restore()
done()
it 'should set document.location.href when load the second time by setting the src attribute', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
, (iframe) ->
@once "load", ->
expect iframe.contentWindow.document.location.hash
.to.equal hash
iframe.parentNode.removeChild iframe
done()
document.body.appendChild iframe
it 'should resize the iframe after the second load', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
sinon.spy @, "html"
sinon.spy @, "load"
sinon.spy @, "size"
sinon.spy @, "resize"
, (iframe) ->
expect @size
.to.have.been.calledOnce
expect @size
.to.have.been.calledAfter @html
@once "load", ->
expect @resize
.to.have.been.calledOnce
expect @resize
.to.have.been.calledAfter @load
expect @resize.args[0][0]
.to.deep.equal @size.returnValues[0]
expect iframe.style.width
.to.equal @size.returnValues[0].width
expect iframe.style.height
.to.equal @size.returnValues[0].height
@html.restore()
@load.restore()
@size.restore()
@resize.restore()
iframe.parentNode.removeChild iframe
done()
document.body.appendChild iframe
describe 'ButtonFrameContent', ->
head = document.getElementsByTagName("head")[0]
base = null
bodyClassName = null
data =
"meta":
"X-RateLimit-Limit": "60",
"X-RateLimit-Remaining": "59",
"X-RateLimit-Reset": "1423391706",
"Cache-Control": "public, max-age=60, s-maxage=60",
"Last-Modified": "Sun, 08 Feb 2015 07:39:11 GMT",
"Vary": "Accept",
"X-GitHub-Media-Type": "github.v3",
"status": 200
"data":
"login": "ntkme",
"id": 899645,
"avatar_url": "https://avatars.githubusercontent.com/u/899645?v=3",
"gravatar_id": "",
"url": "https://api.github.com/users/ntkme",
"html_url": "https://github.com/ntkme",
"followers_url": "https://api.github.com/users/ntkme/followers",
"following_url": "https://api.github.com/users/ntkme/following{/other_user}",
"gists_url": "https://api.github.com/users/ntkme/gists{/gist_id}",
"starred_url": "https://api.github.com/users/ntkme/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ntkme/subscriptions",
"organizations_url": "https://api.github.com/users/ntkme/orgs",
"repos_url": "https://api.github.com/users/ntkme/repos",
"events_url": "https://api.github.com/users/ntkme/events{/privacy}",
"received_events_url": "https://api.github.com/users/ntkme/received_events",
"type": "User",
"site_admin": false,
"name": "な<NAME>き",
"company": "",
"blog": "https://ntk.me",
"location": "California",
"email": "<EMAIL>",
"hireable": true,
"bio": null,
"public_repos": 10,
"public_gists": 0,
"followers": 26,
"following": 0,
"created_at": "2011-07-07T03:26:58Z",
"updated_at": "2015-02-08T07:39:11Z"
javascript_protocals = [
"javascript:"
"JAVASCRIPT:"
"JavaScript:"
" javascript:"
" javascript:"
"\tjavascript:"
"\njavascript:"
"\rjavascript:"
"\fjavascript:"
]
beforeEach ->
bodyClassName= document.body.getAttribute "class"
base = document.getElementsByTagName("base")[0]
sinon.stub document.body, "appendChild"
afterEach ->
if bodyClassName
document.body.className = bodyClassName
else
document.body.removeAttribute "class"
document.body.appendChild.restore()
describe '#constructor()', ->
it 'should do nothing when options are missing', ->
new ButtonFrameContent()
expect base.getAttribute "href"
.to.be.null
expect document.body.appendChild
.to.have.not.been.called
it 'should not set base.href', ->
options =
href: "https://github.com/"
data: {}
aria: {}
new ButtonFrameContent options
expect base.getAttribute "href"
.to.be.null
it 'should set document.body.className when a style is given', ->
options =
data: style: "mega"
aria: {}
new ButtonFrameContent options
expect document.body.className
.to.equal options.data.style
it 'should append the button to document.body when the necessary options are given', ->
options =
data: {}
aria: {}
new ButtonFrameContent options
expect document.body.appendChild
.to.be.calledOnce
button = document.body.appendChild.args[0][0]
expect button
.to.have.property "className"
.and.equal "button"
it 'should append the button with given href', ->
options =
href: "https://ntkme.github.com/"
data: {}
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect button.getAttribute "href"
.to.equal options.href
it 'should filter javascript in the href', ->
for href, i in javascript_protocals
options =
href: href
data: count: href: href
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[i][0]
if button.protocol
expect button.protocol
.to.not.equal "javascript:"
else
expect button.href
.to.not.match /^javascript:/i
it 'should append the button with the default icon', ->
options =
data: {}
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect " #{button.firstChild.className} ".indexOf " #{CONFIG_ICON_DEFAULT} "
.to.be.at.least 0
it 'should append the button with given icon', ->
options =
data: icon: "octicon-star"
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect " #{button.firstChild.className} ".indexOf " #{options.data.icon} "
.to.be.at.least 0
it 'should append the button with given text', ->
options =
text: "Follow"
data: {}
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect button.lastChild.innerHTML
.to.equal options.text
it 'should append the button with given aria label', ->
options =
data: {}
aria: label: "GitHub"
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect button.getAttribute "aria-label"
.to.equal options.aria.label
it 'should append the count to document.body when the necessary options are given', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
expect document.body.appendChild
.to.be.calledTwice
count = document.body.appendChild.args[1][0]
expect count
.to.have.property "className"
.and.equal "count"
head.appendChild.restore()
it 'should append the count with given data.count.href', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
href: "https://gist.github.com/"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.getAttribute "href"
.to.equal options.data.count.href
head.appendChild.restore()
it 'should append the count with #entry from api response', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "26"
head.appendChild.restore()
it 'should append the count with #entry from api response by prepending missing / to api', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "26"
head.appendChild.restore()
it 'should append the count with large number split by comma', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#id"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "899,645"
head.appendChild.restore()
it 'should append the count with given aria label', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
aria: label: "# followers on GitHub"
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.getAttribute "aria-label"
.to.equal "26 followers on GitHub"
head.appendChild.restore()
it 'should append the count with text undefined when missing # in api', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "undefined"
head.appendChild.restore()
it 'should append the count with text undefined when api #entry does not exist', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#fail"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "undefined"
head.appendChild.restore()
it 'should not append the count when it fails to pull api data', ->
sinon.stub head, "appendChild", -> window.callback meta: status: 404
options =
data: count:
api: "/dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
expect document.body.appendChild
.to.be.calledOnce
button = document.body.appendChild.args[0][0]
expect button
.to.have.property "className"
.and.equal "button"
head.appendChild.restore()
| true | describe 'Element', ->
describe '#constructor()', ->
it 'should use element when element is given', ->
element = document.createElement "a"
expect new Element(element).$
.to.equal element
it 'should create new element when tag name is given', ->
expect new Element("i").$.nodeType
.to.equal 1
it 'should callback with this', ->
_this = null
_ = new Element "em", (element) -> _this = @
expect _this
.to.equal _
it 'should callback with argument element', (done) ->
b = document.createElement "b"
new Element b, (element) ->
expect element
.to.equal b
done()
describe '#on()', ->
input = null
beforeEach ->
input = new Element "input", (element) -> document.body.appendChild element
afterEach ->
document.body.removeChild input.$
it 'should call the function on single event type', ->
spy = sinon.spy()
input.on "click", spy
input.$.click()
expect spy
.to.have.been.calledOnce
input.$.click()
expect spy
.to.have.been.calledTwice
it 'should call the function on multiple event types', ->
spy = sinon.spy()
input.on "focus", "blur", "click", spy
input.$.focus()
expect spy
.to.have.been.calledOnce
input.$.blur()
expect spy
.to.have.been.calledTwice
input.$.click()
expect spy
.to.have.been.calledThrice
it 'should call the function with this', (done) ->
a = document.createElement "a"
_this = new Element a
_this.on "click", ->
expect @
.to.equal _this
done()
a.click()
it 'should call the function with event', (done) ->
b = document.createElement "b"
new Element b
.on "click", (event) ->
expect event.type
.to.equal "click"
done()
b.click()
describe '#once()', ->
input = null
beforeEach ->
input = new Element "input", (element) -> document.body.appendChild element
afterEach ->
document.body.removeChild input.$
it 'should call the function on single event type only once', ->
spy = sinon.spy()
input.once "click", spy
input.$.click()
expect spy
.to.have.been.calledOnce
input.$.click()
input.$.click()
expect spy
.to.have.been.calledOnce
it 'should call the function on multiple event types only once', ->
spy = sinon.spy()
input.once "focus", "blur", spy
input.$.focus()
expect spy
.to.have.been.calledOnce
input.$.blur()
input.$.focus()
expect spy
.to.have.been.calledOnce
it 'should call the function with this', (done) ->
a = document.createElement "a"
_this = new Element a
_this.once "click", ->
expect @
.to.equal _this
done()
a.click()
it 'should call the function with event', (done) ->
b = document.createElement "b"
new Element b
.once "click", (event) ->
expect event.type
.to.equal "click"
done()
b.click()
describe '#addClass()', ->
it 'should add class to element', ->
element = document.createElement "a"
element.className = "hello"
a = new Element element
a.addClass "world"
expect a.$.className
.to.equal "hello world"
a.addClass "world"
expect a.$.className
.to.equal "hello world"
describe '#removeClass()', ->
it 'should remove class from element', ->
element = document.createElement "a"
element.className = "hello world"
a = new Element element
a.removeClass "hello"
expect a.$.className
.to.equal "world"
a.removeClass "hello"
expect a.$.className
.to.equal "world"
describe '#hasClass()', ->
it 'should return whether element has class', ->
element = document.createElement "a"
element.className = "world"
a = new Element element
expect a.hasClass "hello"
.to.be.false
expect a.hasClass "world"
.to.be.true
describe 'Frame', ->
frame = null
html = \
"""
<!DOCTYPE html>
<html lang="ja">
<head>
<meta charset="utf-8">
<title></title>
</head>
<body style="margin: 0;">
<div style="width: 200.5px; height: 100px;"></div>
</body>
</html>
"""
beforeEach ->
frame = new Frame (iframe) -> document.body.appendChild iframe
afterEach ->
document.body.removeChild frame.$
describe '#constructor()', ->
it 'should callback with the new iframe', ->
expect frame.$.nodeType
.to.equal 1
expect frame.$.tagName
.to.equal "IFRAME"
describe '#html()', ->
it 'should write html when iframe is in same-origin', (done) ->
frame.on "load", ->
expect frame.$.contentWindow.document.documentElement.getAttribute "lang"
.to.equal "ja"
done()
frame.html html
describe '#load()', ->
it 'should load the src url', ->
frame.load "../../buttons.html"
expect frame.$.src
.to.match /buttons\.html$/
describe '#size()', ->
it 'should return the iframe content size', (done) ->
frame.on "load", ->
switch window.devicePixelRatio
when 2
expect @size()
.to.deep.equal
width: "200.5px"
height: "100px"
when 3
expect @size()
.to.deep.equal
width: "201px"
height: "100px"
done()
frame.html html
describe '#resize()', ->
it 'should resize the iframe', (done) ->
frame.resize
width: "20px"
height: "10px"
expect frame.$.style.width
.to.equal "20px"
expect frame.$.style.height
.to.equal "10px"
done()
describe 'ButtonAnchor', ->
a = null
beforeEach ->
a = document.createElement "a"
describe '.parse()', ->
it 'should parse the anchor without attribute', ->
expect ButtonAnchor.parse a
.to.deep.equal
href: ""
text: ""
data:
count:
api: ""
href: ""
aria:
label: ""
style: ""
icon: ""
aria:
label: ""
it 'should parse the attribute href', ->
a.href = "https://buttons.github.io/"
expect ButtonAnchor.parse a
.to.have.property "href"
.and.equal a.href
it 'should parse the attribute data-text', ->
text = "test"
a.setAttribute "data-text", text
expect ButtonAnchor.parse a
.to.have.property "text"
.and.equal text
it 'should parse the text content', ->
text = "something"
a.appendChild document.createTextNode text
expect ButtonAnchor.parse a
.to.have.property "text"
.and.equal text
it 'should ignore the text content when the attribute data-text is given', ->
text = "something"
a.setAttribute "data-text", text
a.appendChild document.createTextNode "something else"
expect ButtonAnchor.parse a
.to.have.property "text"
.and.equal text
it 'should parse the attribute data-count-api', ->
api = "/repos/:user/:repo#item"
a.setAttribute "data-count-api", api
expect ButtonAnchor.parse a
.to.have.deep.property "data.count.api"
.and.equal api
it 'should parse the attribute data-count-href', ->
href = "https://github.com/"
a.setAttribute "data-count-href", href
expect ButtonAnchor.parse a
.to.have.deep.property "data.count.href"
.and.equal href
it 'should fallback data.cout.href to the attribute href when the attribute data-count-href is not given', ->
a.href = "https://github.com/"
expect ButtonAnchor.parse a
.to.have.deep.property "data.count.href"
.and.equal a.href
it 'should parse the attribute data-style', ->
style = "mega"
a.setAttribute "data-style", style
expect ButtonAnchor.parse a
.to.have.deep.property "data.style"
.and.equal style
it 'should parse the attribute data-icon', ->
icon = "octicon"
a.setAttribute "data-icon", icon
expect ButtonAnchor.parse a
.to.have.deep.property "data.icon"
.and.equal icon
describe 'ButtonFrame', ->
describe '#constructor()', ->
hash = Hash.encode ButtonAnchor.parse document.createElement "a"
it 'should callback with this twice', (done) ->
_this = null
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
_this = @
, (iframe) ->
expect _this
.to.equal @
done()
it 'should callback with the iframe as argument twice', (done) ->
frame = null
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
frame = iframe
expect iframe.tagName
.to.equal "IFRAME"
, (iframe) ->
expect iframe
.to.equal frame
done()
it 'should load the iframe twice after insert it into DOM', (done) ->
spy = sinon.spy()
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
@on "load", -> spy()
, (iframe) ->
@once "load", ->
expect spy
.to.have.been.calledTwice
iframe.parentNode.removeChild iframe
done()
document.body.appendChild iframe
it 'should load the iframe the first time by writing html', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
sinon.spy @, "html"
, (iframe) ->
expect @html
.to.have.been.calledOnce
@html.restore()
done()
it 'should set document.location.hash when load the first time by writing html', (done) ->
_hash = null
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
@once "load", ->
_hash = iframe.contentWindow.document.location.hash
, (iframe) ->
expect _hash
.to.equal hash
done()
it 'should load the iframe the second time by setting the src attribute', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
sinon.spy @, "html"
sinon.spy @, "load"
, (iframe) ->
expect @load
.to.have.been.calledOnce
expect @load
.to.have.been.calledAfter @html
@html.restore()
@load.restore()
done()
it 'should set document.location.href when load the second time by setting the src attribute', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
, (iframe) ->
@once "load", ->
expect iframe.contentWindow.document.location.hash
.to.equal hash
iframe.parentNode.removeChild iframe
done()
document.body.appendChild iframe
it 'should resize the iframe after the second load', (done) ->
new ButtonFrame hash, (iframe) ->
document.body.appendChild iframe
sinon.spy @, "html"
sinon.spy @, "load"
sinon.spy @, "size"
sinon.spy @, "resize"
, (iframe) ->
expect @size
.to.have.been.calledOnce
expect @size
.to.have.been.calledAfter @html
@once "load", ->
expect @resize
.to.have.been.calledOnce
expect @resize
.to.have.been.calledAfter @load
expect @resize.args[0][0]
.to.deep.equal @size.returnValues[0]
expect iframe.style.width
.to.equal @size.returnValues[0].width
expect iframe.style.height
.to.equal @size.returnValues[0].height
@html.restore()
@load.restore()
@size.restore()
@resize.restore()
iframe.parentNode.removeChild iframe
done()
document.body.appendChild iframe
describe 'ButtonFrameContent', ->
head = document.getElementsByTagName("head")[0]
base = null
bodyClassName = null
data =
"meta":
"X-RateLimit-Limit": "60",
"X-RateLimit-Remaining": "59",
"X-RateLimit-Reset": "1423391706",
"Cache-Control": "public, max-age=60, s-maxage=60",
"Last-Modified": "Sun, 08 Feb 2015 07:39:11 GMT",
"Vary": "Accept",
"X-GitHub-Media-Type": "github.v3",
"status": 200
"data":
"login": "ntkme",
"id": 899645,
"avatar_url": "https://avatars.githubusercontent.com/u/899645?v=3",
"gravatar_id": "",
"url": "https://api.github.com/users/ntkme",
"html_url": "https://github.com/ntkme",
"followers_url": "https://api.github.com/users/ntkme/followers",
"following_url": "https://api.github.com/users/ntkme/following{/other_user}",
"gists_url": "https://api.github.com/users/ntkme/gists{/gist_id}",
"starred_url": "https://api.github.com/users/ntkme/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ntkme/subscriptions",
"organizations_url": "https://api.github.com/users/ntkme/orgs",
"repos_url": "https://api.github.com/users/ntkme/repos",
"events_url": "https://api.github.com/users/ntkme/events{/privacy}",
"received_events_url": "https://api.github.com/users/ntkme/received_events",
"type": "User",
"site_admin": false,
"name": "なPI:NAME:<NAME>END_PIき",
"company": "",
"blog": "https://ntk.me",
"location": "California",
"email": "PI:EMAIL:<EMAIL>END_PI",
"hireable": true,
"bio": null,
"public_repos": 10,
"public_gists": 0,
"followers": 26,
"following": 0,
"created_at": "2011-07-07T03:26:58Z",
"updated_at": "2015-02-08T07:39:11Z"
javascript_protocals = [
"javascript:"
"JAVASCRIPT:"
"JavaScript:"
" javascript:"
" javascript:"
"\tjavascript:"
"\njavascript:"
"\rjavascript:"
"\fjavascript:"
]
beforeEach ->
bodyClassName= document.body.getAttribute "class"
base = document.getElementsByTagName("base")[0]
sinon.stub document.body, "appendChild"
afterEach ->
if bodyClassName
document.body.className = bodyClassName
else
document.body.removeAttribute "class"
document.body.appendChild.restore()
describe '#constructor()', ->
it 'should do nothing when options are missing', ->
new ButtonFrameContent()
expect base.getAttribute "href"
.to.be.null
expect document.body.appendChild
.to.have.not.been.called
it 'should not set base.href', ->
options =
href: "https://github.com/"
data: {}
aria: {}
new ButtonFrameContent options
expect base.getAttribute "href"
.to.be.null
it 'should set document.body.className when a style is given', ->
options =
data: style: "mega"
aria: {}
new ButtonFrameContent options
expect document.body.className
.to.equal options.data.style
it 'should append the button to document.body when the necessary options are given', ->
options =
data: {}
aria: {}
new ButtonFrameContent options
expect document.body.appendChild
.to.be.calledOnce
button = document.body.appendChild.args[0][0]
expect button
.to.have.property "className"
.and.equal "button"
it 'should append the button with given href', ->
options =
href: "https://ntkme.github.com/"
data: {}
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect button.getAttribute "href"
.to.equal options.href
it 'should filter javascript in the href', ->
for href, i in javascript_protocals
options =
href: href
data: count: href: href
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[i][0]
if button.protocol
expect button.protocol
.to.not.equal "javascript:"
else
expect button.href
.to.not.match /^javascript:/i
it 'should append the button with the default icon', ->
options =
data: {}
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect " #{button.firstChild.className} ".indexOf " #{CONFIG_ICON_DEFAULT} "
.to.be.at.least 0
it 'should append the button with given icon', ->
options =
data: icon: "octicon-star"
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect " #{button.firstChild.className} ".indexOf " #{options.data.icon} "
.to.be.at.least 0
it 'should append the button with given text', ->
options =
text: "Follow"
data: {}
aria: {}
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect button.lastChild.innerHTML
.to.equal options.text
it 'should append the button with given aria label', ->
options =
data: {}
aria: label: "GitHub"
new ButtonFrameContent options
button = document.body.appendChild.args[0][0]
expect button.getAttribute "aria-label"
.to.equal options.aria.label
it 'should append the count to document.body when the necessary options are given', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
expect document.body.appendChild
.to.be.calledTwice
count = document.body.appendChild.args[1][0]
expect count
.to.have.property "className"
.and.equal "count"
head.appendChild.restore()
it 'should append the count with given data.count.href', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
href: "https://gist.github.com/"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.getAttribute "href"
.to.equal options.data.count.href
head.appendChild.restore()
it 'should append the count with #entry from api response', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "26"
head.appendChild.restore()
it 'should append the count with #entry from api response by prepending missing / to api', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "26"
head.appendChild.restore()
it 'should append the count with large number split by comma', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#id"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "899,645"
head.appendChild.restore()
it 'should append the count with given aria label', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#followers"
aria: label: "# followers on GitHub"
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.getAttribute "aria-label"
.to.equal "26 followers on GitHub"
head.appendChild.restore()
it 'should append the count with text undefined when missing # in api', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "undefined"
head.appendChild.restore()
it 'should append the count with text undefined when api #entry does not exist', ->
sinon.stub head, "appendChild", -> window.callback data
options =
data: count:
api: "/dummy/api#fail"
aria: {}
aria: {}
new ButtonFrameContent options
count = document.body.appendChild.args[1][0]
expect count.lastChild.innerHTML
.to.equal "undefined"
head.appendChild.restore()
it 'should not append the count when it fails to pull api data', ->
sinon.stub head, "appendChild", -> window.callback meta: status: 404
options =
data: count:
api: "/dummy/api#followers"
aria: {}
aria: {}
new ButtonFrameContent options
expect document.body.appendChild
.to.be.calledOnce
button = document.body.appendChild.args[0][0]
expect button
.to.have.property "className"
.and.equal "button"
head.appendChild.restore()
|
[
{
"context": "l\n\n before (done) ->\n k1 = new Kitten(name : 'Nina')\n k2 = new Kitten(name : 'Margarita')\n k1.",
"end": 333,
"score": 0.9993351697921753,
"start": 329,
"tag": "NAME",
"value": "Nina"
},
{
"context": "Kitten(name : 'Nina')\n k2 = new Kitten(name : 'Marg... | servertest/integration/kittens.coffee | mafiuss/sandcat | 1 | request = require 'supertest'
express = require 'express'
app = require '../../app'
mongoose = require 'mongoose'
Kitten = require '../../subapps/kittens/models/kitten'
mongoose.connect 'mongodb://localhost/cat-test'
describe 'kiitens web application', ->
k1 = null
k2 = null
before (done) ->
k1 = new Kitten(name : 'Nina')
k2 = new Kitten(name : 'Margarita')
k1.save (err) ->
if err? then throw err
k2.save (err) ->
if err? then throw err
done()
describe 'GET all kittens', ->
it 'respond with json', (done) ->
request app
.get '/cat/kittens/all'
.set 'Accept', 'application/json'
.expect 'Content-Type', /json/
.expect 200, done
describe 'GET a specific kitten', ->
it 'respond with json and finds a kitten', (done) ->
console.log 'kitten 1 ', k1
request app
.get "/cat/kittens/#{k1._id}"
.set 'Accept', 'application/json'
.expect 'Content-Type', /json/
.expect 200, done
after (done) ->
Kitten.remove name: 'Nina', ->
Kitten.remove name: 'Margarita', ->
done()
| 224469 | request = require 'supertest'
express = require 'express'
app = require '../../app'
mongoose = require 'mongoose'
Kitten = require '../../subapps/kittens/models/kitten'
mongoose.connect 'mongodb://localhost/cat-test'
describe 'kiitens web application', ->
k1 = null
k2 = null
before (done) ->
k1 = new Kitten(name : '<NAME>')
k2 = new Kitten(name : '<NAME>')
k1.save (err) ->
if err? then throw err
k2.save (err) ->
if err? then throw err
done()
describe 'GET all kittens', ->
it 'respond with json', (done) ->
request app
.get '/cat/kittens/all'
.set 'Accept', 'application/json'
.expect 'Content-Type', /json/
.expect 200, done
describe 'GET a specific kitten', ->
it 'respond with json and finds a kitten', (done) ->
console.log 'kitten 1 ', k1
request app
.get "/cat/kittens/#{k1._id}"
.set 'Accept', 'application/json'
.expect 'Content-Type', /json/
.expect 200, done
after (done) ->
Kitten.remove name: '<NAME>', ->
Kitten.remove name: '<NAME>', ->
done()
| true | request = require 'supertest'
express = require 'express'
app = require '../../app'
mongoose = require 'mongoose'
Kitten = require '../../subapps/kittens/models/kitten'
mongoose.connect 'mongodb://localhost/cat-test'
describe 'kiitens web application', ->
k1 = null
k2 = null
before (done) ->
k1 = new Kitten(name : 'PI:NAME:<NAME>END_PI')
k2 = new Kitten(name : 'PI:NAME:<NAME>END_PI')
k1.save (err) ->
if err? then throw err
k2.save (err) ->
if err? then throw err
done()
describe 'GET all kittens', ->
it 'respond with json', (done) ->
request app
.get '/cat/kittens/all'
.set 'Accept', 'application/json'
.expect 'Content-Type', /json/
.expect 200, done
describe 'GET a specific kitten', ->
it 'respond with json and finds a kitten', (done) ->
console.log 'kitten 1 ', k1
request app
.get "/cat/kittens/#{k1._id}"
.set 'Accept', 'application/json'
.expect 'Content-Type', /json/
.expect 200, done
after (done) ->
Kitten.remove name: 'PI:NAME:<NAME>END_PI', ->
Kitten.remove name: 'PI:NAME:<NAME>END_PI', ->
done()
|
[
{
"context": "###\nCopyright (c) 2013, Alexander Cherniuk <ts33kr@gmail.com>\nAll rights reserved.\n\nRedistri",
"end": 42,
"score": 0.999845027923584,
"start": 24,
"tag": "NAME",
"value": "Alexander Cherniuk"
},
{
"context": "###\nCopyright (c) 2013, Alexander Cherniuk <ts33kr@gmail... | library/gearbox/pinpoint.coffee | ts33kr/granite | 6 | ###
Copyright (c) 2013, Alexander Cherniuk <ts33kr@gmail.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
_ = require "lodash"
assert = require "assert"
asciify = require "asciify"
connect = require "connect"
request = require "request"
logger = require "winston"
colors = require "colors"
async = require "async"
nconf = require "nconf"
https = require "https"
http = require "http"
util = require "util"
{external} = require "../membrane/remote"
{Barebones} = require "../membrane/skeleton"
{Preflight} = require "../membrane/preflight"
# This abstract compound provides the curious functionality that is
# greatly improving the way you would construct the client side code
# that is responsible for rendering UI elements. It allows you to run
# client side (external) code once the specified selector is available.
# All of this lets you easily implement the viewport/slow architecture.
# It also allows you to react on when selector changes or disappears.
module.exports.Pinpoint = class Pinpoint extends Preflight
# This is a marker that indicates to some internal subsystems
# that this class has to be considered abstract and therefore
# can not be treated as a complete class implementation. This
# mainly is used to exclude or account for abstract classes.
# Once inherited from, the inheritee is not abstract anymore.
@abstract yes
# This block here defines a set of Bower dependencies that are
# required by the client site part of the code that constitutes
# this service or compound. Dependencies can be restricted to a
# certain version and also they can have customized entrypoint.
# Refer to `BowerSupport` class implementation for information.
@bower "mutation-summary", "src/mutation-summary.js"
# A part of the internal implementation of pinpointing component.
# Provides a common interface (for the components implementation)
# to invoke the `mutation-summary` library with some predefined
# parameters, in addition to the ones that will be passed into
# this method as arguments. Please, do not use method directly,
# but rather use one of the definitions that follow below this.
mutationSummary: external (selector, callback) ->
noSelector = "got no valid selector for mutations"
noCallback = "no valid callback function is given"
noLibrary = "mutation-summary library is missing"
assert _.isFunction(try MutationSummary), noLibrary
assert _.isFunction(c = callback or null), noCallback
assert _.isString(s = selector or null), noSelector
assert instruct = try queries: [element: selector]
assert uservice = try this.service.blue.underline
pp = "Watching mutation of %s CSS selector for %s"
try logger.info pp, selector.bold.blue, uservice
make = -> observer = new MutationSummary instruct
creator = (fn) -> instruct.callback = fn; make()
this.emit "mutation-summary", selector, callback
return creator.call this, callback or _.noop
# Pinpoint when the specified selector vanishes (is parented or
# moved) and then invoke the supplied rendering function, which
# will receive the newly pinpointed node as its first argument.
# If multiple nodes with this selector vanished, then renderer
# will be invoked once for every disappeared node of a selector.
# Selectors must conform to the strict subset of CSS selectors.
@parented: @transferred (selector, renderer) ->
noSelector = "no valid CSS selector is supplied"
noRenderer = "no valid rendering function given"
assert _.isFunction(renderer or null), noRenderer
assert _.isString(sel = selector or 0), noSelector
go = (n) => try $(n).data("owners") or new Array()
@mutationSummary sel, (s) => _.each s, (summary) =>
na = "missing the element reparented summary"
pe = "reparenting %s elements for %s service"
assert _.isArray(moved = summary.reparented), na
return unless (try moved.length or null) > 0
try logger.info pe, moved.length, this.service
$(node).data owners: go(node) for node in moved
go(n).push this for n in moved unless @ in go(n)
@emit "pp-parented", selector, renderer, moved
_.each moved, (n) => renderer.call @, n, go n
# Pinpoint when the specified selector vanishes (is removed or
# detach) and then invoke the supplied rendering function, which
# will receive the newly pinpointed node as its first argument.
# If multiple nodes with this selector vanished, then renderer
# will be invoked once for every disappeared node of a selector.
# Selectors must conform to the strict subset of CSS selectors.
@vanished: @transferred (selector, renderer) ->
noSelector = "no valid CSS selector is supplied"
noRenderer = "no valid rendering function given"
assert _.isFunction(renderer or null), noRenderer
assert _.isString(sel = selector or 0), noSelector
go = (n) => try $(n).data("owners") or new Array()
@mutationSummary sel, (s) => _.each s, (summary) =>
na = "missing the element vanishing summary"
pe = "vanishing %s elements for %s service"
assert _.isArray(moved = summary.removed), na
return unless (try moved.length or null) > 0
try logger.info pe, moved.length, this.service
$(node).data owners: go(node) for node in moved
go(n).push this for n in moved unless @ in go(n)
@emit "pp-vanished", selector, renderer, moved
_.each moved, (n) => renderer.call @, n, go n
# Pinpoint when the specified selector appears (or if it already
# exists) and then invoke the supplied rendering function, which
# will receive the newly pinpointed node as its first argument.
# If multiple nodes with this selector is found, then renderer
# will be invoked once for every discovered node of a selector.
# Selectors must conform to the strict subset of CSS selectors.
@pinpoint: @transferred (selector, renderer) ->
noSelector = "no valid CSS selector is supplied"
noRenderer = "no valid rendering function given"
assert _.isFunction(renderer or null), noRenderer
assert _.isString(sel = selector or 0), noSelector
go = (n) => try $(n).data("owners") or new Array()
@mutationSummary sel, (s) => _.each s, (summary) =>
na = "missing the element addition summary"
pe = "pinpointed %s elements for %s service"
assert _.isArray(added = summary.added), na
return unless (try added.length or null) > 0
try logger.info pe, added.length, this.service
$(node).data owners: go(node) for node in added
go(n).push this for n in added unless @ in go(n)
@emit "pp-pinpoint", selector, renderer, added
_.each added, (n) => renderer.call @, n, go n
| 4447 | ###
Copyright (c) 2013, <NAME> <<EMAIL>>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
_ = require "lodash"
assert = require "assert"
asciify = require "asciify"
connect = require "connect"
request = require "request"
logger = require "winston"
colors = require "colors"
async = require "async"
nconf = require "nconf"
https = require "https"
http = require "http"
util = require "util"
{external} = require "../membrane/remote"
{Barebones} = require "../membrane/skeleton"
{Preflight} = require "../membrane/preflight"
# This abstract compound provides the curious functionality that is
# greatly improving the way you would construct the client side code
# that is responsible for rendering UI elements. It allows you to run
# client side (external) code once the specified selector is available.
# All of this lets you easily implement the viewport/slow architecture.
# It also allows you to react on when selector changes or disappears.
module.exports.Pinpoint = class Pinpoint extends Preflight
# This is a marker that indicates to some internal subsystems
# that this class has to be considered abstract and therefore
# can not be treated as a complete class implementation. This
# mainly is used to exclude or account for abstract classes.
# Once inherited from, the inheritee is not abstract anymore.
@abstract yes
# This block here defines a set of Bower dependencies that are
# required by the client site part of the code that constitutes
# this service or compound. Dependencies can be restricted to a
# certain version and also they can have customized entrypoint.
# Refer to `BowerSupport` class implementation for information.
@bower "mutation-summary", "src/mutation-summary.js"
# A part of the internal implementation of pinpointing component.
# Provides a common interface (for the components implementation)
# to invoke the `mutation-summary` library with some predefined
# parameters, in addition to the ones that will be passed into
# this method as arguments. Please, do not use method directly,
# but rather use one of the definitions that follow below this.
mutationSummary: external (selector, callback) ->
noSelector = "got no valid selector for mutations"
noCallback = "no valid callback function is given"
noLibrary = "mutation-summary library is missing"
assert _.isFunction(try MutationSummary), noLibrary
assert _.isFunction(c = callback or null), noCallback
assert _.isString(s = selector or null), noSelector
assert instruct = try queries: [element: selector]
assert uservice = try this.service.blue.underline
pp = "Watching mutation of %s CSS selector for %s"
try logger.info pp, selector.bold.blue, uservice
make = -> observer = new MutationSummary instruct
creator = (fn) -> instruct.callback = fn; make()
this.emit "mutation-summary", selector, callback
return creator.call this, callback or _.noop
# Pinpoint when the specified selector vanishes (is parented or
# moved) and then invoke the supplied rendering function, which
# will receive the newly pinpointed node as its first argument.
# If multiple nodes with this selector vanished, then renderer
# will be invoked once for every disappeared node of a selector.
# Selectors must conform to the strict subset of CSS selectors.
@parented: @transferred (selector, renderer) ->
noSelector = "no valid CSS selector is supplied"
noRenderer = "no valid rendering function given"
assert _.isFunction(renderer or null), noRenderer
assert _.isString(sel = selector or 0), noSelector
go = (n) => try $(n).data("owners") or new Array()
@mutationSummary sel, (s) => _.each s, (summary) =>
na = "missing the element reparented summary"
pe = "reparenting %s elements for %s service"
assert _.isArray(moved = summary.reparented), na
return unless (try moved.length or null) > 0
try logger.info pe, moved.length, this.service
$(node).data owners: go(node) for node in moved
go(n).push this for n in moved unless @ in go(n)
@emit "pp-parented", selector, renderer, moved
_.each moved, (n) => renderer.call @, n, go n
# Pinpoint when the specified selector vanishes (is removed or
# detach) and then invoke the supplied rendering function, which
# will receive the newly pinpointed node as its first argument.
# If multiple nodes with this selector vanished, then renderer
# will be invoked once for every disappeared node of a selector.
# Selectors must conform to the strict subset of CSS selectors.
@vanished: @transferred (selector, renderer) ->
noSelector = "no valid CSS selector is supplied"
noRenderer = "no valid rendering function given"
assert _.isFunction(renderer or null), noRenderer
assert _.isString(sel = selector or 0), noSelector
go = (n) => try $(n).data("owners") or new Array()
@mutationSummary sel, (s) => _.each s, (summary) =>
na = "missing the element vanishing summary"
pe = "vanishing %s elements for %s service"
assert _.isArray(moved = summary.removed), na
return unless (try moved.length or null) > 0
try logger.info pe, moved.length, this.service
$(node).data owners: go(node) for node in moved
go(n).push this for n in moved unless @ in go(n)
@emit "pp-vanished", selector, renderer, moved
_.each moved, (n) => renderer.call @, n, go n
# Pinpoint when the specified selector appears (or if it already
# exists) and then invoke the supplied rendering function, which
# will receive the newly pinpointed node as its first argument.
# If multiple nodes with this selector is found, then renderer
# will be invoked once for every discovered node of a selector.
# Selectors must conform to the strict subset of CSS selectors.
@pinpoint: @transferred (selector, renderer) ->
noSelector = "no valid CSS selector is supplied"
noRenderer = "no valid rendering function given"
assert _.isFunction(renderer or null), noRenderer
assert _.isString(sel = selector or 0), noSelector
go = (n) => try $(n).data("owners") or new Array()
@mutationSummary sel, (s) => _.each s, (summary) =>
na = "missing the element addition summary"
pe = "pinpointed %s elements for %s service"
assert _.isArray(added = summary.added), na
return unless (try added.length or null) > 0
try logger.info pe, added.length, this.service
$(node).data owners: go(node) for node in added
go(n).push this for n in added unless @ in go(n)
@emit "pp-pinpoint", selector, renderer, added
_.each added, (n) => renderer.call @, n, go n
| true | ###
Copyright (c) 2013, PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
_ = require "lodash"
assert = require "assert"
asciify = require "asciify"
connect = require "connect"
request = require "request"
logger = require "winston"
colors = require "colors"
async = require "async"
nconf = require "nconf"
https = require "https"
http = require "http"
util = require "util"
{external} = require "../membrane/remote"
{Barebones} = require "../membrane/skeleton"
{Preflight} = require "../membrane/preflight"
# This abstract compound provides the curious functionality that is
# greatly improving the way you would construct the client side code
# that is responsible for rendering UI elements. It allows you to run
# client side (external) code once the specified selector is available.
# All of this lets you easily implement the viewport/slow architecture.
# It also allows you to react on when selector changes or disappears.
module.exports.Pinpoint = class Pinpoint extends Preflight
# This is a marker that indicates to some internal subsystems
# that this class has to be considered abstract and therefore
# can not be treated as a complete class implementation. This
# mainly is used to exclude or account for abstract classes.
# Once inherited from, the inheritee is not abstract anymore.
@abstract yes
# This block here defines a set of Bower dependencies that are
# required by the client site part of the code that constitutes
# this service or compound. Dependencies can be restricted to a
# certain version and also they can have customized entrypoint.
# Refer to `BowerSupport` class implementation for information.
@bower "mutation-summary", "src/mutation-summary.js"
# A part of the internal implementation of pinpointing component.
# Provides a common interface (for the components implementation)
# to invoke the `mutation-summary` library with some predefined
# parameters, in addition to the ones that will be passed into
# this method as arguments. Please, do not use method directly,
# but rather use one of the definitions that follow below this.
mutationSummary: external (selector, callback) ->
noSelector = "got no valid selector for mutations"
noCallback = "no valid callback function is given"
noLibrary = "mutation-summary library is missing"
assert _.isFunction(try MutationSummary), noLibrary
assert _.isFunction(c = callback or null), noCallback
assert _.isString(s = selector or null), noSelector
assert instruct = try queries: [element: selector]
assert uservice = try this.service.blue.underline
pp = "Watching mutation of %s CSS selector for %s"
try logger.info pp, selector.bold.blue, uservice
make = -> observer = new MutationSummary instruct
creator = (fn) -> instruct.callback = fn; make()
this.emit "mutation-summary", selector, callback
return creator.call this, callback or _.noop
# Pinpoint when the specified selector vanishes (is parented or
# moved) and then invoke the supplied rendering function, which
# will receive the newly pinpointed node as its first argument.
# If multiple nodes with this selector vanished, then renderer
# will be invoked once for every disappeared node of a selector.
# Selectors must conform to the strict subset of CSS selectors.
@parented: @transferred (selector, renderer) ->
noSelector = "no valid CSS selector is supplied"
noRenderer = "no valid rendering function given"
assert _.isFunction(renderer or null), noRenderer
assert _.isString(sel = selector or 0), noSelector
go = (n) => try $(n).data("owners") or new Array()
@mutationSummary sel, (s) => _.each s, (summary) =>
na = "missing the element reparented summary"
pe = "reparenting %s elements for %s service"
assert _.isArray(moved = summary.reparented), na
return unless (try moved.length or null) > 0
try logger.info pe, moved.length, this.service
$(node).data owners: go(node) for node in moved
go(n).push this for n in moved unless @ in go(n)
@emit "pp-parented", selector, renderer, moved
_.each moved, (n) => renderer.call @, n, go n
# Pinpoint when the specified selector vanishes (is removed or
# detach) and then invoke the supplied rendering function, which
# will receive the newly pinpointed node as its first argument.
# If multiple nodes with this selector vanished, then renderer
# will be invoked once for every disappeared node of a selector.
# Selectors must conform to the strict subset of CSS selectors.
@vanished: @transferred (selector, renderer) ->
noSelector = "no valid CSS selector is supplied"
noRenderer = "no valid rendering function given"
assert _.isFunction(renderer or null), noRenderer
assert _.isString(sel = selector or 0), noSelector
go = (n) => try $(n).data("owners") or new Array()
@mutationSummary sel, (s) => _.each s, (summary) =>
na = "missing the element vanishing summary"
pe = "vanishing %s elements for %s service"
assert _.isArray(moved = summary.removed), na
return unless (try moved.length or null) > 0
try logger.info pe, moved.length, this.service
$(node).data owners: go(node) for node in moved
go(n).push this for n in moved unless @ in go(n)
@emit "pp-vanished", selector, renderer, moved
_.each moved, (n) => renderer.call @, n, go n
# Pinpoint when the specified selector appears (or if it already
# exists) and then invoke the supplied rendering function, which
# will receive the newly pinpointed node as its first argument.
# If multiple nodes with this selector is found, then renderer
# will be invoked once for every discovered node of a selector.
# Selectors must conform to the strict subset of CSS selectors.
@pinpoint: @transferred (selector, renderer) ->
noSelector = "no valid CSS selector is supplied"
noRenderer = "no valid rendering function given"
assert _.isFunction(renderer or null), noRenderer
assert _.isString(sel = selector or 0), noSelector
go = (n) => try $(n).data("owners") or new Array()
@mutationSummary sel, (s) => _.each s, (summary) =>
na = "missing the element addition summary"
pe = "pinpointed %s elements for %s service"
assert _.isArray(added = summary.added), na
return unless (try added.length or null) > 0
try logger.info pe, added.length, this.service
$(node).data owners: go(node) for node in added
go(n).push this for n in added unless @ in go(n)
@emit "pp-pinpoint", selector, renderer, added
_.each added, (n) => renderer.call @, n, go n
|
[
{
"context": "###\n# middleware/is_authorized.coffee\n#\n# © 2014 Dan Nichols\n# See LICENSE for more details\n#\n# Middleware to ",
"end": 60,
"score": 0.9997053146362305,
"start": 49,
"tag": "NAME",
"value": "Dan Nichols"
}
] | lib/middleware/is_authorized.coffee | dlnichols/h_media | 0 | ###
# middleware/is_authorized.coffee
#
# © 2014 Dan Nichols
# See LICENSE for more details
#
# Middleware to determine if a user is authorized
###
'use strict'
# External libs
debug = require('debug') 'hMedia:middleware:isAuthorized'
###
# isAuthenticated
###
module.exports = exports =
# Return 401 unless a user is authenticated
(req, res, next) ->
if process.env.SKIP_AUTH
debug 'Skipping authorization...'
return next()
debug 'Authorization not yet implemented.'
next()
| 14500 | ###
# middleware/is_authorized.coffee
#
# © 2014 <NAME>
# See LICENSE for more details
#
# Middleware to determine if a user is authorized
###
'use strict'
# External libs
debug = require('debug') 'hMedia:middleware:isAuthorized'
###
# isAuthenticated
###
module.exports = exports =
# Return 401 unless a user is authenticated
(req, res, next) ->
if process.env.SKIP_AUTH
debug 'Skipping authorization...'
return next()
debug 'Authorization not yet implemented.'
next()
| true | ###
# middleware/is_authorized.coffee
#
# © 2014 PI:NAME:<NAME>END_PI
# See LICENSE for more details
#
# Middleware to determine if a user is authorized
###
'use strict'
# External libs
debug = require('debug') 'hMedia:middleware:isAuthorized'
###
# isAuthenticated
###
module.exports = exports =
# Return 401 unless a user is authenticated
(req, res, next) ->
if process.env.SKIP_AUTH
debug 'Skipping authorization...'
return next()
debug 'Authorization not yet implemented.'
next()
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.998561680316925,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-stream2-writable.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
TestWriter = ->
W.apply this, arguments
@buffer = []
@written = 0
return
# simulate a small unpredictable latency
# tiny node-tap lookalike.
test = (name, fn) ->
count++
tests.push [
name
fn
]
return
run = ->
next = tests.shift()
return console.error("ok") unless next
name = next[0]
fn = next[1]
console.log "# %s", name
fn
same: assert.deepEqual
equal: assert.equal
end: ->
count--
run()
return
return
common = require("../common.js")
W = require("_stream_writable")
D = require("_stream_duplex")
assert = require("assert")
util = require("util")
util.inherits TestWriter, W
TestWriter::_write = (chunk, encoding, cb) ->
setTimeout (->
@buffer.push chunk.toString()
@written += chunk.length
cb()
return
).bind(this), Math.floor(Math.random() * 10)
return
chunks = new Array(50)
i = 0
while i < chunks.length
chunks[i] = new Array(i + 1).join("x")
i++
tests = []
count = 0
# ensure all tests have run
process.on "exit", ->
assert.equal count, 0
return
process.nextTick run
test "write fast", (t) ->
tw = new TestWriter(highWaterMark: 100)
tw.on "finish", ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.end()
return
chunks.forEach (chunk) ->
# screw backpressure. Just buffer it all up.
tw.write chunk
return
tw.end()
return
test "write slow", (t) ->
tw = new TestWriter(highWaterMark: 100)
tw.on "finish", ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.end()
return
i = 0
(W = ->
tw.write chunks[i++]
if i < chunks.length
setTimeout W, 10
else
tw.end()
return
)()
return
test "write backpressure", (t) ->
tw = new TestWriter(highWaterMark: 50)
drains = 0
tw.on "finish", ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.equal drains, 17
t.end()
return
tw.on "drain", ->
drains++
return
i = 0
(W = ->
loop
ret = tw.write(chunks[i++])
break unless ret isnt false and i < chunks.length
if i < chunks.length
assert tw._writableState.length >= 50
tw.once "drain", W
else
tw.end()
return
)()
return
test "write bufferize", (t) ->
tw = new TestWriter(highWaterMark: 100)
encodings = [
"hex"
"utf8"
"utf-8"
"ascii"
"binary"
"base64"
"ucs2"
"ucs-2"
"utf16le"
"utf-16le"
`undefined`
]
tw.on "finish", ->
t.same tw.buffer, chunks, "got the expected chunks"
return
chunks.forEach (chunk, i) ->
enc = encodings[i % encodings.length]
chunk = new Buffer(chunk)
tw.write chunk.toString(enc), enc
return
t.end()
return
test "write no bufferize", (t) ->
tw = new TestWriter(
highWaterMark: 100
decodeStrings: false
)
tw._write = (chunk, encoding, cb) ->
assert typeof chunk is "string"
chunk = new Buffer(chunk, encoding)
TestWriter::_write.call this, chunk, encoding, cb
encodings = [
"hex"
"utf8"
"utf-8"
"ascii"
"binary"
"base64"
"ucs2"
"ucs-2"
"utf16le"
"utf-16le"
`undefined`
]
tw.on "finish", ->
t.same tw.buffer, chunks, "got the expected chunks"
return
chunks.forEach (chunk, i) ->
enc = encodings[i % encodings.length]
chunk = new Buffer(chunk)
tw.write chunk.toString(enc), enc
return
t.end()
return
test "write callbacks", (t) ->
callbacks = chunks.map((chunk, i) ->
[
i
(er) ->
callbacks._called[i] = chunk
]
).reduce((set, x) ->
set["callback-" + x[0]] = x[1]
set
, {})
callbacks._called = []
tw = new TestWriter(highWaterMark: 100)
tw.on "finish", ->
process.nextTick ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.same callbacks._called, chunks, "called all callbacks"
t.end()
return
return
chunks.forEach (chunk, i) ->
tw.write chunk, callbacks["callback-" + i]
return
tw.end()
return
test "end callback", (t) ->
tw = new TestWriter()
tw.end ->
t.end()
return
return
test "end callback with chunk", (t) ->
tw = new TestWriter()
tw.end new Buffer("hello world"), ->
t.end()
return
return
test "end callback with chunk and encoding", (t) ->
tw = new TestWriter()
tw.end "hello world", "ascii", ->
t.end()
return
return
test "end callback after .write() call", (t) ->
tw = new TestWriter()
tw.write new Buffer("hello world")
tw.end ->
t.end()
return
return
test "end callback called after write callback", (t) ->
tw = new TestWriter()
writeCalledback = false
tw.write new Buffer("hello world"), ->
writeCalledback = true
return
tw.end ->
t.equal writeCalledback, true
t.end()
return
return
test "encoding should be ignored for buffers", (t) ->
tw = new W()
hex = "018b5e9a8f6236ffe30e31baf80d2cf6eb"
tw._write = (chunk, encoding, cb) ->
t.equal chunk.toString("hex"), hex
t.end()
return
buf = new Buffer(hex, "hex")
tw.write buf, "binary"
return
test "writables are not pipable", (t) ->
w = new W()
w._write = ->
gotError = false
w.on "error", (er) ->
gotError = true
return
w.pipe process.stdout
assert gotError
t.end()
return
test "duplexes are pipable", (t) ->
d = new D()
d._read = ->
d._write = ->
gotError = false
d.on "error", (er) ->
gotError = true
return
d.pipe process.stdout
assert not gotError
t.end()
return
test "end(chunk) two times is an error", (t) ->
w = new W()
w._write = ->
gotError = false
w.on "error", (er) ->
gotError = true
t.equal er.message, "write after end"
return
w.end "this is the end"
w.end "and so is this"
process.nextTick ->
assert gotError
t.end()
return
return
test "dont end while writing", (t) ->
w = new W()
wrote = false
w._write = (chunk, e, cb) ->
assert not @writing
wrote = true
@writing = true
setTimeout ->
@writing = false
cb()
return
return
w.on "finish", ->
assert wrote
t.end()
return
w.write Buffer(0)
w.end()
return
test "finish does not come before write cb", (t) ->
w = new W()
writeCb = false
w._write = (chunk, e, cb) ->
setTimeout (->
writeCb = true
cb()
return
), 10
return
w.on "finish", ->
assert writeCb
t.end()
return
w.write Buffer(0)
w.end()
return
test "finish does not come before sync _write cb", (t) ->
w = new W()
writeCb = false
w._write = (chunk, e, cb) ->
cb()
return
w.on "finish", ->
assert writeCb
t.end()
return
w.write Buffer(0), (er) ->
writeCb = true
return
w.end()
return
test "finish is emitted if last chunk is empty", (t) ->
w = new W()
w._write = (chunk, e, cb) ->
process.nextTick cb
return
w.on "finish", ->
t.end()
return
w.write Buffer(1)
w.end Buffer(0)
return
| 95630 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
TestWriter = ->
W.apply this, arguments
@buffer = []
@written = 0
return
# simulate a small unpredictable latency
# tiny node-tap lookalike.
test = (name, fn) ->
count++
tests.push [
name
fn
]
return
run = ->
next = tests.shift()
return console.error("ok") unless next
name = next[0]
fn = next[1]
console.log "# %s", name
fn
same: assert.deepEqual
equal: assert.equal
end: ->
count--
run()
return
return
common = require("../common.js")
W = require("_stream_writable")
D = require("_stream_duplex")
assert = require("assert")
util = require("util")
util.inherits TestWriter, W
TestWriter::_write = (chunk, encoding, cb) ->
setTimeout (->
@buffer.push chunk.toString()
@written += chunk.length
cb()
return
).bind(this), Math.floor(Math.random() * 10)
return
chunks = new Array(50)
i = 0
while i < chunks.length
chunks[i] = new Array(i + 1).join("x")
i++
tests = []
count = 0
# ensure all tests have run
process.on "exit", ->
assert.equal count, 0
return
process.nextTick run
test "write fast", (t) ->
tw = new TestWriter(highWaterMark: 100)
tw.on "finish", ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.end()
return
chunks.forEach (chunk) ->
# screw backpressure. Just buffer it all up.
tw.write chunk
return
tw.end()
return
test "write slow", (t) ->
tw = new TestWriter(highWaterMark: 100)
tw.on "finish", ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.end()
return
i = 0
(W = ->
tw.write chunks[i++]
if i < chunks.length
setTimeout W, 10
else
tw.end()
return
)()
return
test "write backpressure", (t) ->
tw = new TestWriter(highWaterMark: 50)
drains = 0
tw.on "finish", ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.equal drains, 17
t.end()
return
tw.on "drain", ->
drains++
return
i = 0
(W = ->
loop
ret = tw.write(chunks[i++])
break unless ret isnt false and i < chunks.length
if i < chunks.length
assert tw._writableState.length >= 50
tw.once "drain", W
else
tw.end()
return
)()
return
test "write bufferize", (t) ->
tw = new TestWriter(highWaterMark: 100)
encodings = [
"hex"
"utf8"
"utf-8"
"ascii"
"binary"
"base64"
"ucs2"
"ucs-2"
"utf16le"
"utf-16le"
`undefined`
]
tw.on "finish", ->
t.same tw.buffer, chunks, "got the expected chunks"
return
chunks.forEach (chunk, i) ->
enc = encodings[i % encodings.length]
chunk = new Buffer(chunk)
tw.write chunk.toString(enc), enc
return
t.end()
return
test "write no bufferize", (t) ->
tw = new TestWriter(
highWaterMark: 100
decodeStrings: false
)
tw._write = (chunk, encoding, cb) ->
assert typeof chunk is "string"
chunk = new Buffer(chunk, encoding)
TestWriter::_write.call this, chunk, encoding, cb
encodings = [
"hex"
"utf8"
"utf-8"
"ascii"
"binary"
"base64"
"ucs2"
"ucs-2"
"utf16le"
"utf-16le"
`undefined`
]
tw.on "finish", ->
t.same tw.buffer, chunks, "got the expected chunks"
return
chunks.forEach (chunk, i) ->
enc = encodings[i % encodings.length]
chunk = new Buffer(chunk)
tw.write chunk.toString(enc), enc
return
t.end()
return
test "write callbacks", (t) ->
callbacks = chunks.map((chunk, i) ->
[
i
(er) ->
callbacks._called[i] = chunk
]
).reduce((set, x) ->
set["callback-" + x[0]] = x[1]
set
, {})
callbacks._called = []
tw = new TestWriter(highWaterMark: 100)
tw.on "finish", ->
process.nextTick ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.same callbacks._called, chunks, "called all callbacks"
t.end()
return
return
chunks.forEach (chunk, i) ->
tw.write chunk, callbacks["callback-" + i]
return
tw.end()
return
test "end callback", (t) ->
tw = new TestWriter()
tw.end ->
t.end()
return
return
test "end callback with chunk", (t) ->
tw = new TestWriter()
tw.end new Buffer("hello world"), ->
t.end()
return
return
test "end callback with chunk and encoding", (t) ->
tw = new TestWriter()
tw.end "hello world", "ascii", ->
t.end()
return
return
test "end callback after .write() call", (t) ->
tw = new TestWriter()
tw.write new Buffer("hello world")
tw.end ->
t.end()
return
return
test "end callback called after write callback", (t) ->
tw = new TestWriter()
writeCalledback = false
tw.write new Buffer("hello world"), ->
writeCalledback = true
return
tw.end ->
t.equal writeCalledback, true
t.end()
return
return
test "encoding should be ignored for buffers", (t) ->
tw = new W()
hex = "018b5e9a8f6236ffe30e31baf80d2cf6eb"
tw._write = (chunk, encoding, cb) ->
t.equal chunk.toString("hex"), hex
t.end()
return
buf = new Buffer(hex, "hex")
tw.write buf, "binary"
return
test "writables are not pipable", (t) ->
w = new W()
w._write = ->
gotError = false
w.on "error", (er) ->
gotError = true
return
w.pipe process.stdout
assert gotError
t.end()
return
test "duplexes are pipable", (t) ->
d = new D()
d._read = ->
d._write = ->
gotError = false
d.on "error", (er) ->
gotError = true
return
d.pipe process.stdout
assert not gotError
t.end()
return
test "end(chunk) two times is an error", (t) ->
w = new W()
w._write = ->
gotError = false
w.on "error", (er) ->
gotError = true
t.equal er.message, "write after end"
return
w.end "this is the end"
w.end "and so is this"
process.nextTick ->
assert gotError
t.end()
return
return
test "dont end while writing", (t) ->
w = new W()
wrote = false
w._write = (chunk, e, cb) ->
assert not @writing
wrote = true
@writing = true
setTimeout ->
@writing = false
cb()
return
return
w.on "finish", ->
assert wrote
t.end()
return
w.write Buffer(0)
w.end()
return
test "finish does not come before write cb", (t) ->
w = new W()
writeCb = false
w._write = (chunk, e, cb) ->
setTimeout (->
writeCb = true
cb()
return
), 10
return
w.on "finish", ->
assert writeCb
t.end()
return
w.write Buffer(0)
w.end()
return
test "finish does not come before sync _write cb", (t) ->
w = new W()
writeCb = false
w._write = (chunk, e, cb) ->
cb()
return
w.on "finish", ->
assert writeCb
t.end()
return
w.write Buffer(0), (er) ->
writeCb = true
return
w.end()
return
test "finish is emitted if last chunk is empty", (t) ->
w = new W()
w._write = (chunk, e, cb) ->
process.nextTick cb
return
w.on "finish", ->
t.end()
return
w.write Buffer(1)
w.end Buffer(0)
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
TestWriter = ->
W.apply this, arguments
@buffer = []
@written = 0
return
# simulate a small unpredictable latency
# tiny node-tap lookalike.
test = (name, fn) ->
count++
tests.push [
name
fn
]
return
run = ->
next = tests.shift()
return console.error("ok") unless next
name = next[0]
fn = next[1]
console.log "# %s", name
fn
same: assert.deepEqual
equal: assert.equal
end: ->
count--
run()
return
return
common = require("../common.js")
W = require("_stream_writable")
D = require("_stream_duplex")
assert = require("assert")
util = require("util")
util.inherits TestWriter, W
TestWriter::_write = (chunk, encoding, cb) ->
setTimeout (->
@buffer.push chunk.toString()
@written += chunk.length
cb()
return
).bind(this), Math.floor(Math.random() * 10)
return
chunks = new Array(50)
i = 0
while i < chunks.length
chunks[i] = new Array(i + 1).join("x")
i++
tests = []
count = 0
# ensure all tests have run
process.on "exit", ->
assert.equal count, 0
return
process.nextTick run
test "write fast", (t) ->
tw = new TestWriter(highWaterMark: 100)
tw.on "finish", ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.end()
return
chunks.forEach (chunk) ->
# screw backpressure. Just buffer it all up.
tw.write chunk
return
tw.end()
return
test "write slow", (t) ->
tw = new TestWriter(highWaterMark: 100)
tw.on "finish", ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.end()
return
i = 0
(W = ->
tw.write chunks[i++]
if i < chunks.length
setTimeout W, 10
else
tw.end()
return
)()
return
test "write backpressure", (t) ->
tw = new TestWriter(highWaterMark: 50)
drains = 0
tw.on "finish", ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.equal drains, 17
t.end()
return
tw.on "drain", ->
drains++
return
i = 0
(W = ->
loop
ret = tw.write(chunks[i++])
break unless ret isnt false and i < chunks.length
if i < chunks.length
assert tw._writableState.length >= 50
tw.once "drain", W
else
tw.end()
return
)()
return
test "write bufferize", (t) ->
tw = new TestWriter(highWaterMark: 100)
encodings = [
"hex"
"utf8"
"utf-8"
"ascii"
"binary"
"base64"
"ucs2"
"ucs-2"
"utf16le"
"utf-16le"
`undefined`
]
tw.on "finish", ->
t.same tw.buffer, chunks, "got the expected chunks"
return
chunks.forEach (chunk, i) ->
enc = encodings[i % encodings.length]
chunk = new Buffer(chunk)
tw.write chunk.toString(enc), enc
return
t.end()
return
test "write no bufferize", (t) ->
tw = new TestWriter(
highWaterMark: 100
decodeStrings: false
)
tw._write = (chunk, encoding, cb) ->
assert typeof chunk is "string"
chunk = new Buffer(chunk, encoding)
TestWriter::_write.call this, chunk, encoding, cb
encodings = [
"hex"
"utf8"
"utf-8"
"ascii"
"binary"
"base64"
"ucs2"
"ucs-2"
"utf16le"
"utf-16le"
`undefined`
]
tw.on "finish", ->
t.same tw.buffer, chunks, "got the expected chunks"
return
chunks.forEach (chunk, i) ->
enc = encodings[i % encodings.length]
chunk = new Buffer(chunk)
tw.write chunk.toString(enc), enc
return
t.end()
return
test "write callbacks", (t) ->
callbacks = chunks.map((chunk, i) ->
[
i
(er) ->
callbacks._called[i] = chunk
]
).reduce((set, x) ->
set["callback-" + x[0]] = x[1]
set
, {})
callbacks._called = []
tw = new TestWriter(highWaterMark: 100)
tw.on "finish", ->
process.nextTick ->
t.same tw.buffer, chunks, "got chunks in the right order"
t.same callbacks._called, chunks, "called all callbacks"
t.end()
return
return
chunks.forEach (chunk, i) ->
tw.write chunk, callbacks["callback-" + i]
return
tw.end()
return
test "end callback", (t) ->
tw = new TestWriter()
tw.end ->
t.end()
return
return
test "end callback with chunk", (t) ->
tw = new TestWriter()
tw.end new Buffer("hello world"), ->
t.end()
return
return
test "end callback with chunk and encoding", (t) ->
tw = new TestWriter()
tw.end "hello world", "ascii", ->
t.end()
return
return
test "end callback after .write() call", (t) ->
tw = new TestWriter()
tw.write new Buffer("hello world")
tw.end ->
t.end()
return
return
test "end callback called after write callback", (t) ->
tw = new TestWriter()
writeCalledback = false
tw.write new Buffer("hello world"), ->
writeCalledback = true
return
tw.end ->
t.equal writeCalledback, true
t.end()
return
return
test "encoding should be ignored for buffers", (t) ->
tw = new W()
hex = "018b5e9a8f6236ffe30e31baf80d2cf6eb"
tw._write = (chunk, encoding, cb) ->
t.equal chunk.toString("hex"), hex
t.end()
return
buf = new Buffer(hex, "hex")
tw.write buf, "binary"
return
test "writables are not pipable", (t) ->
w = new W()
w._write = ->
gotError = false
w.on "error", (er) ->
gotError = true
return
w.pipe process.stdout
assert gotError
t.end()
return
test "duplexes are pipable", (t) ->
d = new D()
d._read = ->
d._write = ->
gotError = false
d.on "error", (er) ->
gotError = true
return
d.pipe process.stdout
assert not gotError
t.end()
return
test "end(chunk) two times is an error", (t) ->
w = new W()
w._write = ->
gotError = false
w.on "error", (er) ->
gotError = true
t.equal er.message, "write after end"
return
w.end "this is the end"
w.end "and so is this"
process.nextTick ->
assert gotError
t.end()
return
return
test "dont end while writing", (t) ->
w = new W()
wrote = false
w._write = (chunk, e, cb) ->
assert not @writing
wrote = true
@writing = true
setTimeout ->
@writing = false
cb()
return
return
w.on "finish", ->
assert wrote
t.end()
return
w.write Buffer(0)
w.end()
return
test "finish does not come before write cb", (t) ->
w = new W()
writeCb = false
w._write = (chunk, e, cb) ->
setTimeout (->
writeCb = true
cb()
return
), 10
return
w.on "finish", ->
assert writeCb
t.end()
return
w.write Buffer(0)
w.end()
return
test "finish does not come before sync _write cb", (t) ->
w = new W()
writeCb = false
w._write = (chunk, e, cb) ->
cb()
return
w.on "finish", ->
assert writeCb
t.end()
return
w.write Buffer(0), (er) ->
writeCb = true
return
w.end()
return
test "finish is emitted if last chunk is empty", (t) ->
w = new W()
w._write = (chunk, e, cb) ->
process.nextTick cb
return
w.on "finish", ->
t.end()
return
w.write Buffer(1)
w.end Buffer(0)
return
|
[
{
"context": "text onto HTML canvas elements\n\nWritten in 2013 by Karl Naylor <kpn103@yahoo.com>\n\nTo the extent possible under ",
"end": 106,
"score": 0.9998902082443237,
"start": 95,
"tag": "NAME",
"value": "Karl Naylor"
},
{
"context": " canvas elements\n\nWritten in 2013 by Kar... | src/content/coffee/handywriteOnCanvas/graphemes/cubic_beziers.coffee | karlorg/phonetify | 1 | ###
handywriteOnCanvas - renders handywrite text onto HTML canvas elements
Written in 2013 by Karl Naylor <kpn103@yahoo.com>
To the extent possible under law, the author(s) have dedicated all
copyright and related and neighboring rights to this software to the
public domain worldwide. This software is distributed without any
warranty.
You should have received a copy of the CC0 Public Domain Dedication
along with this software. If not, see
<http://creativecommons.org/publicdomain/zero/1.0/>.
###
define ['../grapheme', '../boxes', '../geometry'], (Grapheme, boxes, geometry) ->
'use strict'
graphemes = {}
graphemes.classes = {}
TAU = 2 * Math.PI # TAU is one full turn in radians
rWidth = 0.5
lWidth = 1
class CubicBezier extends Grapheme
# a base class for a grapheme rendered as a single, fixed cubic bezier
# curve. Subclasses should override _p[1..3] with their control points
# (_p[0] should always be (0,0)).
_p: [
{ x: 0, y: 0 },
{ x: 0, y: 0 },
{ x: 0, y: 0 },
{ x: 0, y: 0 } ]
getBoundingBox: ->
# for now just use the control points. TODO: compute the bounding
# box correctly
new boxes.BoundingBox(
Math.min((p.x for p in @_p)...),
Math.min((p.y for p in @_p)...),
Math.max((p.x for p in @_p)...),
Math.max((p.y for p in @_p)...))
getFinishPoint: -> { x: @_p[3].x, y: @_p[3].y }
getEntryAngle: -> new geometry.Vector(@_p[0], @_p[1]).angle()
getExitAngle: -> new geometry.Vector(@_p[2], @_p[3]).angle()
render: (ctx) ->
ctx.beginPath()
ctx.moveTo(0,0)
ctx.bezierCurveTo(
@_p[1].x, @_p[1].y,
@_p[2].x, @_p[2].y,
@_p[3].x, @_p[3].y)
ctx.stroke()
return
graphemes.classes.b = class B extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: - 3 * lWidth / 12, y: lWidth / 2 },
{ x: - lWidth / 3, y: 11 * lWidth / 12 },
{ x: - lWidth / 4, y: lWidth } ]
graphemes.classes.c = class C extends CubicBezier
_p: [
{ x: 0, y: 0 }
{ x: - rWidth / 2, y: 0 }
{ x: - rWidth / 2, y: rWidth / 3 }
{ x: 0, y: rWidth / 3 } ]
graphemes.classes.f = class F extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: rWidth / 12, y: rWidth / 12 },
{ x: lWidth / 10, y: rWidth / 2 },
{ x: - lWidth / 6, y: rWidth } ]
graphemes.classes.g = class G extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: lWidth / 6, y: - lWidth / 6 },
{ x: lWidth / 2, y: - lWidth / 6 },
{ x: lWidth, y: 0 } ]
graphemes.classes.l = class L extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: lWidth / 6, y: lWidth / 6 },
{ x: lWidth / 2, y: lWidth / 6 },
{ x: lWidth, y: 0 } ]
graphemes.classes.k = class K extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: rWidth / 6, y: - lWidth / 6 },
{ x: rWidth / 2, y: - lWidth / 6 },
{ x: rWidth, y: 0 } ]
graphemes.classes.o = class O extends CubicBezier
_p: [
{ x: 0, y: 0}
{ x: rWidth / 2, y: - rWidth / 4 }
{ x: rWidth / 2, y: 0 }
{ x: rWidth / 4, y: rWidth / 3 } ]
graphemes.classes.p = class P extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: - 3 * lWidth / 12, y: rWidth / 2 },
{ x: - lWidth / 3, y: 11 * rWidth / 12 },
{ x: - lWidth / 4, y: rWidth } ]
graphemes.classes.r = class R extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: rWidth / 6, y: lWidth / 6 },
{ x: rWidth / 2, y: lWidth / 6 },
{ x: rWidth, y: 0 } ]
graphemes.classes.u = class U extends CubicBezier
_p: [
{ x: 0, y: 0 }
{ x: 0, y: rWidth / 2 }
{ x: rWidth / 3, y: rWidth / 2 }
{ x: rWidth / 3, y: 0 } ]
graphemes.classes.v = class V extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: lWidth / 12, y: lWidth / 12 },
{ x: lWidth / 10, y: lWidth / 2 },
{ x: - 3 * lWidth / 8, y: lWidth } ]
graphemes.classes.y = class Y extends CubicBezier
_p: [
{ x: 0, y: 0 }
{ x: lWidth / 10, y: 0 }
{ x: lWidth / 8, y: 0 }
{ x: lWidth / 6, y: lWidth } ]
return graphemes
| 224559 | ###
handywriteOnCanvas - renders handywrite text onto HTML canvas elements
Written in 2013 by <NAME> <<EMAIL>>
To the extent possible under law, the author(s) have dedicated all
copyright and related and neighboring rights to this software to the
public domain worldwide. This software is distributed without any
warranty.
You should have received a copy of the CC0 Public Domain Dedication
along with this software. If not, see
<http://creativecommons.org/publicdomain/zero/1.0/>.
###
define ['../grapheme', '../boxes', '../geometry'], (Grapheme, boxes, geometry) ->
'use strict'
graphemes = {}
graphemes.classes = {}
TAU = 2 * Math.PI # TAU is one full turn in radians
rWidth = 0.5
lWidth = 1
class CubicBezier extends Grapheme
# a base class for a grapheme rendered as a single, fixed cubic bezier
# curve. Subclasses should override _p[1..3] with their control points
# (_p[0] should always be (0,0)).
_p: [
{ x: 0, y: 0 },
{ x: 0, y: 0 },
{ x: 0, y: 0 },
{ x: 0, y: 0 } ]
getBoundingBox: ->
# for now just use the control points. TODO: compute the bounding
# box correctly
new boxes.BoundingBox(
Math.min((p.x for p in @_p)...),
Math.min((p.y for p in @_p)...),
Math.max((p.x for p in @_p)...),
Math.max((p.y for p in @_p)...))
getFinishPoint: -> { x: @_p[3].x, y: @_p[3].y }
getEntryAngle: -> new geometry.Vector(@_p[0], @_p[1]).angle()
getExitAngle: -> new geometry.Vector(@_p[2], @_p[3]).angle()
render: (ctx) ->
ctx.beginPath()
ctx.moveTo(0,0)
ctx.bezierCurveTo(
@_p[1].x, @_p[1].y,
@_p[2].x, @_p[2].y,
@_p[3].x, @_p[3].y)
ctx.stroke()
return
graphemes.classes.b = class B extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: - 3 * lWidth / 12, y: lWidth / 2 },
{ x: - lWidth / 3, y: 11 * lWidth / 12 },
{ x: - lWidth / 4, y: lWidth } ]
graphemes.classes.c = class C extends CubicBezier
_p: [
{ x: 0, y: 0 }
{ x: - rWidth / 2, y: 0 }
{ x: - rWidth / 2, y: rWidth / 3 }
{ x: 0, y: rWidth / 3 } ]
graphemes.classes.f = class F extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: rWidth / 12, y: rWidth / 12 },
{ x: lWidth / 10, y: rWidth / 2 },
{ x: - lWidth / 6, y: rWidth } ]
graphemes.classes.g = class G extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: lWidth / 6, y: - lWidth / 6 },
{ x: lWidth / 2, y: - lWidth / 6 },
{ x: lWidth, y: 0 } ]
graphemes.classes.l = class L extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: lWidth / 6, y: lWidth / 6 },
{ x: lWidth / 2, y: lWidth / 6 },
{ x: lWidth, y: 0 } ]
graphemes.classes.k = class K extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: rWidth / 6, y: - lWidth / 6 },
{ x: rWidth / 2, y: - lWidth / 6 },
{ x: rWidth, y: 0 } ]
graphemes.classes.o = class O extends CubicBezier
_p: [
{ x: 0, y: 0}
{ x: rWidth / 2, y: - rWidth / 4 }
{ x: rWidth / 2, y: 0 }
{ x: rWidth / 4, y: rWidth / 3 } ]
graphemes.classes.p = class P extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: - 3 * lWidth / 12, y: rWidth / 2 },
{ x: - lWidth / 3, y: 11 * rWidth / 12 },
{ x: - lWidth / 4, y: rWidth } ]
graphemes.classes.r = class R extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: rWidth / 6, y: lWidth / 6 },
{ x: rWidth / 2, y: lWidth / 6 },
{ x: rWidth, y: 0 } ]
graphemes.classes.u = class U extends CubicBezier
_p: [
{ x: 0, y: 0 }
{ x: 0, y: rWidth / 2 }
{ x: rWidth / 3, y: rWidth / 2 }
{ x: rWidth / 3, y: 0 } ]
graphemes.classes.v = class V extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: lWidth / 12, y: lWidth / 12 },
{ x: lWidth / 10, y: lWidth / 2 },
{ x: - 3 * lWidth / 8, y: lWidth } ]
graphemes.classes.y = class Y extends CubicBezier
_p: [
{ x: 0, y: 0 }
{ x: lWidth / 10, y: 0 }
{ x: lWidth / 8, y: 0 }
{ x: lWidth / 6, y: lWidth } ]
return graphemes
| true | ###
handywriteOnCanvas - renders handywrite text onto HTML canvas elements
Written in 2013 by PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
To the extent possible under law, the author(s) have dedicated all
copyright and related and neighboring rights to this software to the
public domain worldwide. This software is distributed without any
warranty.
You should have received a copy of the CC0 Public Domain Dedication
along with this software. If not, see
<http://creativecommons.org/publicdomain/zero/1.0/>.
###
define ['../grapheme', '../boxes', '../geometry'], (Grapheme, boxes, geometry) ->
'use strict'
graphemes = {}
graphemes.classes = {}
TAU = 2 * Math.PI # TAU is one full turn in radians
rWidth = 0.5
lWidth = 1
class CubicBezier extends Grapheme
# a base class for a grapheme rendered as a single, fixed cubic bezier
# curve. Subclasses should override _p[1..3] with their control points
# (_p[0] should always be (0,0)).
_p: [
{ x: 0, y: 0 },
{ x: 0, y: 0 },
{ x: 0, y: 0 },
{ x: 0, y: 0 } ]
getBoundingBox: ->
# for now just use the control points. TODO: compute the bounding
# box correctly
new boxes.BoundingBox(
Math.min((p.x for p in @_p)...),
Math.min((p.y for p in @_p)...),
Math.max((p.x for p in @_p)...),
Math.max((p.y for p in @_p)...))
getFinishPoint: -> { x: @_p[3].x, y: @_p[3].y }
getEntryAngle: -> new geometry.Vector(@_p[0], @_p[1]).angle()
getExitAngle: -> new geometry.Vector(@_p[2], @_p[3]).angle()
render: (ctx) ->
ctx.beginPath()
ctx.moveTo(0,0)
ctx.bezierCurveTo(
@_p[1].x, @_p[1].y,
@_p[2].x, @_p[2].y,
@_p[3].x, @_p[3].y)
ctx.stroke()
return
graphemes.classes.b = class B extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: - 3 * lWidth / 12, y: lWidth / 2 },
{ x: - lWidth / 3, y: 11 * lWidth / 12 },
{ x: - lWidth / 4, y: lWidth } ]
graphemes.classes.c = class C extends CubicBezier
_p: [
{ x: 0, y: 0 }
{ x: - rWidth / 2, y: 0 }
{ x: - rWidth / 2, y: rWidth / 3 }
{ x: 0, y: rWidth / 3 } ]
graphemes.classes.f = class F extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: rWidth / 12, y: rWidth / 12 },
{ x: lWidth / 10, y: rWidth / 2 },
{ x: - lWidth / 6, y: rWidth } ]
graphemes.classes.g = class G extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: lWidth / 6, y: - lWidth / 6 },
{ x: lWidth / 2, y: - lWidth / 6 },
{ x: lWidth, y: 0 } ]
graphemes.classes.l = class L extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: lWidth / 6, y: lWidth / 6 },
{ x: lWidth / 2, y: lWidth / 6 },
{ x: lWidth, y: 0 } ]
graphemes.classes.k = class K extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: rWidth / 6, y: - lWidth / 6 },
{ x: rWidth / 2, y: - lWidth / 6 },
{ x: rWidth, y: 0 } ]
graphemes.classes.o = class O extends CubicBezier
_p: [
{ x: 0, y: 0}
{ x: rWidth / 2, y: - rWidth / 4 }
{ x: rWidth / 2, y: 0 }
{ x: rWidth / 4, y: rWidth / 3 } ]
graphemes.classes.p = class P extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: - 3 * lWidth / 12, y: rWidth / 2 },
{ x: - lWidth / 3, y: 11 * rWidth / 12 },
{ x: - lWidth / 4, y: rWidth } ]
graphemes.classes.r = class R extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: rWidth / 6, y: lWidth / 6 },
{ x: rWidth / 2, y: lWidth / 6 },
{ x: rWidth, y: 0 } ]
graphemes.classes.u = class U extends CubicBezier
_p: [
{ x: 0, y: 0 }
{ x: 0, y: rWidth / 2 }
{ x: rWidth / 3, y: rWidth / 2 }
{ x: rWidth / 3, y: 0 } ]
graphemes.classes.v = class V extends CubicBezier
_p: [
{ x: 0, y: 0 },
{ x: lWidth / 12, y: lWidth / 12 },
{ x: lWidth / 10, y: lWidth / 2 },
{ x: - 3 * lWidth / 8, y: lWidth } ]
graphemes.classes.y = class Y extends CubicBezier
_p: [
{ x: 0, y: 0 }
{ x: lWidth / 10, y: 0 }
{ x: lWidth / 8, y: 0 }
{ x: lWidth / 6, y: lWidth } ]
return graphemes
|
[
{
"context": "\\docker-openresty · GitHub](https:\\\\\\\\github.com\\\\openresty\\\\docker-openresty\\\\blob\\\\master\\\\alpine\\\\Dockerfi",
"end": 458,
"score": 0.897545337677002,
"start": 449,
"tag": "USERNAME",
"value": "openresty"
},
{
"context": "\\master\\\\alpine\\\\Dockerfi... | notes/1c20e5ca-7722-4366-9ac6-b423739f24e9.cson | cwocwo/boostnote | 0 | createdAt: "2018-09-01T03:27:00.589Z"
updatedAt: "2018-09-22T03:34:41.655Z"
type: "MARKDOWN_NOTE"
folder: "66dff0cfbfde06f3d2e8"
title: "http2 kecloak proxy"
content: '''
# http2 kecloak proxy
.\\keycloak-proxy-linux-amd64 -config config.yaml --skip-openid-provider-tls-verify=true --secure-cookie=false --verbose=true
[docker-openresty\\Dockerfile.fat at master · openresty\\docker-openresty · GitHub](https:\\\\github.com\\openresty\\docker-openresty\\blob\\master\\alpine\\Dockerfile.fat)
[GitHub - zmartzone\\lua-resty-openidc: Lua implementation to make NGINX operate as an OpenID Connect RP or OAuth 2.0 RS using the Lua extension scripting features (http:\\\\wiki.nginx.org\\HttpLuaModule) which are for instance part of OpenResty (http:\\\\openresty.org\\)](https:\\\\github.com\\zmartzone\\lua-resty-openidc)
## 构建镜像
[auth-proxy\\openidc-auth-proxy at master · jochenchrist\\auth-proxy · GitHub](https:\\\\github.com\\jochenchrist\\auth-proxy\\tree\\master\\openidc-auth-proxy)
```
git clone https:\\\\github.com\\jochenchrist\\auth-proxy.git
cd auth-proxy\\openidc-auth-proxy
docker build -t openidc-auth-proxy:1.0.0 .
docker tag openidc-auth-proxy:1.0.0 docker pull cwocwo\\auth-proxy:1.0.1
```
```
docker pull cwocwo\\auth-proxy:1.0.1
```
nginx.conf
```
events {
worker_connections 128;
}
http {
lua_package_path '~\\lua\\?.lua;;';
# docker embedded DNS server
resolver 10.110.1.25 ipv6=off;
lua_ssl_trusted_certificate \\etc\\ssl\\certs\\ca-certificates.crt;
lua_ssl_verify_depth 5;
# cache for discovery metadata documents
lua_shared_dict discovery 1m;
# cache for JWKs
lua_shared_dict jwks 1m;
server {
listen 4443 ssl http2;
ssl on;
ssl_certificate \\etc\\nginx\\certs\\tiller.cert.pem;
ssl_certificate_key \\etc\\nginx\\certs\\tiller.key.pem;
# Keycloak (Identity Provider)
location \\auth {
proxy_pass https:\\\\app-web.ioc-test.10.110.25.57.xip.io;
proxy_set_header Host $http_host;
proxy_pass_request_headers on;
}
# app1 (Relying Party)
location \\ {
access_by_lua_block {
local opts = {
redirect_uri_path = "\\app1\\redirect_uri",
discovery = "https:\\\\app-web.ioc-test.10.110.25.57.xip.io\\auth\\realms\\app\\.well-known\\openid-configuration",
client_id = "app",
-- client_secret = "b162ec35-3e05-4129-8da1-63d5e721b7d6",
scope = "openid email",
access_token_expires_leeway = 30,
-- This is really, really important
accept_none_alg = false,
accept_unsupported_alg = false,
renew_access_token_on_expiry = true,
session_contents = {access_token=true, id_token=true}
}
-- call authenticate for OpenID Connect user authentication
local res, err = require("resty.openidc").authenticate(opts)
if err then
ngx.status = 500
ngx.say(err)
ngx.exit(ngx.HTTP_INTERNAL_SERVER_ERROR)
end
-- Set valid access token and email as request header
ngx.req.set_header("Authorization", "Bearer " .. res.access_token)
ngx.req.set_header("X-User", res.id_token.email)
}
proxy_pass http:\\\\10.110.25.114:31313;
}
}
}
```
docker run -v \\opt\\keycloak-proxy\\conf:\\usr\\local\\openresty\\nginx\\conf -v \\opt\\keycloak-proxy\\certs:\\etc\\nginx\\certs -p 3000:4443 -d --name=auth-proxy cwocwo\\auth-proxy:1.0.1
## http2 ssl
server {
listen 443 ssl http2;
server_name www.tinywan.com;
set $root_path \\home\\www;
root $root_path;
ssl on;
ssl_certificate \\etc\\letsencrypt\\live\\www.tinywan.com\\fullchain.pem;
ssl_certificate_key \\etc\\letsencrypt\\live\\www.tinywan.com\\\\privkey.pem;
server_tokens off;
location \\ {
if (!-e $request_filename) {
rewrite ^(.*)$ \\index.php?s=\\$1 last;
break;
}
}
}
[GitHub - grpc-ecosystem\\grpc-gateway: gRPC to JSON proxy generator following the gRPC HTTP spec](https:\\\\github.com\\grpc-ecosystem\\grpc-gateway) --gRPC to JSON proxy generator following the gRPC HTTP spec
'''
tags: []
isStarred: false
isTrashed: false
| 109486 | createdAt: "2018-09-01T03:27:00.589Z"
updatedAt: "2018-09-22T03:34:41.655Z"
type: "MARKDOWN_NOTE"
folder: "66dff0cfbfde06f3d2e8"
title: "http2 kecloak proxy"
content: '''
# http2 kecloak proxy
.\\keycloak-proxy-linux-amd64 -config config.yaml --skip-openid-provider-tls-verify=true --secure-cookie=false --verbose=true
[docker-openresty\\Dockerfile.fat at master · openresty\\docker-openresty · GitHub](https:\\\\github.com\\openresty\\docker-openresty\\blob\\master\\alpine\\Dockerfile.fat)
[GitHub - zmartzone\\lua-resty-openidc: Lua implementation to make NGINX operate as an OpenID Connect RP or OAuth 2.0 RS using the Lua extension scripting features (http:\\\\wiki.nginx.org\\HttpLuaModule) which are for instance part of OpenResty (http:\\\\openresty.org\\)](https:\\\\github.com\\zmartzone\\lua-resty-openidc)
## 构建镜像
[auth-proxy\\openidc-auth-proxy at master · jochenchrist\\auth-proxy · GitHub](https:\\\\github.com\\jochenchrist\\auth-proxy\\tree\\master\\openidc-auth-proxy)
```
git clone https:\\\\github.com\\jochenchrist\\auth-proxy.git
cd auth-proxy\\openidc-auth-proxy
docker build -t openidc-auth-proxy:1.0.0 .
docker tag openidc-auth-proxy:1.0.0 docker pull cwocwo\\auth-proxy:1.0.1
```
```
docker pull cwocwo\\auth-proxy:1.0.1
```
nginx.conf
```
events {
worker_connections 128;
}
http {
lua_package_path '~\\lua\\?.lua;;';
# docker embedded DNS server
resolver 10.110.1.25 ipv6=off;
lua_ssl_trusted_certificate \\etc\\ssl\\certs\\ca-certificates.crt;
lua_ssl_verify_depth 5;
# cache for discovery metadata documents
lua_shared_dict discovery 1m;
# cache for JWKs
lua_shared_dict jwks 1m;
server {
listen 4443 ssl http2;
ssl on;
ssl_certificate \\etc\\nginx\\certs\\tiller.cert.pem;
ssl_certificate_key \\etc\\nginx\\certs\\tiller.key.pem;
# Keycloak (Identity Provider)
location \\auth {
proxy_pass https:\\\\app-web.ioc-test.10.110.25.57.xip.io;
proxy_set_header Host $http_host;
proxy_pass_request_headers on;
}
# app1 (Relying Party)
location \\ {
access_by_lua_block {
local opts = {
redirect_uri_path = "\\app1\\redirect_uri",
discovery = "https:\\\\app-web.ioc-test.10.110.25.57.xip.io\\auth\\realms\\app\\.well-known\\openid-configuration",
client_id = "app",
-- client_secret = "<KEY>",
scope = "openid email",
access_token_expires_leeway = 30,
-- This is really, really important
accept_none_alg = false,
accept_unsupported_alg = false,
renew_access_token_on_expiry = true,
session_contents = {access_token=true, id_token=true}
}
-- call authenticate for OpenID Connect user authentication
local res, err = require("resty.openidc").authenticate(opts)
if err then
ngx.status = 500
ngx.say(err)
ngx.exit(ngx.HTTP_INTERNAL_SERVER_ERROR)
end
-- Set valid access token and email as request header
ngx.req.set_header("Authorization", "Bearer " .. res.access_token)
ngx.req.set_header("X-User", res.id_token.email)
}
proxy_pass http:\\\\10.110.25.114:31313;
}
}
}
```
docker run -v \\opt\\keycloak-proxy\\conf:\\usr\\local\\openresty\\nginx\\conf -v \\opt\\keycloak-proxy\\certs:\\etc\\nginx\\certs -p 3000:4443 -d --name=auth-proxy cwocwo\\auth-proxy:1.0.1
## http2 ssl
server {
listen 443 ssl http2;
server_name www.tinywan.com;
set $root_path \\home\\www;
root $root_path;
ssl on;
ssl_certificate \\etc\\letsencrypt\\live\\www.tinywan.com\\fullchain.pem;
ssl_certificate_key \\etc\\letsencrypt\\live\\www.tinywan.com\\\\privkey.pem;
server_tokens off;
location \\ {
if (!-e $request_filename) {
rewrite ^(.*)$ \\index.php?s=\\$1 last;
break;
}
}
}
[GitHub - grpc-ecosystem\\grpc-gateway: gRPC to JSON proxy generator following the gRPC HTTP spec](https:\\\\github.com\\grpc-ecosystem\\grpc-gateway) --gRPC to JSON proxy generator following the gRPC HTTP spec
'''
tags: []
isStarred: false
isTrashed: false
| true | createdAt: "2018-09-01T03:27:00.589Z"
updatedAt: "2018-09-22T03:34:41.655Z"
type: "MARKDOWN_NOTE"
folder: "66dff0cfbfde06f3d2e8"
title: "http2 kecloak proxy"
content: '''
# http2 kecloak proxy
.\\keycloak-proxy-linux-amd64 -config config.yaml --skip-openid-provider-tls-verify=true --secure-cookie=false --verbose=true
[docker-openresty\\Dockerfile.fat at master · openresty\\docker-openresty · GitHub](https:\\\\github.com\\openresty\\docker-openresty\\blob\\master\\alpine\\Dockerfile.fat)
[GitHub - zmartzone\\lua-resty-openidc: Lua implementation to make NGINX operate as an OpenID Connect RP or OAuth 2.0 RS using the Lua extension scripting features (http:\\\\wiki.nginx.org\\HttpLuaModule) which are for instance part of OpenResty (http:\\\\openresty.org\\)](https:\\\\github.com\\zmartzone\\lua-resty-openidc)
## 构建镜像
[auth-proxy\\openidc-auth-proxy at master · jochenchrist\\auth-proxy · GitHub](https:\\\\github.com\\jochenchrist\\auth-proxy\\tree\\master\\openidc-auth-proxy)
```
git clone https:\\\\github.com\\jochenchrist\\auth-proxy.git
cd auth-proxy\\openidc-auth-proxy
docker build -t openidc-auth-proxy:1.0.0 .
docker tag openidc-auth-proxy:1.0.0 docker pull cwocwo\\auth-proxy:1.0.1
```
```
docker pull cwocwo\\auth-proxy:1.0.1
```
nginx.conf
```
events {
worker_connections 128;
}
http {
lua_package_path '~\\lua\\?.lua;;';
# docker embedded DNS server
resolver 10.110.1.25 ipv6=off;
lua_ssl_trusted_certificate \\etc\\ssl\\certs\\ca-certificates.crt;
lua_ssl_verify_depth 5;
# cache for discovery metadata documents
lua_shared_dict discovery 1m;
# cache for JWKs
lua_shared_dict jwks 1m;
server {
listen 4443 ssl http2;
ssl on;
ssl_certificate \\etc\\nginx\\certs\\tiller.cert.pem;
ssl_certificate_key \\etc\\nginx\\certs\\tiller.key.pem;
# Keycloak (Identity Provider)
location \\auth {
proxy_pass https:\\\\app-web.ioc-test.10.110.25.57.xip.io;
proxy_set_header Host $http_host;
proxy_pass_request_headers on;
}
# app1 (Relying Party)
location \\ {
access_by_lua_block {
local opts = {
redirect_uri_path = "\\app1\\redirect_uri",
discovery = "https:\\\\app-web.ioc-test.10.110.25.57.xip.io\\auth\\realms\\app\\.well-known\\openid-configuration",
client_id = "app",
-- client_secret = "PI:KEY:<KEY>END_PI",
scope = "openid email",
access_token_expires_leeway = 30,
-- This is really, really important
accept_none_alg = false,
accept_unsupported_alg = false,
renew_access_token_on_expiry = true,
session_contents = {access_token=true, id_token=true}
}
-- call authenticate for OpenID Connect user authentication
local res, err = require("resty.openidc").authenticate(opts)
if err then
ngx.status = 500
ngx.say(err)
ngx.exit(ngx.HTTP_INTERNAL_SERVER_ERROR)
end
-- Set valid access token and email as request header
ngx.req.set_header("Authorization", "Bearer " .. res.access_token)
ngx.req.set_header("X-User", res.id_token.email)
}
proxy_pass http:\\\\10.110.25.114:31313;
}
}
}
```
docker run -v \\opt\\keycloak-proxy\\conf:\\usr\\local\\openresty\\nginx\\conf -v \\opt\\keycloak-proxy\\certs:\\etc\\nginx\\certs -p 3000:4443 -d --name=auth-proxy cwocwo\\auth-proxy:1.0.1
## http2 ssl
server {
listen 443 ssl http2;
server_name www.tinywan.com;
set $root_path \\home\\www;
root $root_path;
ssl on;
ssl_certificate \\etc\\letsencrypt\\live\\www.tinywan.com\\fullchain.pem;
ssl_certificate_key \\etc\\letsencrypt\\live\\www.tinywan.com\\\\privkey.pem;
server_tokens off;
location \\ {
if (!-e $request_filename) {
rewrite ^(.*)$ \\index.php?s=\\$1 last;
break;
}
}
}
[GitHub - grpc-ecosystem\\grpc-gateway: gRPC to JSON proxy generator following the gRPC HTTP spec](https:\\\\github.com\\grpc-ecosystem\\grpc-gateway) --gRPC to JSON proxy generator following the gRPC HTTP spec
'''
tags: []
isStarred: false
isTrashed: false
|
[
{
"context": "js.server.connect({\n user: user,\n password:password,\n host: host,\n ssl: ssl\n})\n\nmessageCon",
"end": 377,
"score": 0.9995935559272766,
"start": 369,
"tag": "PASSWORD",
"value": "password"
}
] | app.coffee | stenver/node-mailing-service | 0 | #!/usr/bin/env coffee
EmailConsumer = require './lib/email_consumer'
emailjs = require("emailjs/email")
kafka = require 'kafka-node'
user = process.env.EMAIL_USER
password = process.env.EMAIL_PASSWORD
host = process.env.HOST
ssl = process.env.SSL || true
console.log(user, password, host, ssl)
emailjsserver = emailjs.server.connect({
user: user,
password:password,
host: host,
ssl: ssl
})
messageConsumer = new kafka.HighLevelConsumer(
new kafka.Client(), [{ topic: 'node-email'}]
)
emailConsumer = new EmailConsumer(emailjsserver, messageConsumer)
emailConsumer.start()
| 186326 | #!/usr/bin/env coffee
EmailConsumer = require './lib/email_consumer'
emailjs = require("emailjs/email")
kafka = require 'kafka-node'
user = process.env.EMAIL_USER
password = process.env.EMAIL_PASSWORD
host = process.env.HOST
ssl = process.env.SSL || true
console.log(user, password, host, ssl)
emailjsserver = emailjs.server.connect({
user: user,
password:<PASSWORD>,
host: host,
ssl: ssl
})
messageConsumer = new kafka.HighLevelConsumer(
new kafka.Client(), [{ topic: 'node-email'}]
)
emailConsumer = new EmailConsumer(emailjsserver, messageConsumer)
emailConsumer.start()
| true | #!/usr/bin/env coffee
EmailConsumer = require './lib/email_consumer'
emailjs = require("emailjs/email")
kafka = require 'kafka-node'
user = process.env.EMAIL_USER
password = process.env.EMAIL_PASSWORD
host = process.env.HOST
ssl = process.env.SSL || true
console.log(user, password, host, ssl)
emailjsserver = emailjs.server.connect({
user: user,
password:PI:PASSWORD:<PASSWORD>END_PI,
host: host,
ssl: ssl
})
messageConsumer = new kafka.HighLevelConsumer(
new kafka.Client(), [{ topic: 'node-email'}]
)
emailConsumer = new EmailConsumer(emailjsserver, messageConsumer)
emailConsumer.start()
|
[
{
"context": " args.join(\" \")\n opt = {}\n opt.uniqueKey = 'textidote'\n opt.stream = 'both'\n console.log \"lintin",
"end": 1924,
"score": 0.9758475422859192,
"start": 1915,
"tag": "KEY",
"value": "textidote"
}
] | lib/linter-textidote.coffee | 73/linter-textidote | 1 | {CompositeDisposable} = require 'atom'
fs = require 'fs'
path = require 'path'
helpers = require 'atom-linter'
XRegExp = require('xregexp').XRegExp
xcache = new Map
module.exports =
config:
executablePath:
type: 'string'
default: "/usr/local/bin/textidote"
description: 'Path to the textidote binary'
textidoteArguments:
type: 'array'
default: ["--check", "en"]
description: 'Arguments to pass to textidote. Seperate by comma.'
activate: (state) ->
require("atom-package-deps").install("linter-textidote")
.then ->
console.log 'linter-textidote loaded'
@subscriptions = new CompositeDisposable
@subscriptions.add atom.config.observe 'linter-textidote.executablePath',
(executablePath) =>
# console.log 'observe ' + executablePath
@executablePath = executablePath
@subscriptions.add atom.config.observe 'linter-textidote.textidoteArguments',
(textidoteArguments) =>
# console.log 'observe ' + textidoteArguments
@textidoteArguments = textidoteArguments
deactivate: ->
@subscriptions.dispose()
provideLinter: ->
provider =
name: 'textidote'
grammarScopes: ['text.tex.latex', 'text.tex.latex.beamer', 'text.tex.latex.memoir', 'text.tex.latex.knitr']
scope: 'file'
lintsOnChange: false
lint: (textEditor) =>
if fs.existsSync(textEditor.getPath())
return @lintFile textEditor.getPath()
.then @parseOutput
console.log 'file "' + textEditor.getPath() + '"" does not exist'
return []
lintFile: (filePath) ->
args = ["--output", "singleline", "--read-all", "--no-color", "--no-config"]
if textidoteArguments
for x in textidoteArguments
args.push x
args.push filePath
# console.log args.join(" ")
opt = {}
opt.uniqueKey = 'textidote'
opt.stream = 'both'
console.log "linting: " + filePath
return helpers.exec(executablePath, args, opt)
parseOutput: (output, filePath) ->
console.log "linting finished: " + filePath + "\n" + output.stderr
rawRegex = '^(?<file>.+)\\(L(?<lineStart>[0-9]+)C(?<colStart>[0-9]+)-L(?<lineEnd>[0-9]+)C(?<colEnd>[0-9]+)\\): (?<message>.+) Suggestions: \\[(?<suggestions>.+)\\].*$'
toReturn = []
if xcache.has(rawRegex)
regex = xcache.get(rawRegex)
else
xcache.set(rawRegex, regex = XRegExp(rawRegex, 'm'))
#for line in output.split(/\r?\n/)
for line in output.stdout.split('\n')
# console.log line
match = XRegExp.exec(line, regex)
if match
# console.log match
lineStart = parseInt(match.lineStart,10) - 1
colStart = parseInt(match.colStart,10) - 1
lineEnd = parseInt(match.lineEnd,10) - 1
colEnd = parseInt(match.colEnd,10) - 1
range = [[lineStart, colStart], [lineEnd, colEnd]]
message = match.message
solutions = []
for suggestion in match.suggestions.split(', ')
solutions.push {position: range, title: 'Change to: ' + suggestion, replaceWith: suggestion}
# console.log solutions
toReturn.push({
severity: "warning",
location: {
file: match.file,
position: range
},
solutions: solutions,
description: message,
excerpt: message
})
# console.log toReturn
return toReturn
| 19281 | {CompositeDisposable} = require 'atom'
fs = require 'fs'
path = require 'path'
helpers = require 'atom-linter'
XRegExp = require('xregexp').XRegExp
xcache = new Map
module.exports =
config:
executablePath:
type: 'string'
default: "/usr/local/bin/textidote"
description: 'Path to the textidote binary'
textidoteArguments:
type: 'array'
default: ["--check", "en"]
description: 'Arguments to pass to textidote. Seperate by comma.'
activate: (state) ->
require("atom-package-deps").install("linter-textidote")
.then ->
console.log 'linter-textidote loaded'
@subscriptions = new CompositeDisposable
@subscriptions.add atom.config.observe 'linter-textidote.executablePath',
(executablePath) =>
# console.log 'observe ' + executablePath
@executablePath = executablePath
@subscriptions.add atom.config.observe 'linter-textidote.textidoteArguments',
(textidoteArguments) =>
# console.log 'observe ' + textidoteArguments
@textidoteArguments = textidoteArguments
deactivate: ->
@subscriptions.dispose()
provideLinter: ->
provider =
name: 'textidote'
grammarScopes: ['text.tex.latex', 'text.tex.latex.beamer', 'text.tex.latex.memoir', 'text.tex.latex.knitr']
scope: 'file'
lintsOnChange: false
lint: (textEditor) =>
if fs.existsSync(textEditor.getPath())
return @lintFile textEditor.getPath()
.then @parseOutput
console.log 'file "' + textEditor.getPath() + '"" does not exist'
return []
lintFile: (filePath) ->
args = ["--output", "singleline", "--read-all", "--no-color", "--no-config"]
if textidoteArguments
for x in textidoteArguments
args.push x
args.push filePath
# console.log args.join(" ")
opt = {}
opt.uniqueKey = '<KEY>'
opt.stream = 'both'
console.log "linting: " + filePath
return helpers.exec(executablePath, args, opt)
parseOutput: (output, filePath) ->
console.log "linting finished: " + filePath + "\n" + output.stderr
rawRegex = '^(?<file>.+)\\(L(?<lineStart>[0-9]+)C(?<colStart>[0-9]+)-L(?<lineEnd>[0-9]+)C(?<colEnd>[0-9]+)\\): (?<message>.+) Suggestions: \\[(?<suggestions>.+)\\].*$'
toReturn = []
if xcache.has(rawRegex)
regex = xcache.get(rawRegex)
else
xcache.set(rawRegex, regex = XRegExp(rawRegex, 'm'))
#for line in output.split(/\r?\n/)
for line in output.stdout.split('\n')
# console.log line
match = XRegExp.exec(line, regex)
if match
# console.log match
lineStart = parseInt(match.lineStart,10) - 1
colStart = parseInt(match.colStart,10) - 1
lineEnd = parseInt(match.lineEnd,10) - 1
colEnd = parseInt(match.colEnd,10) - 1
range = [[lineStart, colStart], [lineEnd, colEnd]]
message = match.message
solutions = []
for suggestion in match.suggestions.split(', ')
solutions.push {position: range, title: 'Change to: ' + suggestion, replaceWith: suggestion}
# console.log solutions
toReturn.push({
severity: "warning",
location: {
file: match.file,
position: range
},
solutions: solutions,
description: message,
excerpt: message
})
# console.log toReturn
return toReturn
| true | {CompositeDisposable} = require 'atom'
fs = require 'fs'
path = require 'path'
helpers = require 'atom-linter'
XRegExp = require('xregexp').XRegExp
xcache = new Map
module.exports =
config:
executablePath:
type: 'string'
default: "/usr/local/bin/textidote"
description: 'Path to the textidote binary'
textidoteArguments:
type: 'array'
default: ["--check", "en"]
description: 'Arguments to pass to textidote. Seperate by comma.'
activate: (state) ->
require("atom-package-deps").install("linter-textidote")
.then ->
console.log 'linter-textidote loaded'
@subscriptions = new CompositeDisposable
@subscriptions.add atom.config.observe 'linter-textidote.executablePath',
(executablePath) =>
# console.log 'observe ' + executablePath
@executablePath = executablePath
@subscriptions.add atom.config.observe 'linter-textidote.textidoteArguments',
(textidoteArguments) =>
# console.log 'observe ' + textidoteArguments
@textidoteArguments = textidoteArguments
deactivate: ->
@subscriptions.dispose()
provideLinter: ->
provider =
name: 'textidote'
grammarScopes: ['text.tex.latex', 'text.tex.latex.beamer', 'text.tex.latex.memoir', 'text.tex.latex.knitr']
scope: 'file'
lintsOnChange: false
lint: (textEditor) =>
if fs.existsSync(textEditor.getPath())
return @lintFile textEditor.getPath()
.then @parseOutput
console.log 'file "' + textEditor.getPath() + '"" does not exist'
return []
lintFile: (filePath) ->
args = ["--output", "singleline", "--read-all", "--no-color", "--no-config"]
if textidoteArguments
for x in textidoteArguments
args.push x
args.push filePath
# console.log args.join(" ")
opt = {}
opt.uniqueKey = 'PI:KEY:<KEY>END_PI'
opt.stream = 'both'
console.log "linting: " + filePath
return helpers.exec(executablePath, args, opt)
parseOutput: (output, filePath) ->
console.log "linting finished: " + filePath + "\n" + output.stderr
rawRegex = '^(?<file>.+)\\(L(?<lineStart>[0-9]+)C(?<colStart>[0-9]+)-L(?<lineEnd>[0-9]+)C(?<colEnd>[0-9]+)\\): (?<message>.+) Suggestions: \\[(?<suggestions>.+)\\].*$'
toReturn = []
if xcache.has(rawRegex)
regex = xcache.get(rawRegex)
else
xcache.set(rawRegex, regex = XRegExp(rawRegex, 'm'))
#for line in output.split(/\r?\n/)
for line in output.stdout.split('\n')
# console.log line
match = XRegExp.exec(line, regex)
if match
# console.log match
lineStart = parseInt(match.lineStart,10) - 1
colStart = parseInt(match.colStart,10) - 1
lineEnd = parseInt(match.lineEnd,10) - 1
colEnd = parseInt(match.colEnd,10) - 1
range = [[lineStart, colStart], [lineEnd, colEnd]]
message = match.message
solutions = []
for suggestion in match.suggestions.split(', ')
solutions.push {position: range, title: 'Change to: ' + suggestion, replaceWith: suggestion}
# console.log solutions
toReturn.push({
severity: "warning",
location: {
file: match.file,
position: range
},
solutions: solutions,
description: message,
excerpt: message
})
# console.log toReturn
return toReturn
|
[
{
"context": "###\n chroma.js\n\n Copyright (c) 2011-2013, Gregor Aisch\n All rights reserved.\n\n Redistribution and ",
"end": 60,
"score": 0.9998817443847656,
"start": 48,
"tag": "NAME",
"value": "Gregor Aisch"
},
{
"context": " OF SUCH DAMAGE.\n\n @source: https://g... | bower_components/chroma-js/src/colorscale.coffee | zzolo/ractive-js-example-presentation | 1 | ###
chroma.js
Copyright (c) 2011-2013, Gregor Aisch
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* The name Gregor Aisch may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL GREGOR AISCH OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@source: https://github.com/gka/chroma.js
###
root = (exports ? this)
chroma = root.chroma ?= {}
Color = chroma.Color
class ColorScale
###
base class for color scales
###
constructor: (opts={}) ->
me = @
me.range opts.colors, opts.positions
me._mode = opts.mode ? 'rgb'
me._nacol = chroma.hex opts.nacol ? chroma.hex '#ccc'
me.domain [0, 1]
me
range: (colors, positions) ->
me = @
if not colors?
colors = ['#ddd', '#222']
if colors? and type(colors) == 'string' and chroma.brewer?[colors]?
colors = chroma.brewer[colors].slice(0)
# convert to chroma classes
for c in [0..colors.length-1]
col = colors[c]
colors[c] = new Color(col) if type(col) == "string"
me._colors = colors
# auto-fill color position
if positions?
me._pos = positions
else
me._pos = []
for c in [0..colors.length-1]
me._pos.push c/(colors.length-1)
me
domain: (domain = []) ->
###
# use this if you want to display a limited number of data classes
# possible methods are "equalinterval", "quantiles", "custom"
###
me = @
me._domain = domain
me._min = domain[0]
me._max = domain[domain.length-1]
if domain.length == 2
me._numClasses = 0
else
me._numClasses = domain.length-1
me
get: (value) ->
me = @
if isNaN(value) then return me._nacol
if me._domain.length > 2
c = me.getClass value
f = c/(me._numClasses-1)
else
f = f0 = (value - me._min) / (me._max - me._min)
f = Math.min(1, Math.max(0, f))
me.fColor f
fColor: (f) ->
me = @
cols = me._colors
for i in [0..me._pos.length-1]
p = me._pos[i]
if f <= p
col = cols[i]
break
if f >= p and i == me._pos.length-1
col = cols[i]
break
if f > p and f < me._pos[i+1]
f = (f-p)/(me._pos[i+1]-p)
col = chroma.interpolate cols[i], cols[i+1], f, me._mode
break
col
classifyValue: (value) ->
me = @
domain = me._domain
val = value
if domain.length > 2
n = domain.length-1
i = me.getClass(value)
val = domain[i] + (domain[i+1] - domain[i]) * 0.5
#console.log '-', val
minc = domain[0] # + (domain[1]-domain[0])*0.3
maxc = domain[n-1] # + (domain[n]-domain[n-1])*0.7
val = me._min + ((val - minc) / (maxc-minc)) * (me._max - me._min)
val
getClass: (value) ->
self = @
domain = self._domain
if domain?
n = domain.length-1
i = 0
while i < n and value >= domain[i]
i++
return i-1
return 0
validValue: (value) ->
not isNaN(value)
chroma.ColorScale = ColorScale
# minimal multi-purpose interface
chroma.scale = (colors, positions) ->
colscale = new chroma.ColorScale()
colscale.range colors, positions
out = false
f = (v) ->
c = colscale.get v
if out and c[out] then c[out]() else c
f.domain = (domain, classes, mode='e', key) ->
if classes?
d = chroma.analyze domain, key
if classes == 0
domain = [d.min, d.max]
else
domain = chroma.limits d, mode, classes
colscale.domain domain
f
f.mode = (_m) ->
colscale._mode = _m
f
f.range = (_colors, _pos) ->
colscale.range _colors, _pos
f
f.out = (_o) ->
out = _o
f
f.getColor = (val) ->
# introduced for backward compatiblity
f val
f
# some pre-defined color scales:
chroma.scales ?= {}
chroma.scales.cool = ->
chroma.scale [chroma.hsl(180,1,.9), chroma.hsl(250,.7,.4)]
chroma.scales.hot = ->
chroma.scale(['#000','#f00','#ff0','#fff'], [0,.25,.75,1]).mode('rgb')
| 104477 | ###
chroma.js
Copyright (c) 2011-2013, <NAME>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* The name Gregor Aisch may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL GREGOR AISCH OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@source: https://github.com/gka/chroma.js
###
root = (exports ? this)
chroma = root.chroma ?= {}
Color = chroma.Color
class ColorScale
###
base class for color scales
###
constructor: (opts={}) ->
me = @
me.range opts.colors, opts.positions
me._mode = opts.mode ? 'rgb'
me._nacol = chroma.hex opts.nacol ? chroma.hex '#ccc'
me.domain [0, 1]
me
range: (colors, positions) ->
me = @
if not colors?
colors = ['#ddd', '#222']
if colors? and type(colors) == 'string' and chroma.brewer?[colors]?
colors = chroma.brewer[colors].slice(0)
# convert to chroma classes
for c in [0..colors.length-1]
col = colors[c]
colors[c] = new Color(col) if type(col) == "string"
me._colors = colors
# auto-fill color position
if positions?
me._pos = positions
else
me._pos = []
for c in [0..colors.length-1]
me._pos.push c/(colors.length-1)
me
domain: (domain = []) ->
###
# use this if you want to display a limited number of data classes
# possible methods are "equalinterval", "quantiles", "custom"
###
me = @
me._domain = domain
me._min = domain[0]
me._max = domain[domain.length-1]
if domain.length == 2
me._numClasses = 0
else
me._numClasses = domain.length-1
me
get: (value) ->
me = @
if isNaN(value) then return me._nacol
if me._domain.length > 2
c = me.getClass value
f = c/(me._numClasses-1)
else
f = f0 = (value - me._min) / (me._max - me._min)
f = Math.min(1, Math.max(0, f))
me.fColor f
fColor: (f) ->
me = @
cols = me._colors
for i in [0..me._pos.length-1]
p = me._pos[i]
if f <= p
col = cols[i]
break
if f >= p and i == me._pos.length-1
col = cols[i]
break
if f > p and f < me._pos[i+1]
f = (f-p)/(me._pos[i+1]-p)
col = chroma.interpolate cols[i], cols[i+1], f, me._mode
break
col
classifyValue: (value) ->
me = @
domain = me._domain
val = value
if domain.length > 2
n = domain.length-1
i = me.getClass(value)
val = domain[i] + (domain[i+1] - domain[i]) * 0.5
#console.log '-', val
minc = domain[0] # + (domain[1]-domain[0])*0.3
maxc = domain[n-1] # + (domain[n]-domain[n-1])*0.7
val = me._min + ((val - minc) / (maxc-minc)) * (me._max - me._min)
val
getClass: (value) ->
self = @
domain = self._domain
if domain?
n = domain.length-1
i = 0
while i < n and value >= domain[i]
i++
return i-1
return 0
validValue: (value) ->
not isNaN(value)
chroma.ColorScale = ColorScale
# minimal multi-purpose interface
chroma.scale = (colors, positions) ->
colscale = new chroma.ColorScale()
colscale.range colors, positions
out = false
f = (v) ->
c = colscale.get v
if out and c[out] then c[out]() else c
f.domain = (domain, classes, mode='e', key) ->
if classes?
d = chroma.analyze domain, key
if classes == 0
domain = [d.min, d.max]
else
domain = chroma.limits d, mode, classes
colscale.domain domain
f
f.mode = (_m) ->
colscale._mode = _m
f
f.range = (_colors, _pos) ->
colscale.range _colors, _pos
f
f.out = (_o) ->
out = _o
f
f.getColor = (val) ->
# introduced for backward compatiblity
f val
f
# some pre-defined color scales:
chroma.scales ?= {}
chroma.scales.cool = ->
chroma.scale [chroma.hsl(180,1,.9), chroma.hsl(250,.7,.4)]
chroma.scales.hot = ->
chroma.scale(['#000','#f00','#ff0','#fff'], [0,.25,.75,1]).mode('rgb')
| true | ###
chroma.js
Copyright (c) 2011-2013, PI:NAME:<NAME>END_PI
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* The name Gregor Aisch may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL GREGOR AISCH OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@source: https://github.com/gka/chroma.js
###
root = (exports ? this)
chroma = root.chroma ?= {}
Color = chroma.Color
class ColorScale
###
base class for color scales
###
constructor: (opts={}) ->
me = @
me.range opts.colors, opts.positions
me._mode = opts.mode ? 'rgb'
me._nacol = chroma.hex opts.nacol ? chroma.hex '#ccc'
me.domain [0, 1]
me
range: (colors, positions) ->
me = @
if not colors?
colors = ['#ddd', '#222']
if colors? and type(colors) == 'string' and chroma.brewer?[colors]?
colors = chroma.brewer[colors].slice(0)
# convert to chroma classes
for c in [0..colors.length-1]
col = colors[c]
colors[c] = new Color(col) if type(col) == "string"
me._colors = colors
# auto-fill color position
if positions?
me._pos = positions
else
me._pos = []
for c in [0..colors.length-1]
me._pos.push c/(colors.length-1)
me
domain: (domain = []) ->
###
# use this if you want to display a limited number of data classes
# possible methods are "equalinterval", "quantiles", "custom"
###
me = @
me._domain = domain
me._min = domain[0]
me._max = domain[domain.length-1]
if domain.length == 2
me._numClasses = 0
else
me._numClasses = domain.length-1
me
get: (value) ->
me = @
if isNaN(value) then return me._nacol
if me._domain.length > 2
c = me.getClass value
f = c/(me._numClasses-1)
else
f = f0 = (value - me._min) / (me._max - me._min)
f = Math.min(1, Math.max(0, f))
me.fColor f
fColor: (f) ->
me = @
cols = me._colors
for i in [0..me._pos.length-1]
p = me._pos[i]
if f <= p
col = cols[i]
break
if f >= p and i == me._pos.length-1
col = cols[i]
break
if f > p and f < me._pos[i+1]
f = (f-p)/(me._pos[i+1]-p)
col = chroma.interpolate cols[i], cols[i+1], f, me._mode
break
col
classifyValue: (value) ->
me = @
domain = me._domain
val = value
if domain.length > 2
n = domain.length-1
i = me.getClass(value)
val = domain[i] + (domain[i+1] - domain[i]) * 0.5
#console.log '-', val
minc = domain[0] # + (domain[1]-domain[0])*0.3
maxc = domain[n-1] # + (domain[n]-domain[n-1])*0.7
val = me._min + ((val - minc) / (maxc-minc)) * (me._max - me._min)
val
getClass: (value) ->
self = @
domain = self._domain
if domain?
n = domain.length-1
i = 0
while i < n and value >= domain[i]
i++
return i-1
return 0
validValue: (value) ->
not isNaN(value)
chroma.ColorScale = ColorScale
# minimal multi-purpose interface
chroma.scale = (colors, positions) ->
colscale = new chroma.ColorScale()
colscale.range colors, positions
out = false
f = (v) ->
c = colscale.get v
if out and c[out] then c[out]() else c
f.domain = (domain, classes, mode='e', key) ->
if classes?
d = chroma.analyze domain, key
if classes == 0
domain = [d.min, d.max]
else
domain = chroma.limits d, mode, classes
colscale.domain domain
f
f.mode = (_m) ->
colscale._mode = _m
f
f.range = (_colors, _pos) ->
colscale.range _colors, _pos
f
f.out = (_o) ->
out = _o
f
f.getColor = (val) ->
# introduced for backward compatiblity
f val
f
# some pre-defined color scales:
chroma.scales ?= {}
chroma.scales.cool = ->
chroma.scale [chroma.hsl(180,1,.9), chroma.hsl(250,.7,.4)]
chroma.scales.hot = ->
chroma.scale(['#000','#f00','#ff0','#fff'], [0,.25,.75,1]).mode('rgb')
|
[
{
"context": "p Controller\", ->\n\n\tbeforeEach ->\n\t\t@user = {_id:\"!@312431\"}\n\t\t@subscription = {}\n\t\t@GroupHandler = \n\t\t\taddU",
"end": 307,
"score": 0.8701440095901489,
"start": 300,
"tag": "PASSWORD",
"value": "@312431"
},
{
"context": "nLocator\": @SubscriptionLocat... | test/UnitTests/coffee/Subscription/SubscriptionGroupControllerTests.coffee | mickaobrien/web-sharelatex | 0 | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/Subscription/SubscriptionGroupController"
describe "Subscription Group Controller", ->
beforeEach ->
@user = {_id:"!@312431"}
@subscription = {}
@GroupHandler =
addUserToGroup: sinon.stub().callsArgWith(2, null, @user)
removeUserFromGroup: sinon.stub().callsArgWith(2)
@SubscriptionLocator = getUsersSubscription: sinon.stub().callsArgWith(1, null, @subscription)
@Controller = SandboxedModule.require modulePath, requires:
"./SubscriptionGroupHandler":@GroupHandler
"logger-sharelatex": log:->
"./SubscriptionLocator": @SubscriptionLocator
@adminUserId = "123jlkj"
@req =
session:
user: _id: @adminUserId
describe "addUserToGroup", ->
it "should use the admin id for the logged in user and take the email address from the body", (done)->
newEmail = "31231"
@req.body = email: newEmail
res =
json : (data)=>
@GroupHandler.addUserToGroup.calledWith(@adminUserId, newEmail).should.equal true
data.user.should.deep.equal @user
done()
@Controller.addUserToGroup @req, res
describe "removeUserFromGroup", ->
it "should use the admin id for the logged in user and take the email address from the body", (done)->
userIdToRemove = "31231"
@req.params = user_id: userIdToRemove
res =
send : =>
@GroupHandler.removeUserFromGroup.calledWith(@adminUserId, userIdToRemove).should.equal true
done()
@Controller.removeUserFromGroup @req, res
describe "renderSubscriptionGroupAdminPage", ->
it "should redirect you if you don't have a group account", (done)->
@subscription.group = false
res =
redirect : (path)=>
path.should.equal("/")
done()
@Controller.renderSubscriptionGroupAdminPage @req, res
| 170285 | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/Subscription/SubscriptionGroupController"
describe "Subscription Group Controller", ->
beforeEach ->
@user = {_id:"!<PASSWORD>"}
@subscription = {}
@GroupHandler =
addUserToGroup: sinon.stub().callsArgWith(2, null, @user)
removeUserFromGroup: sinon.stub().callsArgWith(2)
@SubscriptionLocator = getUsersSubscription: sinon.stub().callsArgWith(1, null, @subscription)
@Controller = SandboxedModule.require modulePath, requires:
"./SubscriptionGroupHandler":@GroupHandler
"logger-sharelatex": log:->
"./SubscriptionLocator": @SubscriptionLocator
@adminUserId = "123jlkj"
@req =
session:
user: _id: @adminUserId
describe "addUserToGroup", ->
it "should use the admin id for the logged in user and take the email address from the body", (done)->
newEmail = "31231"
@req.body = email: newEmail
res =
json : (data)=>
@GroupHandler.addUserToGroup.calledWith(@adminUserId, newEmail).should.equal true
data.user.should.deep.equal @user
done()
@Controller.addUserToGroup @req, res
describe "removeUserFromGroup", ->
it "should use the admin id for the logged in user and take the email address from the body", (done)->
userIdToRemove = "31231"
@req.params = user_id: userIdToRemove
res =
send : =>
@GroupHandler.removeUserFromGroup.calledWith(@adminUserId, userIdToRemove).should.equal true
done()
@Controller.removeUserFromGroup @req, res
describe "renderSubscriptionGroupAdminPage", ->
it "should redirect you if you don't have a group account", (done)->
@subscription.group = false
res =
redirect : (path)=>
path.should.equal("/")
done()
@Controller.renderSubscriptionGroupAdminPage @req, res
| true | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/Subscription/SubscriptionGroupController"
describe "Subscription Group Controller", ->
beforeEach ->
@user = {_id:"!PI:PASSWORD:<PASSWORD>END_PI"}
@subscription = {}
@GroupHandler =
addUserToGroup: sinon.stub().callsArgWith(2, null, @user)
removeUserFromGroup: sinon.stub().callsArgWith(2)
@SubscriptionLocator = getUsersSubscription: sinon.stub().callsArgWith(1, null, @subscription)
@Controller = SandboxedModule.require modulePath, requires:
"./SubscriptionGroupHandler":@GroupHandler
"logger-sharelatex": log:->
"./SubscriptionLocator": @SubscriptionLocator
@adminUserId = "123jlkj"
@req =
session:
user: _id: @adminUserId
describe "addUserToGroup", ->
it "should use the admin id for the logged in user and take the email address from the body", (done)->
newEmail = "31231"
@req.body = email: newEmail
res =
json : (data)=>
@GroupHandler.addUserToGroup.calledWith(@adminUserId, newEmail).should.equal true
data.user.should.deep.equal @user
done()
@Controller.addUserToGroup @req, res
describe "removeUserFromGroup", ->
it "should use the admin id for the logged in user and take the email address from the body", (done)->
userIdToRemove = "31231"
@req.params = user_id: userIdToRemove
res =
send : =>
@GroupHandler.removeUserFromGroup.calledWith(@adminUserId, userIdToRemove).should.equal true
done()
@Controller.removeUserFromGroup @req, res
describe "renderSubscriptionGroupAdminPage", ->
it "should redirect you if you don't have a group account", (done)->
@subscription.group = false
res =
redirect : (path)=>
path.should.equal("/")
done()
@Controller.renderSubscriptionGroupAdminPage @req, res
|
[
{
"context": "ion\n# * query job statistics\n#\n# Author:\n# <dan.ryan@XXXXXXXXXX>\n\n_ = require('underscore')\nsys = require 'sys' #",
"end": 923,
"score": 0.9830531477928162,
"start": 904,
"tag": "EMAIL",
"value": "dan.ryan@XXXXXXXXXX"
}
] | hubot/scripts/slack_rundeck.coffee | steeef/hubot-demo | 0 | # Description
# Rundeck integration with hubot
#
# Dependencies:
# "underscore": "^1.6.0"
# "strftime": "^0.8.0"
# "xml2js": "^0.4.1"
# "hubot-auth"
#
# Configuration:
# HUBOT_RUNDECK_URL - root URL for Rundeck, not including api path
# HUBOT_RUNDECK_TOKEN
# HUBOT_RUNDECK_PROJECT
#
# Commands:
# hubot (rd|rundeck) (list|jobs) - List all Rundeck jobs
# hubot (rd|rundeck) show <name> - Show detailed info for the job <name>
# hubot (rd|rundeck) run <name> - Execute a Rundeck job <name>
# hubot (rd|rundeck) (adhoc|ad-hoc|ad hoc) <name> <nodename> - Execute an ad-hoc Rundeck job <name> on node <nodename>
# hubot (rd|rundeck) output <id> - Print the output of execution <id>
#
# Notes:
# REQUIRES Rundeck API version 12
# Todo:
# * make job name lookups case-insensitive
# * ability to show results of a job/execution
# * query job statistics
#
# Author:
# <dan.ryan@XXXXXXXXXX>
_ = require('underscore')
sys = require 'sys' # Used for debugging
querystring = require 'querystring'
url = require 'url'
inspect = require('util').inspect
strftime = require('strftime')
Parser = require('xml2js').Parser
class Rundeck
constructor: (@robot) ->
@logger = @robot.logger
@baseUrl = "#{process.env.HUBOT_RUNDECK_URL}/api/12"
@authToken = process.env.HUBOT_RUNDECK_TOKEN
@project = process.env.HUBOT_RUNDECK_PROJECT
@room = process.env.HUBOT_RUNDECK_ROOM
@adminRole = "rundeck_admin"
@headers =
"Accept": "application/xml"
"Content-Type": "application/xml"
"X-Rundeck-Auth-Token": "#{@authToken}"
@plainTextHeaders =
"Accept": "text/plain"
"Content-Type": "text/plain"
"X-Rundeck-Auth-Token": "#{@authToken}"
@cache = {}
@cache['jobs'] = {}
@brain = @robot.brain.data
robot.brain.on 'loaded', =>
@logger.info("Loading rundeck jobs from brain")
if @brain.rundeck?
@logger.info("Loaded saved rundeck jobs")
@cache = @brain.rundeck
else
@logger.info("No saved rundeck jobs found ")
@brain.rundeck = @cache
cache: -> @cache
parser: -> new Parser()
jobs: -> new Jobs(@)
save: ->
@logger.info("Saving cached rundeck jobs to brain")
@brain.rundeck = @cache
getOutput: (url, cb) ->
@robot.http("#{@baseUrl}/#{url}").headers(@plainTextHeaders).get() (err, res, body) =>
if err?
@logger.err JSON.stringify(err)
else
cb body
get: (url, cb) ->
parser = new Parser()
@robot.http("#{@baseUrl}/#{url}").headers(@headers).get() (err, res, body) =>
if err?
@logger.error JSON.stringify(err)
else
@logger.debug body
parser.parseString body, (e, result) ->
cb result
class Job
constructor: (data) ->
@id = data["$"].id
@name = data.name[0]
@description = data.description[0]
@group = data.group[0]
@project = data.project[0]
format: ->
"Name: #{@name}\nId: #{@id}\nDescription: #{@description}\nGroup: #{@group}\nProject: #{@project}"
formatList: ->
"#{@name} - #{@description}"
class Jobs
constructor: (@rundeck) ->
@logger = @rundeck.logger
list: (cb) ->
jobs = []
@rundeck.get "project/#{@rundeck.project}/jobs", (results) ->
for job in results.jobs.job
jobs.push new Job(job)
cb jobs
find: (name, cb) ->
@list (jobs) =>
job = _.findWhere jobs, { name: name }
if job
cb job
else
cb false
run: (name, query, cb) ->
@find name, (job) =>
if job
uri = "job/#{job.id}/run"
uri += query if query?
@rundeck.get uri, (results) ->
cb job, results
else
cb null, false
module.exports = (robot) ->
logger = robot.logger
rundeck = new Rundeck(robot)
# hubot rundeck list
robot.respond /(?:rd|rundeck) (?:list|jobs)$/i, (msg) ->
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().list (jobs) ->
if jobs.length > 0
for job in jobs
msg.send job.formatList()
else
msg.send "No Rundeck jobs found."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# hubot rundeck output <job-id>
# sample url:
robot.respond /(?:rd|rundeck) output (.+)/i, (msg) ->
jobid = msg.match[1]
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.getOutput "execution/#{jobid}/output", (output) ->
if output
msg.send "```#{output}```"
else
msg.send "Could not find output for Rundeck job \"#{jobid}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# hubot rundeck show <name>
robot.respond /(?:rd|rundeck) show ([\w -_]+)/i, (msg) ->
name = msg.match[1]
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().find name, (job) ->
if job
msg.send job.format()
else
msg.send "Could not find Rundeck job \"#{name}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# hubot rundeck run <name>
robot.respond /(?:rd|rundeck) run ([\w -_]+)/i, (msg) ->
name = msg.match[1]
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().run name, null, (job, results) ->
if job
robot.logger.debug inspect(results, false, null)
msg.send "Running job #{name}: #{results.executions.execution[0]['$'].href}"
else
msg.send "Could not execute Rundeck job \"#{name}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# takes all but last word as the name of our job
# hubot rundeck ad-hoc <name> <nodename>
robot.respond /(?:rd|rundeck) (?:ad[ -]?hoc) ([\w -_]+) ([\w-]+)/i, (msg) ->
name = msg.match[1]
params = { argString: "-nodename #{msg.match[2].trim().toLowerCase()}" }
query = "?#{querystring.stringify(params)}"
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().run name, query, (job, results) ->
if job
msg.send "Running job #{name}: #{results.executions.execution[0]['$'].href}"
else
msg.send "Could not execute Rundeck job \"#{name}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# allows webhook from Rundeck for job notifications
# It would be great to get the information from the body of the request, but
# unfortunately, Rundeck's built-in webhooks only use XML, and Hubot's
# Express router expects JSON. So we'll grab from the URI params.
# expects:
# http://hubot:port/hubot/rundeck-webhook/roomname/?status=<status>&job=<job>&execution_id=<execution_id>
robot.router.post "/hubot/rundeck-webhook/:room", (req, res) ->
query = querystring.parse(url.parse(req.url).query)
status = query.status
job = query.job
execution_id = query.execution_id
robot.messageRoom req.params.room, "#{job} #{execution_id} - #{status}"
res.end "ok"
| 150839 | # Description
# Rundeck integration with hubot
#
# Dependencies:
# "underscore": "^1.6.0"
# "strftime": "^0.8.0"
# "xml2js": "^0.4.1"
# "hubot-auth"
#
# Configuration:
# HUBOT_RUNDECK_URL - root URL for Rundeck, not including api path
# HUBOT_RUNDECK_TOKEN
# HUBOT_RUNDECK_PROJECT
#
# Commands:
# hubot (rd|rundeck) (list|jobs) - List all Rundeck jobs
# hubot (rd|rundeck) show <name> - Show detailed info for the job <name>
# hubot (rd|rundeck) run <name> - Execute a Rundeck job <name>
# hubot (rd|rundeck) (adhoc|ad-hoc|ad hoc) <name> <nodename> - Execute an ad-hoc Rundeck job <name> on node <nodename>
# hubot (rd|rundeck) output <id> - Print the output of execution <id>
#
# Notes:
# REQUIRES Rundeck API version 12
# Todo:
# * make job name lookups case-insensitive
# * ability to show results of a job/execution
# * query job statistics
#
# Author:
# <<EMAIL>>
_ = require('underscore')
sys = require 'sys' # Used for debugging
querystring = require 'querystring'
url = require 'url'
inspect = require('util').inspect
strftime = require('strftime')
Parser = require('xml2js').Parser
class Rundeck
constructor: (@robot) ->
@logger = @robot.logger
@baseUrl = "#{process.env.HUBOT_RUNDECK_URL}/api/12"
@authToken = process.env.HUBOT_RUNDECK_TOKEN
@project = process.env.HUBOT_RUNDECK_PROJECT
@room = process.env.HUBOT_RUNDECK_ROOM
@adminRole = "rundeck_admin"
@headers =
"Accept": "application/xml"
"Content-Type": "application/xml"
"X-Rundeck-Auth-Token": "#{@authToken}"
@plainTextHeaders =
"Accept": "text/plain"
"Content-Type": "text/plain"
"X-Rundeck-Auth-Token": "#{@authToken}"
@cache = {}
@cache['jobs'] = {}
@brain = @robot.brain.data
robot.brain.on 'loaded', =>
@logger.info("Loading rundeck jobs from brain")
if @brain.rundeck?
@logger.info("Loaded saved rundeck jobs")
@cache = @brain.rundeck
else
@logger.info("No saved rundeck jobs found ")
@brain.rundeck = @cache
cache: -> @cache
parser: -> new Parser()
jobs: -> new Jobs(@)
save: ->
@logger.info("Saving cached rundeck jobs to brain")
@brain.rundeck = @cache
getOutput: (url, cb) ->
@robot.http("#{@baseUrl}/#{url}").headers(@plainTextHeaders).get() (err, res, body) =>
if err?
@logger.err JSON.stringify(err)
else
cb body
get: (url, cb) ->
parser = new Parser()
@robot.http("#{@baseUrl}/#{url}").headers(@headers).get() (err, res, body) =>
if err?
@logger.error JSON.stringify(err)
else
@logger.debug body
parser.parseString body, (e, result) ->
cb result
class Job
constructor: (data) ->
@id = data["$"].id
@name = data.name[0]
@description = data.description[0]
@group = data.group[0]
@project = data.project[0]
format: ->
"Name: #{@name}\nId: #{@id}\nDescription: #{@description}\nGroup: #{@group}\nProject: #{@project}"
formatList: ->
"#{@name} - #{@description}"
class Jobs
constructor: (@rundeck) ->
@logger = @rundeck.logger
list: (cb) ->
jobs = []
@rundeck.get "project/#{@rundeck.project}/jobs", (results) ->
for job in results.jobs.job
jobs.push new Job(job)
cb jobs
find: (name, cb) ->
@list (jobs) =>
job = _.findWhere jobs, { name: name }
if job
cb job
else
cb false
run: (name, query, cb) ->
@find name, (job) =>
if job
uri = "job/#{job.id}/run"
uri += query if query?
@rundeck.get uri, (results) ->
cb job, results
else
cb null, false
module.exports = (robot) ->
logger = robot.logger
rundeck = new Rundeck(robot)
# hubot rundeck list
robot.respond /(?:rd|rundeck) (?:list|jobs)$/i, (msg) ->
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().list (jobs) ->
if jobs.length > 0
for job in jobs
msg.send job.formatList()
else
msg.send "No Rundeck jobs found."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# hubot rundeck output <job-id>
# sample url:
robot.respond /(?:rd|rundeck) output (.+)/i, (msg) ->
jobid = msg.match[1]
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.getOutput "execution/#{jobid}/output", (output) ->
if output
msg.send "```#{output}```"
else
msg.send "Could not find output for Rundeck job \"#{jobid}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# hubot rundeck show <name>
robot.respond /(?:rd|rundeck) show ([\w -_]+)/i, (msg) ->
name = msg.match[1]
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().find name, (job) ->
if job
msg.send job.format()
else
msg.send "Could not find Rundeck job \"#{name}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# hubot rundeck run <name>
robot.respond /(?:rd|rundeck) run ([\w -_]+)/i, (msg) ->
name = msg.match[1]
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().run name, null, (job, results) ->
if job
robot.logger.debug inspect(results, false, null)
msg.send "Running job #{name}: #{results.executions.execution[0]['$'].href}"
else
msg.send "Could not execute Rundeck job \"#{name}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# takes all but last word as the name of our job
# hubot rundeck ad-hoc <name> <nodename>
robot.respond /(?:rd|rundeck) (?:ad[ -]?hoc) ([\w -_]+) ([\w-]+)/i, (msg) ->
name = msg.match[1]
params = { argString: "-nodename #{msg.match[2].trim().toLowerCase()}" }
query = "?#{querystring.stringify(params)}"
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().run name, query, (job, results) ->
if job
msg.send "Running job #{name}: #{results.executions.execution[0]['$'].href}"
else
msg.send "Could not execute Rundeck job \"#{name}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# allows webhook from Rundeck for job notifications
# It would be great to get the information from the body of the request, but
# unfortunately, Rundeck's built-in webhooks only use XML, and Hubot's
# Express router expects JSON. So we'll grab from the URI params.
# expects:
# http://hubot:port/hubot/rundeck-webhook/roomname/?status=<status>&job=<job>&execution_id=<execution_id>
robot.router.post "/hubot/rundeck-webhook/:room", (req, res) ->
query = querystring.parse(url.parse(req.url).query)
status = query.status
job = query.job
execution_id = query.execution_id
robot.messageRoom req.params.room, "#{job} #{execution_id} - #{status}"
res.end "ok"
| true | # Description
# Rundeck integration with hubot
#
# Dependencies:
# "underscore": "^1.6.0"
# "strftime": "^0.8.0"
# "xml2js": "^0.4.1"
# "hubot-auth"
#
# Configuration:
# HUBOT_RUNDECK_URL - root URL for Rundeck, not including api path
# HUBOT_RUNDECK_TOKEN
# HUBOT_RUNDECK_PROJECT
#
# Commands:
# hubot (rd|rundeck) (list|jobs) - List all Rundeck jobs
# hubot (rd|rundeck) show <name> - Show detailed info for the job <name>
# hubot (rd|rundeck) run <name> - Execute a Rundeck job <name>
# hubot (rd|rundeck) (adhoc|ad-hoc|ad hoc) <name> <nodename> - Execute an ad-hoc Rundeck job <name> on node <nodename>
# hubot (rd|rundeck) output <id> - Print the output of execution <id>
#
# Notes:
# REQUIRES Rundeck API version 12
# Todo:
# * make job name lookups case-insensitive
# * ability to show results of a job/execution
# * query job statistics
#
# Author:
# <PI:EMAIL:<EMAIL>END_PI>
_ = require('underscore')
sys = require 'sys' # Used for debugging
querystring = require 'querystring'
url = require 'url'
inspect = require('util').inspect
strftime = require('strftime')
Parser = require('xml2js').Parser
class Rundeck
constructor: (@robot) ->
@logger = @robot.logger
@baseUrl = "#{process.env.HUBOT_RUNDECK_URL}/api/12"
@authToken = process.env.HUBOT_RUNDECK_TOKEN
@project = process.env.HUBOT_RUNDECK_PROJECT
@room = process.env.HUBOT_RUNDECK_ROOM
@adminRole = "rundeck_admin"
@headers =
"Accept": "application/xml"
"Content-Type": "application/xml"
"X-Rundeck-Auth-Token": "#{@authToken}"
@plainTextHeaders =
"Accept": "text/plain"
"Content-Type": "text/plain"
"X-Rundeck-Auth-Token": "#{@authToken}"
@cache = {}
@cache['jobs'] = {}
@brain = @robot.brain.data
robot.brain.on 'loaded', =>
@logger.info("Loading rundeck jobs from brain")
if @brain.rundeck?
@logger.info("Loaded saved rundeck jobs")
@cache = @brain.rundeck
else
@logger.info("No saved rundeck jobs found ")
@brain.rundeck = @cache
cache: -> @cache
parser: -> new Parser()
jobs: -> new Jobs(@)
save: ->
@logger.info("Saving cached rundeck jobs to brain")
@brain.rundeck = @cache
getOutput: (url, cb) ->
@robot.http("#{@baseUrl}/#{url}").headers(@plainTextHeaders).get() (err, res, body) =>
if err?
@logger.err JSON.stringify(err)
else
cb body
get: (url, cb) ->
parser = new Parser()
@robot.http("#{@baseUrl}/#{url}").headers(@headers).get() (err, res, body) =>
if err?
@logger.error JSON.stringify(err)
else
@logger.debug body
parser.parseString body, (e, result) ->
cb result
class Job
constructor: (data) ->
@id = data["$"].id
@name = data.name[0]
@description = data.description[0]
@group = data.group[0]
@project = data.project[0]
format: ->
"Name: #{@name}\nId: #{@id}\nDescription: #{@description}\nGroup: #{@group}\nProject: #{@project}"
formatList: ->
"#{@name} - #{@description}"
class Jobs
constructor: (@rundeck) ->
@logger = @rundeck.logger
list: (cb) ->
jobs = []
@rundeck.get "project/#{@rundeck.project}/jobs", (results) ->
for job in results.jobs.job
jobs.push new Job(job)
cb jobs
find: (name, cb) ->
@list (jobs) =>
job = _.findWhere jobs, { name: name }
if job
cb job
else
cb false
run: (name, query, cb) ->
@find name, (job) =>
if job
uri = "job/#{job.id}/run"
uri += query if query?
@rundeck.get uri, (results) ->
cb job, results
else
cb null, false
module.exports = (robot) ->
logger = robot.logger
rundeck = new Rundeck(robot)
# hubot rundeck list
robot.respond /(?:rd|rundeck) (?:list|jobs)$/i, (msg) ->
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().list (jobs) ->
if jobs.length > 0
for job in jobs
msg.send job.formatList()
else
msg.send "No Rundeck jobs found."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# hubot rundeck output <job-id>
# sample url:
robot.respond /(?:rd|rundeck) output (.+)/i, (msg) ->
jobid = msg.match[1]
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.getOutput "execution/#{jobid}/output", (output) ->
if output
msg.send "```#{output}```"
else
msg.send "Could not find output for Rundeck job \"#{jobid}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# hubot rundeck show <name>
robot.respond /(?:rd|rundeck) show ([\w -_]+)/i, (msg) ->
name = msg.match[1]
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().find name, (job) ->
if job
msg.send job.format()
else
msg.send "Could not find Rundeck job \"#{name}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# hubot rundeck run <name>
robot.respond /(?:rd|rundeck) run ([\w -_]+)/i, (msg) ->
name = msg.match[1]
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().run name, null, (job, results) ->
if job
robot.logger.debug inspect(results, false, null)
msg.send "Running job #{name}: #{results.executions.execution[0]['$'].href}"
else
msg.send "Could not execute Rundeck job \"#{name}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# takes all but last word as the name of our job
# hubot rundeck ad-hoc <name> <nodename>
robot.respond /(?:rd|rundeck) (?:ad[ -]?hoc) ([\w -_]+) ([\w-]+)/i, (msg) ->
name = msg.match[1]
params = { argString: "-nodename #{msg.match[2].trim().toLowerCase()}" }
query = "?#{querystring.stringify(params)}"
if robot.auth.hasRole(msg.envelope.user, rundeck.adminRole)
rundeck.jobs().run name, query, (job, results) ->
if job
msg.send "Running job #{name}: #{results.executions.execution[0]['$'].href}"
else
msg.send "Could not execute Rundeck job \"#{name}\"."
else
msg.send "#{msg.envelope.user}: you do not have #{rundeck.adminRole} role."
# allows webhook from Rundeck for job notifications
# It would be great to get the information from the body of the request, but
# unfortunately, Rundeck's built-in webhooks only use XML, and Hubot's
# Express router expects JSON. So we'll grab from the URI params.
# expects:
# http://hubot:port/hubot/rundeck-webhook/roomname/?status=<status>&job=<job>&execution_id=<execution_id>
robot.router.post "/hubot/rundeck-webhook/:room", (req, res) ->
query = querystring.parse(url.parse(req.url).query)
status = query.status
job = query.job
execution_id = query.execution_id
robot.messageRoom req.params.room, "#{job} #{execution_id} - #{status}"
res.end "ok"
|
[
{
"context": " Software (http://funcss.org)\n#\n# Copyright © 2015 Bernát Kalló\n#\n# Permission is hereby granted, free of charge,",
"end": 94,
"score": 0.9998233914375305,
"start": 82,
"tag": "NAME",
"value": "Bernát Kalló"
}
] | src/compiler/syntax/tokenizer.coffee | funcss-lang/funcss | 9 | #
# This file is part of FuncSS Software (http://funcss.org)
#
# Copyright © 2015 Bernát Kalló
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# This implementation is based on CSS Syntax Module Level 3 Candidate
# Recommendation http://www.w3.org/TR/css-syntax-3
#
# Copyright © 2014 W3C® (MIT, ERCIM, Keio, Beihang). This software or document
# includes material copied from or derived from CSS Syntax Module Level 3
# Candidate Recommendation http://www.w3.org/TR/css-syntax-3 . Comments that
# start with #> are copied from the referred specification. Consult
# http://www.w3.org/Consortium/Legal/2015/doc-license for details about their
# usage.
SS = require "./ss_nodes"
# CSS tokenizer implementing the tokenization algorithms of the CSS Syntax
# Module Level 3 Candidate Recommendation (http://www.w3.org/TR/css-syntax-3).
# The input is kept as an array of single-character strings; the sentinel
# string "EOF" stands for the end of the input.
class Tokenizer
  # Tokenize `string`; returns the array of tokens, excluding the EOFToken
  # that terminates the loop.
  tokenize: (string) ->
    @init(string)
    tokens = []
    while not ((token = @consume_a_token()) instanceof SS.EOFToken)
      tokens.push token
    return tokens
  # Reset the tokenizer state to the start of `string`.
  init: (string) ->
    @stream = string.split("")
    @current = undefined
  # Consume the next input code point into @current ("EOF" when exhausted).
  consume_next: ->
    if @stream.length
      @current = @stream.shift()
    else
      @current = "EOF"
  # Peek one code point ahead without consuming it.
  next: ->
    if @stream.length
      @stream[0]
    else
      "EOF"
  # Peek two code points ahead.
  next2: ->
    if @stream.length > 1
      @stream[1]
    else
      "EOF"
  # Peek three code points ahead.
  next3: ->
    if @stream.length > 2
      @stream[2]
    else
      "EOF"
  # Push @current back onto the input so it is consumed again.
  reconsume_current: ->
    @stream.unshift(@current)
    @current = undefined
  # --- Character-class predicates (spec section 4.2) ---
  is_digit: (c) ->
    "0" <= c <= "9"
  is_hex_digit: (c) ->
    @is_digit(c) or "A" <= c <= "F" or "a" <= c <= "f"
  is_uppercase_letter: (c) ->
    "A" <= c <= "Z"
  is_lowercase_letter: (c) ->
    "a" <= c <= "z"
  is_letter: (c) ->
    @is_uppercase_letter(c) or @is_lowercase_letter(c)
  is_non_ASCII: (c) ->
    c.charCodeAt(0) >= 0x80
  is_name_start_code_point: (c) ->
    # The explicit EOF guard is needed because the sentinel string "EOF"
    # would otherwise compare as a letter ("A" <= "EOF" <= "Z" holds).
    c isnt "EOF" and @is_letter(c) or @is_non_ASCII(c) or c is "_"
  is_name_code_point: (c) ->
    @is_name_start_code_point(c) or @is_digit(c) or c is "-"
  is_non_printable: (c) ->
    cc = c.charCodeAt(0)
    cc in [0x0..0x8] or cc is 0xB or cc in [0xe..0x1F] or cc is 0x7F
  is_newline: (c) ->
    c is "\n"
  is_whitespace: (c) ->
    @is_newline(c) or c is "\t" or c is " "
  # Class-body constants; the methods below close over them.
  MIN_SURROGATE_CODE_POINT = 0xD800
  MAX_SURROGATE_CODE_POINT = 0xDFFF
  is_surrogate_code_point: (c) ->
    # Chained comparison instead of `in [a..b]`, which would materialize a
    # 2048-element array on every call.
    MIN_SURROGATE_CODE_POINT <= c.charCodeAt(0) <= MAX_SURROGATE_CODE_POINT
  MAX_ALLOWED_CODE_POINT = 0x10FFFF
  # 4.3.1. Consume a token. Returns a single token of any type.
  consume_a_token : ->
    @consume_next()
    switch
      when @is_whitespace(@current)
        while @is_whitespace(@next())
          @consume_next()
        new SS.WhitespaceToken()
      when @current is "\""
        @consume_a_string_token("\"")
      when @current is "#"
        if @is_name_code_point(@next()) or @next_2_valid_escape()
          is_id = @next_3_starts_identifier() # FIXME needed?
          new SS.HashToken(@consume_a_name(), if is_id then "id" else undefined)
        else
          new SS.DelimToken(@current)
      when @current is "$"
        if @next() is "="
          @consume_next()
          new SS.SuffixMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "'"
        @consume_a_string_token("'")
      when @current is "("
        new SS.OpeningParenToken
      when @current is ")"
        new SS.ClosingParenToken
      when @current is "*"
        if @next() is "="
          @consume_next()
          new SS.SubstringMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "+"
        if @starts_with_number()
          @reconsume_current()
          @consume_a_numeric_token()
        else
          new SS.DelimToken(@current)
      when @current is ","
        new SS.CommaToken()
      when @current is "-"
        if @starts_with_number()
          @reconsume_current()
          @consume_a_numeric_token()
        else if @next() is "-" and @next2() is ">"
          @consume_next()
          @consume_next()
          new SS.CDCToken()
        else if @starts_with_ident()
          @reconsume_current()
          @consume_an_ident_like_token()
        else
          new SS.DelimToken(@current)
      when @current is "."
        if @starts_with_number()
          @reconsume_current()
          @consume_a_numeric_token()
        else
          new SS.DelimToken(@current)
      when @current is "/"
        if @next() is "*" # comment: skip up to "*/" (or EOF) and retokenize
          @consume_next()
          while @next() isnt "EOF" and not (@next() is "*" and @next2() is "/")
            @consume_next()
          if (@next() is "*" and @next2() is "/")
            @consume_next()
            @consume_next()
          @consume_a_token()
        else
          new SS.DelimToken(@current)
      when @current is ":"
        new SS.ColonToken()
      when @current is ";"
        new SS.SemicolonToken()
      when @current is "<"
        if @next() is "!" and @next2() is "-" and @next3() is "-"
          @consume_next()
          @consume_next()
          @consume_next()
          new SS.CDOToken()
        else
          new SS.DelimToken(@current)
      when @current is "@"
        if @next_3_starts_identifier()
          new SS.AtKeywordToken(@consume_a_name())
        else
          new SS.DelimToken(@current)
      when @current is "["
        new SS.OpeningSquareToken
      when @current is "\\"
        if @starts_with_valid_escape()
          @reconsume_current()
          @consume_an_ident_like_token()
        else
          new SS.DelimToken(@current)
      when @current is "]"
        new SS.ClosingSquareToken
      when @current is "^"
        if @next() is "="
          @consume_next()
          new SS.PrefixMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "{"
        new SS.OpeningCurlyToken
      when @current is "}"
        new SS.ClosingCurlyToken
      when "0" <= @current <= "9"
        @reconsume_current()
        @consume_a_numeric_token()
      when @is_name_start_code_point(@current)
        @reconsume_current()
        @consume_an_ident_like_token()
      when @current is "|"
        if @next() is "="
          @consume_next()
          new SS.DashMatchToken()
        else if @next() is "|"
          @consume_next()
          new SS.ColumnToken()
        else
          new SS.DelimToken(@current)
      when @current is "~"
        if @next() is "="
          @consume_next()
          new SS.IncludeMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "EOF"
        return new SS.EOFToken()
      else
        new SS.DelimToken(@current)
  # 4.3.2. Consume a numeric token: number, percentage or dimension.
  consume_a_numeric_token: ->
    number = @consume_a_number()
    if @next_3_starts_identifier()
      new SS.DimensionToken(number.repr, number.value, number.type, @consume_a_name())
    else if @next() is "%"
      @consume_next()
      new SS.PercentageToken(number.repr, number.value)
    else
      new SS.NumberToken(number.repr, number.value, number.type)
  # 4.3.3. Consume an ident-like token: ident, function or url.
  consume_an_ident_like_token: ->
    name = @consume_a_name()
    lowerCase = name.toLowerCase() # XXX not really what the spec says
    if lowerCase is "url" and @next() is "("
      @consume_next()
      @consume_a_url_token()
    else if @next() is "("
      @consume_next()
      new SS.FunctionToken(name)
    else
      new SS.IdentToken(name)
  # 4.3.4. Consume a string token delimited by `delim` (" or ').
  consume_a_string_token: (delim) ->
    s = []
    while true
      @consume_next()
      switch
        when @current is delim or @current is "EOF"
          return new SS.StringToken(s.join(""))
        when @current is "\n"
          @reconsume_current()
          return new SS.BadStringToken
        when @current is "\\"
          if @next() is "EOF" # a backslash before EOF is silently dropped
          else if @next() is "\n"
            @consume_next()
          else #if @starts_with_valid_escape() # it always will be true
            s.push @consume_an_escaped_code_point()
        else
          s.push @current
  # 4.3.5. Consume a url token.
  consume_a_url_token: ->
    #> This algorithm assumes that the initial "url(" has already been consumed.
    s = []
    while @is_whitespace(@next())
      @consume_next()
    if @next() is "EOF"
      return new SS.UrlToken(s.join(''))
    if @next() in ["'", '"']
      @consume_next()
      # BUGFIX: use a local variable; the original assigned to SS.stringToken,
      # mutating the shared `ss_nodes` module object on every quoted url.
      stringToken = @consume_a_string_token(@current)
      if stringToken instanceof SS.BadStringToken
        return new SS.BadUrlToken
      while @is_whitespace(@next())
        @consume_next()
      if @next() in [")", "EOF"]
        @consume_next()
        return new SS.UrlToken(stringToken.value)
      else
        @consume_the_remnants_of_a_bad_url()
        return new SS.BadUrlToken
    while true
      @consume_next()
      switch
        when @current in [")", "EOF"]
          return new SS.UrlToken(s.join(''))
        when @is_whitespace(@current)
          while @is_whitespace(@next())
            @consume_next()
          if @next() in [")", "EOF"]
            @consume_next()
            return new SS.UrlToken(s.join(''))
          else
            @consume_the_remnants_of_a_bad_url()
            return new SS.BadUrlToken
        when @current in ['"', "'", "("] or @is_non_printable(@current)
          @consume_the_remnants_of_a_bad_url()
          return new SS.BadUrlToken
        when @current is "\\"
          if @starts_with_valid_escape()
            s.push @consume_an_escaped_code_point()
          else
            # BUGFIX: per spec an invalid escape here is a parse error that
            # yields a <bad-url-token>; the original returned a <url-token>.
            @consume_the_remnants_of_a_bad_url()
            return new SS.BadUrlToken
        else
          s.push @current
  consume_a_unicode_range_token: ->
    #> This algorithm assumes that the initial "u+" has been consumed, and the next code point verified to be a hex digit or a "?".
    throw "unicode range tokens not implemented yet"
  # 4.3.7. Consume an escaped code point; returns a one-character string.
  consume_an_escaped_code_point: ->
    #> It assumes that the U+005C REVERSE SOLIDUS (\) has already been consumed and that the next input code point has already been verified to not be a newline.
    @consume_next()
    switch
      when @is_hex_digit(@current)
        digits = [@current]
        count = 1
        while @is_hex_digit(@next()) and count < 6
          digits.push @consume_next()
          ++count
        if @is_whitespace(@next())
          @consume_next()
        number = parseInt(digits.join(''),16)
        if number is 0 or MIN_SURROGATE_CODE_POINT <= number <= MAX_SURROGATE_CODE_POINT or number > MAX_ALLOWED_CODE_POINT
          return "\ufffd"
        else
          # BUGFIX: the decoded code point was never returned -- the original
          # fell off the `if` and yielded `undefined` for valid hex escapes.
          return String.fromCodePoint(number)
      when @current is "EOF"
        return "\ufffd"
      else
        return @current
  starts_with_valid_escape: ->
    @is_valid_escape(@current, @next())
  next_2_valid_escape: ->
    @is_valid_escape(@next(), @next2())
  # 4.3.8. Check whether two code points form a valid escape.
  is_valid_escape: (c1, c2) ->
    if c1 isnt "\\"
      return false
    if c2 is "\n"
      return false
    return true
  starts_with_ident: ->
    @starts_identifier(@current, @next(), @next2())
  next_3_starts_identifier: ->
    @starts_identifier(@next(), @next2(), @next3())
  # 4.3.9. Check whether three code points would start an identifier.
  starts_identifier: (c1, c2, c3) ->
    switch
      when c1 is "-"
        if @is_name_start_code_point(c2) or c2 is "-" or @is_valid_escape(c2,c3)
          return true
        else
          return false
      when @is_name_start_code_point(c1)
        return true
      when c1 is "\\"
        if @is_valid_escape(c1,c2)
          return true
        else
          return false
      else
        return false
  starts_with_number: ->
    @starts_number(@current, @next(), @next2())
  # 4.3.10. Check whether three code points would start a number.
  starts_number: (c1,c2,c3) ->
    switch
      when c1 in ["+", "-"]
        if @is_digit(c2)
          return true
        if c2 is "." and @is_digit(c3)
          return true
        return false
      when c1 is "."
        if @is_digit(c2)
          return true
        return false
      when @is_digit(c1)
        return true
      else
        return false
  # 4.3.11. Consume a name (a run of name code points and escapes).
  consume_a_name: ->
    s = []
    while true
      @consume_next()
      switch
        when @is_name_code_point(@current)
          s.push @current
        when @starts_with_valid_escape()
          s.push @consume_an_escaped_code_point()
        else
          @reconsume_current()
          return s.join('')
  # 4.3.12. Consume a number; returns {repr, value, type}.
  consume_a_number: ->
    #> This algorithm does not do the verification of the first few code points that are necessary to ensure a number can be obtained from the stream. Ensure that the stream starts with a number before calling this algorithm.
    repr = []
    type = "integer"
    if @next() in ["+", '-']
      repr.push @consume_next()
    while @is_digit(@next())
      repr.push @consume_next()
    if @next() is "." and @is_digit(@next2())
      repr.push @consume_next()
      repr.push @consume_next()
      type = "number"
      while @is_digit(@next())
        repr.push @consume_next()
    if @next() in ["e", "E"] and (@is_digit(@next2()) or @next2() in ['-', '+'] and @is_digit(@next3()))
      repr.push @consume_next()
      if not @is_digit(@next())
        repr.push @consume_next() # exponent sign
      repr.push @consume_next()
      type = "number"
      while @is_digit(@next())
        repr.push @consume_next()
    repr = repr.join('')
    value = @string_to_number(repr)
    return {repr,value,type}
  string_to_number: (s) ->
    parseFloat(s)
  # 4.3.14. Consume the remnants of a bad url, to recover after an error.
  consume_the_remnants_of_a_bad_url: () ->
    while true
      @consume_next()
      switch
        when @current is ")" or @current is "EOF"
          return
        when @starts_with_valid_escape()
          @consume_an_escaped_code_point()
module.exports = new Tokenizer
| 68895 | #
# This file is part of FuncSS Software (http://funcss.org)
#
# Copyright © 2015 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# This implementation is based on CSS Syntax Module Level 3 Candidate
# Recommendation http://www.w3.org/TR/css-syntax-3
#
# Copyright © 2014 W3C® (MIT, ERCIM, Keio, Beihang). This software or document
# includes material copied from or derived from CSS Syntax Module Level 3
# Candidate Recommendation http://www.w3.org/TR/css-syntax-3 . Comments that
# start with #> are copied from the referred specification. Consult
# http://www.w3.org/Consortium/Legal/2015/doc-license for details about their
# usage.
SS = require "./ss_nodes"
# CSS tokenizer implementing the tokenization algorithms of the CSS Syntax
# Module Level 3 Candidate Recommendation (http://www.w3.org/TR/css-syntax-3).
# The input is kept as an array of single-character strings; the sentinel
# string "EOF" stands for the end of the input.
class Tokenizer
  # Tokenize `string`; returns the array of tokens, excluding the EOFToken
  # that terminates the loop.
  tokenize: (string) ->
    @init(string)
    tokens = []
    while not ((token = @consume_a_token()) instanceof SS.EOFToken)
      tokens.push token
    return tokens
  # Reset the tokenizer state to the start of `string`.
  init: (string) ->
    @stream = string.split("")
    @current = undefined
  # Consume the next input code point into @current ("EOF" when exhausted).
  consume_next: ->
    if @stream.length
      @current = @stream.shift()
    else
      @current = "EOF"
  # Peek one code point ahead without consuming it.
  next: ->
    if @stream.length
      @stream[0]
    else
      "EOF"
  # Peek two code points ahead.
  next2: ->
    if @stream.length > 1
      @stream[1]
    else
      "EOF"
  # Peek three code points ahead.
  next3: ->
    if @stream.length > 2
      @stream[2]
    else
      "EOF"
  # Push @current back onto the input so it is consumed again.
  reconsume_current: ->
    @stream.unshift(@current)
    @current = undefined
  # --- Character-class predicates (spec section 4.2) ---
  is_digit: (c) ->
    "0" <= c <= "9"
  is_hex_digit: (c) ->
    @is_digit(c) or "A" <= c <= "F" or "a" <= c <= "f"
  is_uppercase_letter: (c) ->
    "A" <= c <= "Z"
  is_lowercase_letter: (c) ->
    "a" <= c <= "z"
  is_letter: (c) ->
    @is_uppercase_letter(c) or @is_lowercase_letter(c)
  is_non_ASCII: (c) ->
    c.charCodeAt(0) >= 0x80
  is_name_start_code_point: (c) ->
    # The explicit EOF guard is needed because the sentinel string "EOF"
    # would otherwise compare as a letter ("A" <= "EOF" <= "Z" holds).
    c isnt "EOF" and @is_letter(c) or @is_non_ASCII(c) or c is "_"
  is_name_code_point: (c) ->
    @is_name_start_code_point(c) or @is_digit(c) or c is "-"
  is_non_printable: (c) ->
    cc = c.charCodeAt(0)
    cc in [0x0..0x8] or cc is 0xB or cc in [0xe..0x1F] or cc is 0x7F
  is_newline: (c) ->
    c is "\n"
  is_whitespace: (c) ->
    @is_newline(c) or c is "\t" or c is " "
  # Class-body constants; the methods below close over them.
  MIN_SURROGATE_CODE_POINT = 0xD800
  MAX_SURROGATE_CODE_POINT = 0xDFFF
  is_surrogate_code_point: (c) ->
    # Chained comparison instead of `in [a..b]`, which would materialize a
    # 2048-element array on every call.
    MIN_SURROGATE_CODE_POINT <= c.charCodeAt(0) <= MAX_SURROGATE_CODE_POINT
  MAX_ALLOWED_CODE_POINT = 0x10FFFF
  # 4.3.1. Consume a token. Returns a single token of any type.
  consume_a_token : ->
    @consume_next()
    switch
      when @is_whitespace(@current)
        while @is_whitespace(@next())
          @consume_next()
        new SS.WhitespaceToken()
      when @current is "\""
        @consume_a_string_token("\"")
      when @current is "#"
        if @is_name_code_point(@next()) or @next_2_valid_escape()
          is_id = @next_3_starts_identifier() # FIXME needed?
          new SS.HashToken(@consume_a_name(), if is_id then "id" else undefined)
        else
          new SS.DelimToken(@current)
      when @current is "$"
        if @next() is "="
          @consume_next()
          new SS.SuffixMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "'"
        @consume_a_string_token("'")
      when @current is "("
        new SS.OpeningParenToken
      when @current is ")"
        new SS.ClosingParenToken
      when @current is "*"
        if @next() is "="
          @consume_next()
          new SS.SubstringMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "+"
        if @starts_with_number()
          @reconsume_current()
          @consume_a_numeric_token()
        else
          new SS.DelimToken(@current)
      when @current is ","
        new SS.CommaToken()
      when @current is "-"
        if @starts_with_number()
          @reconsume_current()
          @consume_a_numeric_token()
        else if @next() is "-" and @next2() is ">"
          @consume_next()
          @consume_next()
          new SS.CDCToken()
        else if @starts_with_ident()
          @reconsume_current()
          @consume_an_ident_like_token()
        else
          new SS.DelimToken(@current)
      when @current is "."
        if @starts_with_number()
          @reconsume_current()
          @consume_a_numeric_token()
        else
          new SS.DelimToken(@current)
      when @current is "/"
        if @next() is "*" # comment: skip up to "*/" (or EOF) and retokenize
          @consume_next()
          while @next() isnt "EOF" and not (@next() is "*" and @next2() is "/")
            @consume_next()
          if (@next() is "*" and @next2() is "/")
            @consume_next()
            @consume_next()
          @consume_a_token()
        else
          new SS.DelimToken(@current)
      when @current is ":"
        new SS.ColonToken()
      when @current is ";"
        new SS.SemicolonToken()
      when @current is "<"
        if @next() is "!" and @next2() is "-" and @next3() is "-"
          @consume_next()
          @consume_next()
          @consume_next()
          new SS.CDOToken()
        else
          new SS.DelimToken(@current)
      when @current is "@"
        if @next_3_starts_identifier()
          new SS.AtKeywordToken(@consume_a_name())
        else
          new SS.DelimToken(@current)
      when @current is "["
        new SS.OpeningSquareToken
      when @current is "\\"
        if @starts_with_valid_escape()
          @reconsume_current()
          @consume_an_ident_like_token()
        else
          new SS.DelimToken(@current)
      when @current is "]"
        new SS.ClosingSquareToken
      when @current is "^"
        if @next() is "="
          @consume_next()
          new SS.PrefixMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "{"
        new SS.OpeningCurlyToken
      when @current is "}"
        new SS.ClosingCurlyToken
      when "0" <= @current <= "9"
        @reconsume_current()
        @consume_a_numeric_token()
      when @is_name_start_code_point(@current)
        @reconsume_current()
        @consume_an_ident_like_token()
      when @current is "|"
        if @next() is "="
          @consume_next()
          new SS.DashMatchToken()
        else if @next() is "|"
          @consume_next()
          new SS.ColumnToken()
        else
          new SS.DelimToken(@current)
      when @current is "~"
        if @next() is "="
          @consume_next()
          new SS.IncludeMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "EOF"
        return new SS.EOFToken()
      else
        new SS.DelimToken(@current)
  # 4.3.2. Consume a numeric token: number, percentage or dimension.
  consume_a_numeric_token: ->
    number = @consume_a_number()
    if @next_3_starts_identifier()
      new SS.DimensionToken(number.repr, number.value, number.type, @consume_a_name())
    else if @next() is "%"
      @consume_next()
      new SS.PercentageToken(number.repr, number.value)
    else
      new SS.NumberToken(number.repr, number.value, number.type)
  # 4.3.3. Consume an ident-like token: ident, function or url.
  consume_an_ident_like_token: ->
    name = @consume_a_name()
    lowerCase = name.toLowerCase() # XXX not really what the spec says
    if lowerCase is "url" and @next() is "("
      @consume_next()
      @consume_a_url_token()
    else if @next() is "("
      @consume_next()
      new SS.FunctionToken(name)
    else
      new SS.IdentToken(name)
  # 4.3.4. Consume a string token delimited by `delim` (" or ').
  consume_a_string_token: (delim) ->
    s = []
    while true
      @consume_next()
      switch
        when @current is delim or @current is "EOF"
          return new SS.StringToken(s.join(""))
        when @current is "\n"
          @reconsume_current()
          return new SS.BadStringToken
        when @current is "\\"
          if @next() is "EOF" # a backslash before EOF is silently dropped
          else if @next() is "\n"
            @consume_next()
          else #if @starts_with_valid_escape() # it always will be true
            s.push @consume_an_escaped_code_point()
        else
          s.push @current
  # 4.3.5. Consume a url token.
  consume_a_url_token: ->
    #> This algorithm assumes that the initial "url(" has already been consumed.
    s = []
    while @is_whitespace(@next())
      @consume_next()
    if @next() is "EOF"
      return new SS.UrlToken(s.join(''))
    if @next() in ["'", '"']
      @consume_next()
      # BUGFIX: use a local variable; the original assigned to SS.stringToken,
      # mutating the shared `ss_nodes` module object on every quoted url.
      stringToken = @consume_a_string_token(@current)
      if stringToken instanceof SS.BadStringToken
        return new SS.BadUrlToken
      while @is_whitespace(@next())
        @consume_next()
      if @next() in [")", "EOF"]
        @consume_next()
        return new SS.UrlToken(stringToken.value)
      else
        @consume_the_remnants_of_a_bad_url()
        return new SS.BadUrlToken
    while true
      @consume_next()
      switch
        when @current in [")", "EOF"]
          return new SS.UrlToken(s.join(''))
        when @is_whitespace(@current)
          while @is_whitespace(@next())
            @consume_next()
          if @next() in [")", "EOF"]
            @consume_next()
            return new SS.UrlToken(s.join(''))
          else
            @consume_the_remnants_of_a_bad_url()
            return new SS.BadUrlToken
        when @current in ['"', "'", "("] or @is_non_printable(@current)
          @consume_the_remnants_of_a_bad_url()
          return new SS.BadUrlToken
        when @current is "\\"
          if @starts_with_valid_escape()
            s.push @consume_an_escaped_code_point()
          else
            # BUGFIX: per spec an invalid escape here is a parse error that
            # yields a <bad-url-token>; the original returned a <url-token>.
            @consume_the_remnants_of_a_bad_url()
            return new SS.BadUrlToken
        else
          s.push @current
  consume_a_unicode_range_token: ->
    #> This algorithm assumes that the initial "u+" has been consumed, and the next code point verified to be a hex digit or a "?".
    throw "unicode range tokens not implemented yet"
  # 4.3.7. Consume an escaped code point; returns a one-character string.
  consume_an_escaped_code_point: ->
    #> It assumes that the U+005C REVERSE SOLIDUS (\) has already been consumed and that the next input code point has already been verified to not be a newline.
    @consume_next()
    switch
      when @is_hex_digit(@current)
        digits = [@current]
        count = 1
        while @is_hex_digit(@next()) and count < 6
          digits.push @consume_next()
          ++count
        if @is_whitespace(@next())
          @consume_next()
        number = parseInt(digits.join(''),16)
        if number is 0 or MIN_SURROGATE_CODE_POINT <= number <= MAX_SURROGATE_CODE_POINT or number > MAX_ALLOWED_CODE_POINT
          return "\ufffd"
        else
          # BUGFIX: the decoded code point was never returned -- the original
          # fell off the `if` and yielded `undefined` for valid hex escapes.
          return String.fromCodePoint(number)
      when @current is "EOF"
        return "\ufffd"
      else
        return @current
  starts_with_valid_escape: ->
    @is_valid_escape(@current, @next())
  next_2_valid_escape: ->
    @is_valid_escape(@next(), @next2())
  # 4.3.8. Check whether two code points form a valid escape.
  is_valid_escape: (c1, c2) ->
    if c1 isnt "\\"
      return false
    if c2 is "\n"
      return false
    return true
  starts_with_ident: ->
    @starts_identifier(@current, @next(), @next2())
  next_3_starts_identifier: ->
    @starts_identifier(@next(), @next2(), @next3())
  # 4.3.9. Check whether three code points would start an identifier.
  starts_identifier: (c1, c2, c3) ->
    switch
      when c1 is "-"
        if @is_name_start_code_point(c2) or c2 is "-" or @is_valid_escape(c2,c3)
          return true
        else
          return false
      when @is_name_start_code_point(c1)
        return true
      when c1 is "\\"
        if @is_valid_escape(c1,c2)
          return true
        else
          return false
      else
        return false
  starts_with_number: ->
    @starts_number(@current, @next(), @next2())
  # 4.3.10. Check whether three code points would start a number.
  starts_number: (c1,c2,c3) ->
    switch
      when c1 in ["+", "-"]
        if @is_digit(c2)
          return true
        if c2 is "." and @is_digit(c3)
          return true
        return false
      when c1 is "."
        if @is_digit(c2)
          return true
        return false
      when @is_digit(c1)
        return true
      else
        return false
  # 4.3.11. Consume a name (a run of name code points and escapes).
  consume_a_name: ->
    s = []
    while true
      @consume_next()
      switch
        when @is_name_code_point(@current)
          s.push @current
        when @starts_with_valid_escape()
          s.push @consume_an_escaped_code_point()
        else
          @reconsume_current()
          return s.join('')
  # 4.3.12. Consume a number; returns {repr, value, type}.
  consume_a_number: ->
    #> This algorithm does not do the verification of the first few code points that are necessary to ensure a number can be obtained from the stream. Ensure that the stream starts with a number before calling this algorithm.
    repr = []
    type = "integer"
    if @next() in ["+", '-']
      repr.push @consume_next()
    while @is_digit(@next())
      repr.push @consume_next()
    if @next() is "." and @is_digit(@next2())
      repr.push @consume_next()
      repr.push @consume_next()
      type = "number"
      while @is_digit(@next())
        repr.push @consume_next()
    if @next() in ["e", "E"] and (@is_digit(@next2()) or @next2() in ['-', '+'] and @is_digit(@next3()))
      repr.push @consume_next()
      if not @is_digit(@next())
        repr.push @consume_next() # exponent sign
      repr.push @consume_next()
      type = "number"
      while @is_digit(@next())
        repr.push @consume_next()
    repr = repr.join('')
    value = @string_to_number(repr)
    return {repr,value,type}
  string_to_number: (s) ->
    parseFloat(s)
  # 4.3.14. Consume the remnants of a bad url, to recover after an error.
  consume_the_remnants_of_a_bad_url: () ->
    while true
      @consume_next()
      switch
        when @current is ")" or @current is "EOF"
          return
        when @starts_with_valid_escape()
          @consume_an_escaped_code_point()
module.exports = new Tokenizer
| true | #
# This file is part of FuncSS Software (http://funcss.org)
#
# Copyright © 2015 PI:NAME:<NAME>END_PI
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# This implementation is based on CSS Syntax Module Level 3 Candidate
# Recommendation http://www.w3.org/TR/css-syntax-3
#
# Copyright © 2014 W3C® (MIT, ERCIM, Keio, Beihang). This software or document
# includes material copied from or derived from CSS Syntax Module Level 3
# Candidate Recommendation http://www.w3.org/TR/css-syntax-3 . Comments that
# start with #> are copied from the referred specification. Consult
# http://www.w3.org/Consortium/Legal/2015/doc-license for details about their
# usage.
SS = require "./ss_nodes"
# CSS tokenizer implementing the tokenization algorithms of the CSS Syntax
# Module Level 3 Candidate Recommendation (http://www.w3.org/TR/css-syntax-3).
# The input is kept as an array of single-character strings; the sentinel
# string "EOF" stands for the end of the input.
class Tokenizer
  # Tokenize `string`; returns the array of tokens, excluding the EOFToken
  # that terminates the loop.
  tokenize: (string) ->
    @init(string)
    tokens = []
    while not ((token = @consume_a_token()) instanceof SS.EOFToken)
      tokens.push token
    return tokens
  # Reset the tokenizer state to the start of `string`.
  init: (string) ->
    @stream = string.split("")
    @current = undefined
  # Consume the next input code point into @current ("EOF" when exhausted).
  consume_next: ->
    if @stream.length
      @current = @stream.shift()
    else
      @current = "EOF"
  # Peek one code point ahead without consuming it.
  next: ->
    if @stream.length
      @stream[0]
    else
      "EOF"
  # Peek two code points ahead.
  next2: ->
    if @stream.length > 1
      @stream[1]
    else
      "EOF"
  # Peek three code points ahead.
  next3: ->
    if @stream.length > 2
      @stream[2]
    else
      "EOF"
  # Push @current back onto the input so it is consumed again.
  reconsume_current: ->
    @stream.unshift(@current)
    @current = undefined
  # --- Character-class predicates (spec section 4.2) ---
  is_digit: (c) ->
    "0" <= c <= "9"
  is_hex_digit: (c) ->
    @is_digit(c) or "A" <= c <= "F" or "a" <= c <= "f"
  is_uppercase_letter: (c) ->
    "A" <= c <= "Z"
  is_lowercase_letter: (c) ->
    "a" <= c <= "z"
  is_letter: (c) ->
    @is_uppercase_letter(c) or @is_lowercase_letter(c)
  is_non_ASCII: (c) ->
    c.charCodeAt(0) >= 0x80
  is_name_start_code_point: (c) ->
    # The explicit EOF guard is needed because the sentinel string "EOF"
    # would otherwise compare as a letter ("A" <= "EOF" <= "Z" holds).
    c isnt "EOF" and @is_letter(c) or @is_non_ASCII(c) or c is "_"
  is_name_code_point: (c) ->
    @is_name_start_code_point(c) or @is_digit(c) or c is "-"
  is_non_printable: (c) ->
    cc = c.charCodeAt(0)
    cc in [0x0..0x8] or cc is 0xB or cc in [0xe..0x1F] or cc is 0x7F
  is_newline: (c) ->
    c is "\n"
  is_whitespace: (c) ->
    @is_newline(c) or c is "\t" or c is " "
  # Class-body constants; the methods below close over them.
  MIN_SURROGATE_CODE_POINT = 0xD800
  MAX_SURROGATE_CODE_POINT = 0xDFFF
  is_surrogate_code_point: (c) ->
    # Chained comparison instead of `in [a..b]`, which would materialize a
    # 2048-element array on every call.
    MIN_SURROGATE_CODE_POINT <= c.charCodeAt(0) <= MAX_SURROGATE_CODE_POINT
  MAX_ALLOWED_CODE_POINT = 0x10FFFF
  # 4.3.1. Consume a token. Returns a single token of any type.
  consume_a_token : ->
    @consume_next()
    switch
      when @is_whitespace(@current)
        while @is_whitespace(@next())
          @consume_next()
        new SS.WhitespaceToken()
      when @current is "\""
        @consume_a_string_token("\"")
      when @current is "#"
        if @is_name_code_point(@next()) or @next_2_valid_escape()
          is_id = @next_3_starts_identifier() # FIXME needed?
          new SS.HashToken(@consume_a_name(), if is_id then "id" else undefined)
        else
          new SS.DelimToken(@current)
      when @current is "$"
        if @next() is "="
          @consume_next()
          new SS.SuffixMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "'"
        @consume_a_string_token("'")
      when @current is "("
        new SS.OpeningParenToken
      when @current is ")"
        new SS.ClosingParenToken
      when @current is "*"
        if @next() is "="
          @consume_next()
          new SS.SubstringMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "+"
        if @starts_with_number()
          @reconsume_current()
          @consume_a_numeric_token()
        else
          new SS.DelimToken(@current)
      when @current is ","
        new SS.CommaToken()
      when @current is "-"
        if @starts_with_number()
          @reconsume_current()
          @consume_a_numeric_token()
        else if @next() is "-" and @next2() is ">"
          @consume_next()
          @consume_next()
          new SS.CDCToken()
        else if @starts_with_ident()
          @reconsume_current()
          @consume_an_ident_like_token()
        else
          new SS.DelimToken(@current)
      when @current is "."
        if @starts_with_number()
          @reconsume_current()
          @consume_a_numeric_token()
        else
          new SS.DelimToken(@current)
      when @current is "/"
        if @next() is "*" # comment: skip up to "*/" (or EOF) and retokenize
          @consume_next()
          while @next() isnt "EOF" and not (@next() is "*" and @next2() is "/")
            @consume_next()
          if (@next() is "*" and @next2() is "/")
            @consume_next()
            @consume_next()
          @consume_a_token()
        else
          new SS.DelimToken(@current)
      when @current is ":"
        new SS.ColonToken()
      when @current is ";"
        new SS.SemicolonToken()
      when @current is "<"
        if @next() is "!" and @next2() is "-" and @next3() is "-"
          @consume_next()
          @consume_next()
          @consume_next()
          new SS.CDOToken()
        else
          new SS.DelimToken(@current)
      when @current is "@"
        if @next_3_starts_identifier()
          new SS.AtKeywordToken(@consume_a_name())
        else
          new SS.DelimToken(@current)
      when @current is "["
        new SS.OpeningSquareToken
      when @current is "\\"
        if @starts_with_valid_escape()
          @reconsume_current()
          @consume_an_ident_like_token()
        else
          new SS.DelimToken(@current)
      when @current is "]"
        new SS.ClosingSquareToken
      when @current is "^"
        if @next() is "="
          @consume_next()
          new SS.PrefixMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "{"
        new SS.OpeningCurlyToken
      when @current is "}"
        new SS.ClosingCurlyToken
      when "0" <= @current <= "9"
        @reconsume_current()
        @consume_a_numeric_token()
      when @is_name_start_code_point(@current)
        @reconsume_current()
        @consume_an_ident_like_token()
      when @current is "|"
        if @next() is "="
          @consume_next()
          new SS.DashMatchToken()
        else if @next() is "|"
          @consume_next()
          new SS.ColumnToken()
        else
          new SS.DelimToken(@current)
      when @current is "~"
        if @next() is "="
          @consume_next()
          new SS.IncludeMatchToken()
        else
          new SS.DelimToken(@current)
      when @current is "EOF"
        return new SS.EOFToken()
      else
        new SS.DelimToken(@current)
  # 4.3.2. Consume a numeric token: number, percentage or dimension.
  consume_a_numeric_token: ->
    number = @consume_a_number()
    if @next_3_starts_identifier()
      new SS.DimensionToken(number.repr, number.value, number.type, @consume_a_name())
    else if @next() is "%"
      @consume_next()
      new SS.PercentageToken(number.repr, number.value)
    else
      new SS.NumberToken(number.repr, number.value, number.type)
  # 4.3.3. Consume an ident-like token: ident, function or url.
  consume_an_ident_like_token: ->
    name = @consume_a_name()
    lowerCase = name.toLowerCase() # XXX not really what the spec says
    if lowerCase is "url" and @next() is "("
      @consume_next()
      @consume_a_url_token()
    else if @next() is "("
      @consume_next()
      new SS.FunctionToken(name)
    else
      new SS.IdentToken(name)
  # 4.3.4. Consume a string token delimited by `delim` (" or ').
  consume_a_string_token: (delim) ->
    s = []
    while true
      @consume_next()
      switch
        when @current is delim or @current is "EOF"
          return new SS.StringToken(s.join(""))
        when @current is "\n"
          @reconsume_current()
          return new SS.BadStringToken
        when @current is "\\"
          if @next() is "EOF" # a backslash before EOF is silently dropped
          else if @next() is "\n"
            @consume_next()
          else #if @starts_with_valid_escape() # it always will be true
            s.push @consume_an_escaped_code_point()
        else
          s.push @current
  # 4.3.5. Consume a url token.
  consume_a_url_token: ->
    #> This algorithm assumes that the initial "url(" has already been consumed.
    s = []
    while @is_whitespace(@next())
      @consume_next()
    if @next() is "EOF"
      return new SS.UrlToken(s.join(''))
    if @next() in ["'", '"']
      @consume_next()
      # BUGFIX: use a local variable; the original assigned to SS.stringToken,
      # mutating the shared `ss_nodes` module object on every quoted url.
      stringToken = @consume_a_string_token(@current)
      if stringToken instanceof SS.BadStringToken
        return new SS.BadUrlToken
      while @is_whitespace(@next())
        @consume_next()
      if @next() in [")", "EOF"]
        @consume_next()
        return new SS.UrlToken(stringToken.value)
      else
        @consume_the_remnants_of_a_bad_url()
        return new SS.BadUrlToken
    while true
      @consume_next()
      switch
        when @current in [")", "EOF"]
          return new SS.UrlToken(s.join(''))
        when @is_whitespace(@current)
          while @is_whitespace(@next())
            @consume_next()
          if @next() in [")", "EOF"]
            @consume_next()
            return new SS.UrlToken(s.join(''))
          else
            @consume_the_remnants_of_a_bad_url()
            return new SS.BadUrlToken
        when @current in ['"', "'", "("] or @is_non_printable(@current)
          @consume_the_remnants_of_a_bad_url()
          return new SS.BadUrlToken
        when @current is "\\"
          if @starts_with_valid_escape()
            s.push @consume_an_escaped_code_point()
          else
            # BUGFIX: per spec an invalid escape here is a parse error that
            # yields a <bad-url-token>; the original returned a <url-token>.
            @consume_the_remnants_of_a_bad_url()
            return new SS.BadUrlToken
        else
          s.push @current
  consume_a_unicode_range_token: ->
    #> This algorithm assumes that the initial "u+" has been consumed, and the next code point verified to be a hex digit or a "?".
    throw "unicode range tokens not implemented yet"
  # 4.3.7. Consume an escaped code point; returns a one-character string.
  consume_an_escaped_code_point: ->
    #> It assumes that the U+005C REVERSE SOLIDUS (\) has already been consumed and that the next input code point has already been verified to not be a newline.
    @consume_next()
    switch
      when @is_hex_digit(@current)
        digits = [@current]
        count = 1
        while @is_hex_digit(@next()) and count < 6
          digits.push @consume_next()
          ++count
        if @is_whitespace(@next())
          @consume_next()
        number = parseInt(digits.join(''),16)
        if number is 0 or MIN_SURROGATE_CODE_POINT <= number <= MAX_SURROGATE_CODE_POINT or number > MAX_ALLOWED_CODE_POINT
          return "\ufffd"
        else
          # BUGFIX: the decoded code point was never returned -- the original
          # fell off the `if` and yielded `undefined` for valid hex escapes.
          return String.fromCodePoint(number)
      when @current is "EOF"
        return "\ufffd"
      else
        return @current
  starts_with_valid_escape: ->
    @is_valid_escape(@current, @next())
  next_2_valid_escape: ->
    @is_valid_escape(@next(), @next2())
  # 4.3.8. Check whether two code points form a valid escape.
  is_valid_escape: (c1, c2) ->
    if c1 isnt "\\"
      return false
    if c2 is "\n"
      return false
    return true
  starts_with_ident: ->
    @starts_identifier(@current, @next(), @next2())
  next_3_starts_identifier: ->
    @starts_identifier(@next(), @next2(), @next3())
  # 4.3.9. Check whether three code points would start an identifier.
  starts_identifier: (c1, c2, c3) ->
    switch
      when c1 is "-"
        if @is_name_start_code_point(c2) or c2 is "-" or @is_valid_escape(c2,c3)
          return true
        else
          return false
      when @is_name_start_code_point(c1)
        return true
      when c1 is "\\"
        if @is_valid_escape(c1,c2)
          return true
        else
          return false
      else
        return false
  starts_with_number: ->
    @starts_number(@current, @next(), @next2())
  # 4.3.10. Check whether three code points would start a number.
  starts_number: (c1,c2,c3) ->
    switch
      when c1 in ["+", "-"]
        if @is_digit(c2)
          return true
        if c2 is "." and @is_digit(c3)
          return true
        return false
      when c1 is "."
        if @is_digit(c2)
          return true
        return false
      when @is_digit(c1)
        return true
      else
        return false
  # 4.3.11. Consume a name (a run of name code points and escapes).
  consume_a_name: ->
    s = []
    while true
      @consume_next()
      switch
        when @is_name_code_point(@current)
          s.push @current
        when @starts_with_valid_escape()
          s.push @consume_an_escaped_code_point()
        else
          @reconsume_current()
          return s.join('')
  # 4.3.12. Consume a number; returns {repr, value, type}.
  consume_a_number: ->
    #> This algorithm does not do the verification of the first few code points that are necessary to ensure a number can be obtained from the stream. Ensure that the stream starts with a number before calling this algorithm.
    repr = []
    type = "integer"
    if @next() in ["+", '-']
      repr.push @consume_next()
    while @is_digit(@next())
      repr.push @consume_next()
    if @next() is "." and @is_digit(@next2())
      repr.push @consume_next()
      repr.push @consume_next()
      type = "number"
      while @is_digit(@next())
        repr.push @consume_next()
    if @next() in ["e", "E"] and (@is_digit(@next2()) or @next2() in ['-', '+'] and @is_digit(@next3()))
      repr.push @consume_next()
      if not @is_digit(@next())
        repr.push @consume_next() # exponent sign
      repr.push @consume_next()
      type = "number"
      while @is_digit(@next())
        repr.push @consume_next()
    repr = repr.join('')
    value = @string_to_number(repr)
    return {repr,value,type}
  string_to_number: (s) ->
    parseFloat(s)
  # 4.3.14. Consume the remnants of a bad url, to recover after an error.
  consume_the_remnants_of_a_bad_url: () ->
    while true
      @consume_next()
      switch
        when @current is ")" or @current is "EOF"
          return
        when @starts_with_valid_escape()
          @consume_an_escaped_code_point()
module.exports = new Tokenizer
|
[
{
"context": " all:\n options:\n urls: ['http://127.0.0.1:3000/spec/runner.html']\n\n # Coding standards\n ",
"end": 2423,
"score": 0.7149990797042847,
"start": 2415,
"tag": "IP_ADDRESS",
"value": "27.0.0.1"
}
] | Gruntfile.coffee | noflo/noflo-runtime | 7 | module.exports = ->
# Project configuration
@initConfig
pkg: @file.readJSON 'package.json'
# CoffeeScript compilation
coffee:
spec:
options:
bare: true
transpile:
presets: ['es2015']
expand: true
cwd: 'spec'
src: ['**.coffee']
dest: 'spec'
ext: '.js'
# Browser build of NoFlo
noflo_browser:
options:
baseDir: './'
webpack:
externals:
'repl': 'commonjs repl' # somewhere inside coffee-script
'module': 'commonjs module' # somewhere inside coffee-script
'child_process': 'commonjs child_process' # somewhere inside coffee-script
'jison': 'commonjs jison'
'should': 'commonjs should' # used by tests in octo
'express': 'commonjs express' # used by tests in octo
'highlight': 'commonjs highlight' # used by octo?
'acorn': 'commonjs acorn' # optional?
module:
rules: [
test: /noflo([\\]+|\/)lib([\\]+|\/)(.*)\.js$|noflo([\\]+|\/)components([\\]+|\/)(.*)\.js$|fbp-graph([\\]+|\/)lib([\\]+|\/)(.*)\.js$|noflo-runtime-([a-z]+)([\\]+|\/)(.*).js$/
use: [
loader: 'babel-loader'
options:
presets: ['es2015']
]
,
test: /\.coffee$/
use: [
loader: 'coffee-loader'
options:
transpile:
presets: ['es2015']
]
,
test: /\.fbp$/
use: ["fbp-loader"]
]
resolve:
extensions: [".coffee", ".js"]
node:
fs: "empty"
ignores: [
/bin\/coffee/
]
main:
files:
'browser/noflo-runtime.js': ['entry.webpack.js']
# Automated recompilation and testing when developing
watch:
files: ['spec/*.coffee', 'components/*.coffee']
tasks: ['test']
# BDD tests on Node.js
mochaTest:
nodejs:
src: ['spec/*.coffee']
options:
reporter: 'spec'
# BDD tests on browser
connect:
server:
options:
port: 3000
#keepalive: true
mocha_phantomjs:
options:
reporter: 'spec'
all:
options:
urls: ['http://127.0.0.1:3000/spec/runner.html']
# Coding standards
coffeelint:
components:
files:
src: ['components/*.coffee', 'src/*.coffee', 'src/runtimes/*.coffee']
options:
max_line_length:
value: 80
level: 'ignore'
# Grunt plugins used for building
@loadNpmTasks 'grunt-noflo-browser'
@loadNpmTasks 'grunt-contrib-coffee'
# Grunt plugins used for testing
@loadNpmTasks 'grunt-contrib-watch'
@loadNpmTasks 'grunt-contrib-connect'
@loadNpmTasks 'grunt-mocha-test'
@loadNpmTasks 'grunt-mocha-phantomjs'
@loadNpmTasks 'grunt-coffeelint'
# Our local tasks
@registerTask 'build', 'Build NoFlo for the chosen target platform', (target = 'all') =>
@task.run 'coffee'
if target is 'all' or target is 'browser'
@task.run 'noflo_browser'
@registerTask 'start_servers', 'Start local WebSocket servers', ->
done = @async()
require('coffee-script/register');
utils = require './spec/utils/utils'
utils.createServer 3889, (err) =>
return @fail.fatal err if err
console.log "Echo server running at port 3889"
utils.createNoFloServer 3892, (err) =>
return @fail.fatal err if err
console.log "NoFlo server running at port 3892"
done()
@registerTask 'test', 'Build NoFlo and run automated tests', (target = 'all') =>
@task.run 'coffeelint'
@task.run 'build'
if target is 'all' or target is 'nodejs'
@task.run 'mochaTest'
if target is 'all' or target is 'browser'
@task.run 'connect'
@task.run 'start_servers'
@task.run 'mocha_phantomjs'
@registerTask 'default', ['test']
| 55495 | module.exports = ->
# Project configuration
@initConfig
pkg: @file.readJSON 'package.json'
# CoffeeScript compilation
coffee:
spec:
options:
bare: true
transpile:
presets: ['es2015']
expand: true
cwd: 'spec'
src: ['**.coffee']
dest: 'spec'
ext: '.js'
# Browser build of NoFlo
noflo_browser:
options:
baseDir: './'
webpack:
externals:
'repl': 'commonjs repl' # somewhere inside coffee-script
'module': 'commonjs module' # somewhere inside coffee-script
'child_process': 'commonjs child_process' # somewhere inside coffee-script
'jison': 'commonjs jison'
'should': 'commonjs should' # used by tests in octo
'express': 'commonjs express' # used by tests in octo
'highlight': 'commonjs highlight' # used by octo?
'acorn': 'commonjs acorn' # optional?
module:
rules: [
test: /noflo([\\]+|\/)lib([\\]+|\/)(.*)\.js$|noflo([\\]+|\/)components([\\]+|\/)(.*)\.js$|fbp-graph([\\]+|\/)lib([\\]+|\/)(.*)\.js$|noflo-runtime-([a-z]+)([\\]+|\/)(.*).js$/
use: [
loader: 'babel-loader'
options:
presets: ['es2015']
]
,
test: /\.coffee$/
use: [
loader: 'coffee-loader'
options:
transpile:
presets: ['es2015']
]
,
test: /\.fbp$/
use: ["fbp-loader"]
]
resolve:
extensions: [".coffee", ".js"]
node:
fs: "empty"
ignores: [
/bin\/coffee/
]
main:
files:
'browser/noflo-runtime.js': ['entry.webpack.js']
# Automated recompilation and testing when developing
watch:
files: ['spec/*.coffee', 'components/*.coffee']
tasks: ['test']
# BDD tests on Node.js
mochaTest:
nodejs:
src: ['spec/*.coffee']
options:
reporter: 'spec'
# BDD tests on browser
connect:
server:
options:
port: 3000
#keepalive: true
mocha_phantomjs:
options:
reporter: 'spec'
all:
options:
urls: ['http://1172.16.58.3:3000/spec/runner.html']
# Coding standards
coffeelint:
components:
files:
src: ['components/*.coffee', 'src/*.coffee', 'src/runtimes/*.coffee']
options:
max_line_length:
value: 80
level: 'ignore'
# Grunt plugins used for building
@loadNpmTasks 'grunt-noflo-browser'
@loadNpmTasks 'grunt-contrib-coffee'
# Grunt plugins used for testing
@loadNpmTasks 'grunt-contrib-watch'
@loadNpmTasks 'grunt-contrib-connect'
@loadNpmTasks 'grunt-mocha-test'
@loadNpmTasks 'grunt-mocha-phantomjs'
@loadNpmTasks 'grunt-coffeelint'
# Our local tasks
@registerTask 'build', 'Build NoFlo for the chosen target platform', (target = 'all') =>
@task.run 'coffee'
if target is 'all' or target is 'browser'
@task.run 'noflo_browser'
@registerTask 'start_servers', 'Start local WebSocket servers', ->
done = @async()
require('coffee-script/register');
utils = require './spec/utils/utils'
utils.createServer 3889, (err) =>
return @fail.fatal err if err
console.log "Echo server running at port 3889"
utils.createNoFloServer 3892, (err) =>
return @fail.fatal err if err
console.log "NoFlo server running at port 3892"
done()
@registerTask 'test', 'Build NoFlo and run automated tests', (target = 'all') =>
@task.run 'coffeelint'
@task.run 'build'
if target is 'all' or target is 'nodejs'
@task.run 'mochaTest'
if target is 'all' or target is 'browser'
@task.run 'connect'
@task.run 'start_servers'
@task.run 'mocha_phantomjs'
@registerTask 'default', ['test']
| true | module.exports = ->
# Project configuration
@initConfig
pkg: @file.readJSON 'package.json'
# CoffeeScript compilation
coffee:
spec:
options:
bare: true
transpile:
presets: ['es2015']
expand: true
cwd: 'spec'
src: ['**.coffee']
dest: 'spec'
ext: '.js'
# Browser build of NoFlo
noflo_browser:
options:
baseDir: './'
webpack:
externals:
'repl': 'commonjs repl' # somewhere inside coffee-script
'module': 'commonjs module' # somewhere inside coffee-script
'child_process': 'commonjs child_process' # somewhere inside coffee-script
'jison': 'commonjs jison'
'should': 'commonjs should' # used by tests in octo
'express': 'commonjs express' # used by tests in octo
'highlight': 'commonjs highlight' # used by octo?
'acorn': 'commonjs acorn' # optional?
module:
rules: [
test: /noflo([\\]+|\/)lib([\\]+|\/)(.*)\.js$|noflo([\\]+|\/)components([\\]+|\/)(.*)\.js$|fbp-graph([\\]+|\/)lib([\\]+|\/)(.*)\.js$|noflo-runtime-([a-z]+)([\\]+|\/)(.*).js$/
use: [
loader: 'babel-loader'
options:
presets: ['es2015']
]
,
test: /\.coffee$/
use: [
loader: 'coffee-loader'
options:
transpile:
presets: ['es2015']
]
,
test: /\.fbp$/
use: ["fbp-loader"]
]
resolve:
extensions: [".coffee", ".js"]
node:
fs: "empty"
ignores: [
/bin\/coffee/
]
main:
files:
'browser/noflo-runtime.js': ['entry.webpack.js']
# Automated recompilation and testing when developing
watch:
files: ['spec/*.coffee', 'components/*.coffee']
tasks: ['test']
# BDD tests on Node.js
mochaTest:
nodejs:
src: ['spec/*.coffee']
options:
reporter: 'spec'
# BDD tests on browser
connect:
server:
options:
port: 3000
#keepalive: true
mocha_phantomjs:
options:
reporter: 'spec'
all:
options:
urls: ['http://1PI:IP_ADDRESS:172.16.58.3END_PI:3000/spec/runner.html']
# Coding standards
coffeelint:
components:
files:
src: ['components/*.coffee', 'src/*.coffee', 'src/runtimes/*.coffee']
options:
max_line_length:
value: 80
level: 'ignore'
# Grunt plugins used for building
@loadNpmTasks 'grunt-noflo-browser'
@loadNpmTasks 'grunt-contrib-coffee'
# Grunt plugins used for testing
@loadNpmTasks 'grunt-contrib-watch'
@loadNpmTasks 'grunt-contrib-connect'
@loadNpmTasks 'grunt-mocha-test'
@loadNpmTasks 'grunt-mocha-phantomjs'
@loadNpmTasks 'grunt-coffeelint'
# Our local tasks
@registerTask 'build', 'Build NoFlo for the chosen target platform', (target = 'all') =>
@task.run 'coffee'
if target is 'all' or target is 'browser'
@task.run 'noflo_browser'
@registerTask 'start_servers', 'Start local WebSocket servers', ->
done = @async()
require('coffee-script/register');
utils = require './spec/utils/utils'
utils.createServer 3889, (err) =>
return @fail.fatal err if err
console.log "Echo server running at port 3889"
utils.createNoFloServer 3892, (err) =>
return @fail.fatal err if err
console.log "NoFlo server running at port 3892"
done()
@registerTask 'test', 'Build NoFlo and run automated tests', (target = 'all') =>
@task.run 'coffeelint'
@task.run 'build'
if target is 'all' or target is 'nodejs'
@task.run 'mochaTest'
if target is 'all' or target is 'browser'
@task.run 'connect'
@task.run 'start_servers'
@task.run 'mocha_phantomjs'
@registerTask 'default', ['test']
|
[
{
"context": " <OwnedCard\n key={resource.id}\n resource={resource}\n ",
"end": 2211,
"score": 0.8361924290657043,
"start": 2200,
"tag": "KEY",
"value": "resource.id"
}
] | app/components/owned-card-list.cjsx | camallen/Panoptes-Front-End | 0 | counterpart = require 'counterpart'
React = require 'react'
TitleMixin = require '../lib/title-mixin'
Translate = require 'react-translate-component'
apiClient = require '../api/client'
PromiseRenderer = require '../components/promise-renderer'
OwnedCard = require '../partials/owned-card'
{Link} = require '@edpaget/react-router'
module.exports = React.createClass
displayName: 'OwnedCardList'
propTypes:
imagePromise: React.PropTypes.func.isRequired
listPromise: React.PropTypes.object.isRequired
cardLink: React.PropTypes.func.isRequired
translationObjectName: React.PropTypes.string.isRequired
ownerName: React.PropTypes.string
heroClass: React.PropTypes.string
heroNav: React.PropTypes.node
componentDidMount: ->
document.documentElement.classList.add 'on-secondary-page'
componentWillUnmount: ->
document.documentElement.classList.remove 'on-secondary-page'
userForTitle: ->
if @props.ownerName
"#{@props.ownerName}'s"
else
'All'
render: ->
<div className="secondary-page all-resources-page">
<section className={"hero #{@props.heroClass}"}>
<div className="hero-container">
<Translate component="h1" user={@userForTitle()} content={"#{@props.translationObjectName}.title"} />
{if @props.heroNav?
@props.heroNav}
</div>
</section>
<section className="resources-container">
<PromiseRenderer promise={@props.listPromise}>{(ownedResources) =>
if ownedResources?.length > 0
meta = ownedResources[0].getMeta()
<div>
<div className="resource-results-counter">
{if meta
pageStart = meta.page * meta.page_size - meta.page_size + 1
pageEnd = Math.min(meta.page * meta.page_size, meta.count)
count = meta.count
<Translate pageStart={pageStart} pageEnd={pageEnd} count={count} content="#{@props.translationObjectName}.countMessage" component="p" />}
</div>
<div className="card-list">
{for resource in ownedResources
<OwnedCard
key={resource.id}
resource={resource}
imagePromise={@props.imagePromise(resource)}
linkTo={@props.cardLink(resource)}
translationObjectName={@props.translationObjectName}/>}
</div>
<nav>
{if meta
<nav className="pagination">
{for page in [1..meta.page_count]
<Link to={@props.linkTo} query={{page}} key={page} className="pill-button" style={border: "2px solid" if page is 1 and window.location.hash is "/#{@props.linkTo}"}>{page}</Link>}
</nav>}
</nav>
</div>
else if ownedResources?.length is 0
<Translate content="#{@props.translationObjectName}.notFoundMessage" component="div" />
else
<Translate content="#{@props.translationObjectName}.loadMessage" component="div" />
}</PromiseRenderer>
</section>
</div>
| 29154 | counterpart = require 'counterpart'
React = require 'react'
TitleMixin = require '../lib/title-mixin'
Translate = require 'react-translate-component'
apiClient = require '../api/client'
PromiseRenderer = require '../components/promise-renderer'
OwnedCard = require '../partials/owned-card'
{Link} = require '@edpaget/react-router'
module.exports = React.createClass
displayName: 'OwnedCardList'
propTypes:
imagePromise: React.PropTypes.func.isRequired
listPromise: React.PropTypes.object.isRequired
cardLink: React.PropTypes.func.isRequired
translationObjectName: React.PropTypes.string.isRequired
ownerName: React.PropTypes.string
heroClass: React.PropTypes.string
heroNav: React.PropTypes.node
componentDidMount: ->
document.documentElement.classList.add 'on-secondary-page'
componentWillUnmount: ->
document.documentElement.classList.remove 'on-secondary-page'
userForTitle: ->
if @props.ownerName
"#{@props.ownerName}'s"
else
'All'
render: ->
<div className="secondary-page all-resources-page">
<section className={"hero #{@props.heroClass}"}>
<div className="hero-container">
<Translate component="h1" user={@userForTitle()} content={"#{@props.translationObjectName}.title"} />
{if @props.heroNav?
@props.heroNav}
</div>
</section>
<section className="resources-container">
<PromiseRenderer promise={@props.listPromise}>{(ownedResources) =>
if ownedResources?.length > 0
meta = ownedResources[0].getMeta()
<div>
<div className="resource-results-counter">
{if meta
pageStart = meta.page * meta.page_size - meta.page_size + 1
pageEnd = Math.min(meta.page * meta.page_size, meta.count)
count = meta.count
<Translate pageStart={pageStart} pageEnd={pageEnd} count={count} content="#{@props.translationObjectName}.countMessage" component="p" />}
</div>
<div className="card-list">
{for resource in ownedResources
<OwnedCard
key={<KEY>}
resource={resource}
imagePromise={@props.imagePromise(resource)}
linkTo={@props.cardLink(resource)}
translationObjectName={@props.translationObjectName}/>}
</div>
<nav>
{if meta
<nav className="pagination">
{for page in [1..meta.page_count]
<Link to={@props.linkTo} query={{page}} key={page} className="pill-button" style={border: "2px solid" if page is 1 and window.location.hash is "/#{@props.linkTo}"}>{page}</Link>}
</nav>}
</nav>
</div>
else if ownedResources?.length is 0
<Translate content="#{@props.translationObjectName}.notFoundMessage" component="div" />
else
<Translate content="#{@props.translationObjectName}.loadMessage" component="div" />
}</PromiseRenderer>
</section>
</div>
| true | counterpart = require 'counterpart'
React = require 'react'
TitleMixin = require '../lib/title-mixin'
Translate = require 'react-translate-component'
apiClient = require '../api/client'
PromiseRenderer = require '../components/promise-renderer'
OwnedCard = require '../partials/owned-card'
{Link} = require '@edpaget/react-router'
module.exports = React.createClass
displayName: 'OwnedCardList'
propTypes:
imagePromise: React.PropTypes.func.isRequired
listPromise: React.PropTypes.object.isRequired
cardLink: React.PropTypes.func.isRequired
translationObjectName: React.PropTypes.string.isRequired
ownerName: React.PropTypes.string
heroClass: React.PropTypes.string
heroNav: React.PropTypes.node
componentDidMount: ->
document.documentElement.classList.add 'on-secondary-page'
componentWillUnmount: ->
document.documentElement.classList.remove 'on-secondary-page'
userForTitle: ->
if @props.ownerName
"#{@props.ownerName}'s"
else
'All'
render: ->
<div className="secondary-page all-resources-page">
<section className={"hero #{@props.heroClass}"}>
<div className="hero-container">
<Translate component="h1" user={@userForTitle()} content={"#{@props.translationObjectName}.title"} />
{if @props.heroNav?
@props.heroNav}
</div>
</section>
<section className="resources-container">
<PromiseRenderer promise={@props.listPromise}>{(ownedResources) =>
if ownedResources?.length > 0
meta = ownedResources[0].getMeta()
<div>
<div className="resource-results-counter">
{if meta
pageStart = meta.page * meta.page_size - meta.page_size + 1
pageEnd = Math.min(meta.page * meta.page_size, meta.count)
count = meta.count
<Translate pageStart={pageStart} pageEnd={pageEnd} count={count} content="#{@props.translationObjectName}.countMessage" component="p" />}
</div>
<div className="card-list">
{for resource in ownedResources
<OwnedCard
key={PI:KEY:<KEY>END_PI}
resource={resource}
imagePromise={@props.imagePromise(resource)}
linkTo={@props.cardLink(resource)}
translationObjectName={@props.translationObjectName}/>}
</div>
<nav>
{if meta
<nav className="pagination">
{for page in [1..meta.page_count]
<Link to={@props.linkTo} query={{page}} key={page} className="pill-button" style={border: "2px solid" if page is 1 and window.location.hash is "/#{@props.linkTo}"}>{page}</Link>}
</nav>}
</nav>
</div>
else if ownedResources?.length is 0
<Translate content="#{@props.translationObjectName}.notFoundMessage" component="div" />
else
<Translate content="#{@props.translationObjectName}.loadMessage" component="div" />
}</PromiseRenderer>
</section>
</div>
|
[
{
"context": " neo\n .createNode({ name: 'Kieve' })\n .then((node) ->\n ",
"end": 444,
"score": 0.9994712471961975,
"start": 439,
"tag": "NAME",
"value": "Kieve"
},
{
"context": " node.should.have.property('name').equal 'Kieve... | test/test.node.coffee | kievechua/js-neo4j | 2 | Q = require 'q'
chai = require 'chai'
chaiAsPromised = require 'chai-as-promised'
chai.should()
chai.use(chaiAsPromised)
require("mocha-as-promised")()
{Neo4js} = require '../src/main'
describe 'Node', ->
neo = new Neo4js()
testNode = null
describe 'neo.createNode({properties})', ->
describe 'when valid', ->
it 'should create a new node', ->
neo
.createNode({ name: 'Kieve' })
.then((node) ->
node.should.have.property('name').equal 'Kieve'
testNode = node
)
describe 'neo.readNode(nodeId)', ->
describe 'when valid', ->
it 'should return node details', ->
neo
.readNode(testNode._id)
.then((result) ->
result.name.should.equal 'Kieve'
)
describe 'neo.updateNodeProperty(nodeId, property, value)', ->
describe 'when valid', ->
it 'should update node property', ->
neo
.updateNodeProperty(testNode._id, 'gender', 'male')
.should.eventually.be.true
describe 'neo.updateNodeProperty(nodeId, {properties})', ->
describe 'when valid', ->
it 'should update node properties', ->
neo
.updateNodeProperty(testNode._id, { 'name': 'Kieve Chua', 'age': 17 })
.should.eventually.be.true
describe 'neo.readNodeProperty(nodeId)', ->
describe 'when valid', ->
it 'should return properties of a node', ->
neo
.readNodeProperty(testNode._id)
.then (result) ->
result.name.should.equal 'Kieve Chua'
result.age.should.equal 17
describe 'neo.deleteNodeProperty(nodeId, property)', ->
describe 'when valid', ->
it 'should delete node property', ->
neo.deleteNodeProperty(testNode._id, 'name')
.should.eventually.be.true
describe 'neo.deleteNodeProperty(nodeId)', ->
describe 'when valid', ->
it 'should delete all property of a node', ->
neo
.deleteNodeProperty(testNode._id)
.should.eventually.be.true
describe 'neo.deleteNode(nodeId)', ->
describe 'when valid', ->
it 'should delete a node', ->
neo
.deleteNode(testNode._id)
.should.eventually.be.true
# describe 'createUniqueNode', ->
# it 'should pass', (done) ->
# neo.createUniqueNode('people', 'name', 'Kieve', { age: 18 }).then((node) ->
# node.age.should.equal 18
# done()
# )
# it 'should pass or fail', (done) ->
# neo.createUniqueNode('people', 'name', 'Kieve', { age: 19 }, 'create_or_fail')
# .then((node) ->
# node.age.should.equal 19
# )
# .fail((result) ->
# result.status.should.equal 409
# )
# .done(->
# done()
# )
| 103180 | Q = require 'q'
chai = require 'chai'
chaiAsPromised = require 'chai-as-promised'
chai.should()
chai.use(chaiAsPromised)
require("mocha-as-promised")()
{Neo4js} = require '../src/main'
describe 'Node', ->
neo = new Neo4js()
testNode = null
describe 'neo.createNode({properties})', ->
describe 'when valid', ->
it 'should create a new node', ->
neo
.createNode({ name: '<NAME>' })
.then((node) ->
node.should.have.property('name').equal '<NAME>'
testNode = node
)
describe 'neo.readNode(nodeId)', ->
describe 'when valid', ->
it 'should return node details', ->
neo
.readNode(testNode._id)
.then((result) ->
result.name.should.equal '<NAME>'
)
describe 'neo.updateNodeProperty(nodeId, property, value)', ->
describe 'when valid', ->
it 'should update node property', ->
neo
.updateNodeProperty(testNode._id, 'gender', 'male')
.should.eventually.be.true
describe 'neo.updateNodeProperty(nodeId, {properties})', ->
describe 'when valid', ->
it 'should update node properties', ->
neo
.updateNodeProperty(testNode._id, { 'name': '<NAME>', 'age': 17 })
.should.eventually.be.true
describe 'neo.readNodeProperty(nodeId)', ->
describe 'when valid', ->
it 'should return properties of a node', ->
neo
.readNodeProperty(testNode._id)
.then (result) ->
result.name.should.equal '<NAME>'
result.age.should.equal 17
describe 'neo.deleteNodeProperty(nodeId, property)', ->
describe 'when valid', ->
it 'should delete node property', ->
neo.deleteNodeProperty(testNode._id, 'name')
.should.eventually.be.true
describe 'neo.deleteNodeProperty(nodeId)', ->
describe 'when valid', ->
it 'should delete all property of a node', ->
neo
.deleteNodeProperty(testNode._id)
.should.eventually.be.true
describe 'neo.deleteNode(nodeId)', ->
describe 'when valid', ->
it 'should delete a node', ->
neo
.deleteNode(testNode._id)
.should.eventually.be.true
# describe 'createUniqueNode', ->
# it 'should pass', (done) ->
# neo.createUniqueNode('people', 'name', '<NAME>', { age: 18 }).then((node) ->
# node.age.should.equal 18
# done()
# )
# it 'should pass or fail', (done) ->
# neo.createUniqueNode('people', 'name', '<NAME>', { age: 19 }, 'create_or_fail')
# .then((node) ->
# node.age.should.equal 19
# )
# .fail((result) ->
# result.status.should.equal 409
# )
# .done(->
# done()
# )
| true | Q = require 'q'
chai = require 'chai'
chaiAsPromised = require 'chai-as-promised'
chai.should()
chai.use(chaiAsPromised)
require("mocha-as-promised")()
{Neo4js} = require '../src/main'
describe 'Node', ->
neo = new Neo4js()
testNode = null
describe 'neo.createNode({properties})', ->
describe 'when valid', ->
it 'should create a new node', ->
neo
.createNode({ name: 'PI:NAME:<NAME>END_PI' })
.then((node) ->
node.should.have.property('name').equal 'PI:NAME:<NAME>END_PI'
testNode = node
)
describe 'neo.readNode(nodeId)', ->
describe 'when valid', ->
it 'should return node details', ->
neo
.readNode(testNode._id)
.then((result) ->
result.name.should.equal 'PI:NAME:<NAME>END_PI'
)
describe 'neo.updateNodeProperty(nodeId, property, value)', ->
describe 'when valid', ->
it 'should update node property', ->
neo
.updateNodeProperty(testNode._id, 'gender', 'male')
.should.eventually.be.true
describe 'neo.updateNodeProperty(nodeId, {properties})', ->
describe 'when valid', ->
it 'should update node properties', ->
neo
.updateNodeProperty(testNode._id, { 'name': 'PI:NAME:<NAME>END_PI', 'age': 17 })
.should.eventually.be.true
describe 'neo.readNodeProperty(nodeId)', ->
describe 'when valid', ->
it 'should return properties of a node', ->
neo
.readNodeProperty(testNode._id)
.then (result) ->
result.name.should.equal 'PI:NAME:<NAME>END_PI'
result.age.should.equal 17
describe 'neo.deleteNodeProperty(nodeId, property)', ->
describe 'when valid', ->
it 'should delete node property', ->
neo.deleteNodeProperty(testNode._id, 'name')
.should.eventually.be.true
describe 'neo.deleteNodeProperty(nodeId)', ->
describe 'when valid', ->
it 'should delete all property of a node', ->
neo
.deleteNodeProperty(testNode._id)
.should.eventually.be.true
describe 'neo.deleteNode(nodeId)', ->
describe 'when valid', ->
it 'should delete a node', ->
neo
.deleteNode(testNode._id)
.should.eventually.be.true
# describe 'createUniqueNode', ->
# it 'should pass', (done) ->
# neo.createUniqueNode('people', 'name', 'PI:NAME:<NAME>END_PI', { age: 18 }).then((node) ->
# node.age.should.equal 18
# done()
# )
# it 'should pass or fail', (done) ->
# neo.createUniqueNode('people', 'name', 'PI:NAME:<NAME>END_PI', { age: 19 }, 'create_or_fail')
# .then((node) ->
# node.age.should.equal 19
# )
# .fail((result) ->
# result.status.should.equal 409
# )
# .done(->
# done()
# )
|
[
{
"context": "unt #{accountId} - Cloud State: #{status}\"} key={\"#{accountId}-n1Cloud\"} className={\"activity-status-bubble state-\" + st",
"end": 2629,
"score": 0.9972579479217529,
"start": 2609,
"tag": "KEY",
"value": "#{accountId}-n1Cloud"
},
{
"context": "BarTask task={task}\n ... | packages/client-app/internal_packages/worker-ui/lib/developer-bar.cjsx | cnheider/nylas-mail | 24,369 | _ = require 'underscore'
React = require 'react'
{DatabaseStore,
AccountStore,
TaskQueue,
Actions,
Contact,
Utils,
Message} = require 'nylas-exports'
{InjectedComponentSet} = require 'nylas-component-kit'
DeveloperBarStore = require './developer-bar-store'
DeveloperBarTask = require './developer-bar-task'
DeveloperBarCurlItem = require './developer-bar-curl-item'
DeveloperBarLongPollItem = require './developer-bar-long-poll-item'
class DeveloperBar extends React.Component
@displayName: "DeveloperBar"
@containerRequired: false
constructor: (@props) ->
@state = _.extend @_getStateFromStores(),
section: 'curl'
filter: ''
componentDidMount: =>
@taskQueueUnsubscribe = TaskQueue.listen @_onChange
@activityStoreUnsubscribe = DeveloperBarStore.listen @_onChange
componentWillUnmount: =>
@taskQueueUnsubscribe() if @taskQueueUnsubscribe
@activityStoreUnsubscribe() if @activityStoreUnsubscribe
render: =>
<div className="developer-bar">
<div className="controls">
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('queue')}>
<span>Client Tasks ({@state.queue?.length})</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('providerSyncbackRequests')}>
<span>Provider Syncback Requests</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('long-polling')}>
{@_renderDeltaStates()}
<span>Cloud Deltas</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('curl')}>
<span>Requests: {@state.curlHistory.length}</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('local-sync')}>
<span>Local Sync Engine</span>
</div>
</div>
</div>
{@_sectionContent()}
<div className="footer">
<div className="btn" onClick={@_onClear}>Clear</div>
<input className="filter" placeholder="Filter..." value={@state.filter} onChange={@_onFilter} />
</div>
</div>
_renderDeltaStates: =>
_.map @state.longPollStates, (status, accountId) =>
<div className="delta-state-wrap" key={accountId} >
<div title={"Account #{accountId} - Cloud State: #{status}"} key={"#{accountId}-n1Cloud"} className={"activity-status-bubble state-" + status}></div>
</div>
_sectionContent: =>
expandedDiv = <div></div>
matchingFilter = (item) =>
return true if @state.filter is ''
return JSON.stringify(item).indexOf(@state.filter) >= 0
if @state.section == 'curl'
itemDivs = @state.curlHistory.filter(matchingFilter).map (item) ->
<DeveloperBarCurlItem item={item} key={item.id}/>
expandedDiv = <div className="expanded-section curl-history">{itemDivs}</div>
else if @state.section == 'long-polling'
itemDivs = @state.longPollHistory.filter(matchingFilter).map (item) ->
<DeveloperBarLongPollItem item={item} ignoredBecause={item.ignoredBecause} key={"#{item.cursor}-#{item.timestamp}"}/>
expandedDiv = <div className="expanded-section long-polling">{itemDivs}</div>
else if @state.section == 'local-sync'
expandedDiv = <div className="expanded-section local-sync">
<InjectedComponentSet matching={{role: "Developer:LocalSyncUI"}} />
</div>
else if @state.section == 'providerSyncbackRequests'
reqs = @state.providerSyncbackRequests.map (req) =>
<div key={req.id}> {req.type}: {req.status} - {JSON.stringify(req.props)}</div>
expandedDiv = <div className="expanded-section provider-syncback-requests">{reqs}</div>
else if @state.section == 'queue'
queue = @state.queue.filter(matchingFilter)
queueDivs = for i in [@state.queue.length - 1..0] by -1
task = @state.queue[i]
# We need to pass the task separately because we want to update
# when just that variable changes. Otherwise, since the `task`
# pointer doesn't change, the `DeveloperBarTask` doesn't know to
# update.
status = @state.queue[i].queueState.status
<DeveloperBarTask task={task}
key={task.id}
status={status}
type="queued" />
queueCompleted = @state.completed.filter(matchingFilter)
queueCompletedDivs = for i in [@state.completed.length - 1..0] by -1
task = @state.completed[i]
<DeveloperBarTask task={task}
key={task.id}
type="completed" />
expandedDiv =
<div className="expanded-section queue">
<div className="btn queue-buttons"
onClick={@_onDequeueAll}>Remove Queued Tasks</div>
<div className="section-content">
{queueDivs}
<hr />
{queueCompletedDivs}
</div>
</div>
expandedDiv
_onChange: =>
@setState(@_getStateFromStores())
_onClear: =>
Actions.clearDeveloperConsole()
_onFilter: (ev) =>
@setState(filter: ev.target.value)
_onDequeueAll: =>
Actions.dequeueAllTasks()
_onExpandSection: (section) =>
@setState(@_getStateFromStores())
@setState(section: section)
_getStateFromStores: =>
queue: Utils.deepClone(TaskQueue._queue)
completed: TaskQueue._completed
curlHistory: DeveloperBarStore.curlHistory()
longPollHistory: DeveloperBarStore.longPollHistory()
longPollStates: DeveloperBarStore.longPollStates()
providerSyncbackRequests: DeveloperBarStore.providerSyncbackRequests()
module.exports = DeveloperBar
| 84132 | _ = require 'underscore'
React = require 'react'
{DatabaseStore,
AccountStore,
TaskQueue,
Actions,
Contact,
Utils,
Message} = require 'nylas-exports'
{InjectedComponentSet} = require 'nylas-component-kit'
DeveloperBarStore = require './developer-bar-store'
DeveloperBarTask = require './developer-bar-task'
DeveloperBarCurlItem = require './developer-bar-curl-item'
DeveloperBarLongPollItem = require './developer-bar-long-poll-item'
class DeveloperBar extends React.Component
@displayName: "DeveloperBar"
@containerRequired: false
constructor: (@props) ->
@state = _.extend @_getStateFromStores(),
section: 'curl'
filter: ''
componentDidMount: =>
@taskQueueUnsubscribe = TaskQueue.listen @_onChange
@activityStoreUnsubscribe = DeveloperBarStore.listen @_onChange
componentWillUnmount: =>
@taskQueueUnsubscribe() if @taskQueueUnsubscribe
@activityStoreUnsubscribe() if @activityStoreUnsubscribe
render: =>
<div className="developer-bar">
<div className="controls">
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('queue')}>
<span>Client Tasks ({@state.queue?.length})</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('providerSyncbackRequests')}>
<span>Provider Syncback Requests</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('long-polling')}>
{@_renderDeltaStates()}
<span>Cloud Deltas</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('curl')}>
<span>Requests: {@state.curlHistory.length}</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('local-sync')}>
<span>Local Sync Engine</span>
</div>
</div>
</div>
{@_sectionContent()}
<div className="footer">
<div className="btn" onClick={@_onClear}>Clear</div>
<input className="filter" placeholder="Filter..." value={@state.filter} onChange={@_onFilter} />
</div>
</div>
_renderDeltaStates: =>
_.map @state.longPollStates, (status, accountId) =>
<div className="delta-state-wrap" key={accountId} >
<div title={"Account #{accountId} - Cloud State: #{status}"} key={"<KEY>"} className={"activity-status-bubble state-" + status}></div>
</div>
_sectionContent: =>
expandedDiv = <div></div>
matchingFilter = (item) =>
return true if @state.filter is ''
return JSON.stringify(item).indexOf(@state.filter) >= 0
if @state.section == 'curl'
itemDivs = @state.curlHistory.filter(matchingFilter).map (item) ->
<DeveloperBarCurlItem item={item} key={item.id}/>
expandedDiv = <div className="expanded-section curl-history">{itemDivs}</div>
else if @state.section == 'long-polling'
itemDivs = @state.longPollHistory.filter(matchingFilter).map (item) ->
<DeveloperBarLongPollItem item={item} ignoredBecause={item.ignoredBecause} key={"#{item.cursor}-#{item.timestamp}"}/>
expandedDiv = <div className="expanded-section long-polling">{itemDivs}</div>
else if @state.section == 'local-sync'
expandedDiv = <div className="expanded-section local-sync">
<InjectedComponentSet matching={{role: "Developer:LocalSyncUI"}} />
</div>
else if @state.section == 'providerSyncbackRequests'
reqs = @state.providerSyncbackRequests.map (req) =>
<div key={req.id}> {req.type}: {req.status} - {JSON.stringify(req.props)}</div>
expandedDiv = <div className="expanded-section provider-syncback-requests">{reqs}</div>
else if @state.section == 'queue'
queue = @state.queue.filter(matchingFilter)
queueDivs = for i in [@state.queue.length - 1..0] by -1
task = @state.queue[i]
# We need to pass the task separately because we want to update
# when just that variable changes. Otherwise, since the `task`
# pointer doesn't change, the `DeveloperBarTask` doesn't know to
# update.
status = @state.queue[i].queueState.status
<DeveloperBarTask task={task}
key={<KEY>}
status={status}
type="queued" />
queueCompleted = @state.completed.filter(matchingFilter)
queueCompletedDivs = for i in [@state.completed.length - 1..0] by -1
task = @state.completed[i]
<DeveloperBarTask task={task}
key={<KEY>}
type="completed" />
expandedDiv =
<div className="expanded-section queue">
<div className="btn queue-buttons"
onClick={@_onDequeueAll}>Remove Queued Tasks</div>
<div className="section-content">
{queueDivs}
<hr />
{queueCompletedDivs}
</div>
</div>
expandedDiv
_onChange: =>
@setState(@_getStateFromStores())
_onClear: =>
Actions.clearDeveloperConsole()
_onFilter: (ev) =>
@setState(filter: ev.target.value)
_onDequeueAll: =>
Actions.dequeueAllTasks()
_onExpandSection: (section) =>
@setState(@_getStateFromStores())
@setState(section: section)
_getStateFromStores: =>
queue: Utils.deepClone(TaskQueue._queue)
completed: TaskQueue._completed
curlHistory: DeveloperBarStore.curlHistory()
longPollHistory: DeveloperBarStore.longPollHistory()
longPollStates: DeveloperBarStore.longPollStates()
providerSyncbackRequests: DeveloperBarStore.providerSyncbackRequests()
module.exports = DeveloperBar
| true | _ = require 'underscore'
React = require 'react'
{DatabaseStore,
AccountStore,
TaskQueue,
Actions,
Contact,
Utils,
Message} = require 'nylas-exports'
{InjectedComponentSet} = require 'nylas-component-kit'
DeveloperBarStore = require './developer-bar-store'
DeveloperBarTask = require './developer-bar-task'
DeveloperBarCurlItem = require './developer-bar-curl-item'
DeveloperBarLongPollItem = require './developer-bar-long-poll-item'
class DeveloperBar extends React.Component
@displayName: "DeveloperBar"
@containerRequired: false
constructor: (@props) ->
@state = _.extend @_getStateFromStores(),
section: 'curl'
filter: ''
componentDidMount: =>
@taskQueueUnsubscribe = TaskQueue.listen @_onChange
@activityStoreUnsubscribe = DeveloperBarStore.listen @_onChange
componentWillUnmount: =>
@taskQueueUnsubscribe() if @taskQueueUnsubscribe
@activityStoreUnsubscribe() if @activityStoreUnsubscribe
render: =>
<div className="developer-bar">
<div className="controls">
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('queue')}>
<span>Client Tasks ({@state.queue?.length})</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('providerSyncbackRequests')}>
<span>Provider Syncback Requests</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('long-polling')}>
{@_renderDeltaStates()}
<span>Cloud Deltas</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('curl')}>
<span>Requests: {@state.curlHistory.length}</span>
</div>
</div>
<div className="btn-container pull-left">
<div className="btn" onClick={ => @_onExpandSection('local-sync')}>
<span>Local Sync Engine</span>
</div>
</div>
</div>
{@_sectionContent()}
<div className="footer">
<div className="btn" onClick={@_onClear}>Clear</div>
<input className="filter" placeholder="Filter..." value={@state.filter} onChange={@_onFilter} />
</div>
</div>
_renderDeltaStates: =>
_.map @state.longPollStates, (status, accountId) =>
<div className="delta-state-wrap" key={accountId} >
<div title={"Account #{accountId} - Cloud State: #{status}"} key={"PI:KEY:<KEY>END_PI"} className={"activity-status-bubble state-" + status}></div>
</div>
_sectionContent: =>
expandedDiv = <div></div>
matchingFilter = (item) =>
return true if @state.filter is ''
return JSON.stringify(item).indexOf(@state.filter) >= 0
if @state.section == 'curl'
itemDivs = @state.curlHistory.filter(matchingFilter).map (item) ->
<DeveloperBarCurlItem item={item} key={item.id}/>
expandedDiv = <div className="expanded-section curl-history">{itemDivs}</div>
else if @state.section == 'long-polling'
itemDivs = @state.longPollHistory.filter(matchingFilter).map (item) ->
<DeveloperBarLongPollItem item={item} ignoredBecause={item.ignoredBecause} key={"#{item.cursor}-#{item.timestamp}"}/>
expandedDiv = <div className="expanded-section long-polling">{itemDivs}</div>
else if @state.section == 'local-sync'
expandedDiv = <div className="expanded-section local-sync">
<InjectedComponentSet matching={{role: "Developer:LocalSyncUI"}} />
</div>
else if @state.section == 'providerSyncbackRequests'
reqs = @state.providerSyncbackRequests.map (req) =>
<div key={req.id}> {req.type}: {req.status} - {JSON.stringify(req.props)}</div>
expandedDiv = <div className="expanded-section provider-syncback-requests">{reqs}</div>
else if @state.section == 'queue'
queue = @state.queue.filter(matchingFilter)
queueDivs = for i in [@state.queue.length - 1..0] by -1
task = @state.queue[i]
# We need to pass the task separately because we want to update
# when just that variable changes. Otherwise, since the `task`
# pointer doesn't change, the `DeveloperBarTask` doesn't know to
# update.
status = @state.queue[i].queueState.status
<DeveloperBarTask task={task}
key={PI:KEY:<KEY>END_PI}
status={status}
type="queued" />
queueCompleted = @state.completed.filter(matchingFilter)
queueCompletedDivs = for i in [@state.completed.length - 1..0] by -1
task = @state.completed[i]
<DeveloperBarTask task={task}
key={PI:KEY:<KEY>END_PI}
type="completed" />
expandedDiv =
<div className="expanded-section queue">
<div className="btn queue-buttons"
onClick={@_onDequeueAll}>Remove Queued Tasks</div>
<div className="section-content">
{queueDivs}
<hr />
{queueCompletedDivs}
</div>
</div>
expandedDiv
_onChange: =>
@setState(@_getStateFromStores())
_onClear: =>
Actions.clearDeveloperConsole()
_onFilter: (ev) =>
@setState(filter: ev.target.value)
_onDequeueAll: =>
Actions.dequeueAllTasks()
_onExpandSection: (section) =>
@setState(@_getStateFromStores())
@setState(section: section)
_getStateFromStores: =>
queue: Utils.deepClone(TaskQueue._queue)
completed: TaskQueue._completed
curlHistory: DeveloperBarStore.curlHistory()
longPollHistory: DeveloperBarStore.longPollHistory()
longPollStates: DeveloperBarStore.longPollStates()
providerSyncbackRequests: DeveloperBarStore.providerSyncbackRequests()
module.exports = DeveloperBar
|
[
{
"context": "t(\"li\").attr('id').substr(5)\n if key is \"quit\" then return true\n else if key is \"conta",
"end": 10756,
"score": 0.9658454060554504,
"start": 10752,
"tag": "KEY",
"value": "quit"
},
{
"context": "\"quit\" then return true\n else if key i... | assets/js/src/category_tree.coffee | marten-seemann/oxid-kategorie-master | 0 | # handle both the language selectors.
#
"use strict"
class window.CategoryTree
# Constructor
#
# make a tree using the jQuery jstree plugin
#
# does not call the *initialize* function!
#
# @param [jQuery] dom_elem the DOM element where the tree should be created
# @see http://www.jstree.com
constructor: (@dom_elem) ->
@selected = [] # contains all selected (= highlighted elements in the tree)
@notifications = document.notification_handler
@loading = @notifications.loading
# initialize the category tree
#
# does the whole configuration necessary for the jstree plugin
#
# calls *addListeners* at the end
initialize: ->
@article_table = document.article_table
# initialize the jstree
# must be placed at the bottom
starttime = new Date().getTime()
@loading.category_tree = true
@notifications.checkDisplayLoading()
@dom_elem.jstree
types:
valid_children: [ "root" ]
types:
root:
icon:
image: "jstree/_docs/_drive.png"
ui:
select_multiple_modifier: "on"
disable_selecting_children: false
selected_parent_close: false
selected_parent_open: true
search: show_only_matches: true
core: animation: 100
themes: theme: 'default'
# disable moving tree elements completely
crrm:
move:
check_move: (m) -> false
# use the cookies plugin to save which nodes where opened the last time, but not which were selected
cookies:
save_selected: false
json_data:
ajax:
url: "ajax/categories.php"
cache: false
progressive_render: false
data: (n) ->
id: if n.attr then n.attr("id") else 0
complete: =>
@loading.category_tree = false
@notifications.checkDisplayLoading()
dnd:
open_timeout: 500
drag_finish: (data) ->
$("body").css('cursor', 'auto')
$(data.r).children("a").trigger "contextmenu"
plugins: [ "json_data", "ui", "themes", "search", "dnd", "crrm", "cookies" ]
endtime = new Date().getTime()
# console.log "Time to build the tree: "+(endtime-starttime)+" ms"
@addListeners()
# add listeners
#
# adds lots of listeners to handle click, selection, deselecting etc. of nodes
#
# calls *addContextMenu* at the end
addListeners: ->
# close all button: on click close *all* nodes in the tree
$('#tree_close_all input[type="button"]').bind 'click', =>
@dom_elem.jstree("close_all", -1)
false
# only handle right click on tree nodes
@dom_elem.filter("li a").bind 'mousedown', (event) =>
return if event.button != 2
# modify the default "deselect_all" behaviour of the jstree
# we need to do this because the main_category is sometimes added as a CSS class when selecting nodes, thus it must also be deselected
@dom_elem.bind 'deselect_all.jstree', (event,data) =>
@dom_elem.find(".main_category, .category_for_all").removeClass "main_category category_for_all"
# show long category name - if category name was shortened - on hover
@dom_elem.bind 'hover_node.jstree', (event, data) =>
node = data.args[0]
name_long = $(node).parent("li").data "name_long"
return false if !!name_long && name_long.length == 0
$(node).tooltip title: name_long, placement: 'left'
$(node).tooltip 'show'
# disable selecting nodes (by clicking)
# only selection should be possible by the script, so we disable it here
@dom_elem.bind 'select_node.jstree', (event, data) =>
# data.args.length == 3 only when the node was clicked
# if the node was selected by the script, we have data.args.length == 1
# why? nobody knows :-)
if data.args.length == 3 then @dom_elem.jstree('deselect_node', $(data.args[0]))
# disable deselecting nodes (by clicking)
# only deselection should be possible by the script, so we disable it here
@dom_elem.bind 'deselect_node.jstree', (event, data) =>
# check if the clicked node is already selected
return false unless $(data.args[0]).is 'li' # avoid some strange, annoying error message
contained = false
for elem in @selected
if $(elem).attr('id') == data.args[0].attr('id') then contained = true
# if the clicked node was already selected, reselect it
# this is necessary because the jstree first deselects it, when clicked, and then fires this event
if contained then @dom_elem.jstree('select_node', $(data.args[0]))
# fixes a faulty behaviour occuring in all browsers: if the jstree fits on one page on the screen, sticky scrolling does not work and leads to a "bouncing" datatable
# fix: disable the sticky scrolling selector if the tree is so small that it fits one page on the screen
# filter article list according to category given as a GET parameter
@dom_elem.bind 'after_open.jstree after_close.jstree loaded.jstree', (event,data) =>
# height of the jstree + (absolute) y-position of the jstree - (absolute) y-position of the topmost element when sticky scrolling is enabled
tree_bottom_y = @dom_elem.height() + @dom_elem.offset().top - $("#article_wrapper").position().top
if tree_bottom_y < $(window).height()
$('#sticky_scrolling_selector').hide()
document.sticky_scrolling = false
else
$('#sticky_scrolling_selector').show()
# ugly workaround to trigger the body of the click-handler :-)
# this then determines how to set the variable sticky_scrolling
$('#sticky_scrolling').triggerHandler 'click'
$('#sticky_scrolling').triggerHandler 'click'
# filter the article list to show only products from a certain category if given as a GET parameter
if $.urlParam('only_cat').length > 0 then @article_table.enableCategoryFiltering $.urlParam('only_cat')
# show the subtree of a found element
# default behaviour of jstree is to hide *all* non-matched elements
# @param [jQuery] elem the tree node whose children (and children of children and so on) should be shown
showSubtree = (elem) =>
# correct the appearance of the jstree by adding the jstree-last CSS class to the last elements of each subtree
# needed when manually showing / hiding nodes
correctNode = (elem) ->
last = elem.children("li").eq(-1)
last.addClass("jstree-last")
children = elem.children("li")
correctNode($(child).children("ul:first")) for child in children
elem.siblings("ul:first").find("li").show()
correctNode elem.siblings("ul:first")
# search the jstree
$('#tree_search').typeWatch
callback: (data, elem) =>
@loading.category_tree = true
starttime = new Date().getTime()
@dom_elem.jstree("search", data)
endtime = new Date().getTime()
# console.log "Time to search the tree: "+(endtime-starttime)+" ms"
# treeelem.children("ul").children("li").eq(-1).addClass("jstree-last")
showSubtree $(".jstree-search")
@highlightCategories
@loading.category_tree = false
wait: 600,
highlight: true
captureLength: 0
@addContextMenu()
# add the context menu appearing when dropping elements on the tree or when right clicking on a tree node
#
# using the jQuery ContextMenu plugin
# @see http://medialize.github.com/jQuery-contextMenu/
addContextMenu: ->
contextmenu_items =
"main_category":
name: lang.contextmenu_set_main_category
icon: "heart"
"redefine":
name: lang.contextmenu_redefine
icon: "legal"
"add":
name: lang.contextmenu_add
icon: "plus"
"delete":
name: lang.contextmenu_remove
icon: "trash"
"sep1": "---------"
"contained_products":
name: lang.contextmenu_only_products_from_this_cat
icon: "filter"
"open_category_admin":
name: lang.contextmenu_open_category_admin
icon: "external-link"
"sep2": "---------"
"quit":
name: lang.contextmenu_quit
icon: "remove"
# drag context menu (is shown when dropped)
$.contextMenu
selector: "#{@dom_elem.selector} li a"
build: (trigger, event) =>
# catch some strange behaviour when rightclicking when contextMenu is already enabled
return false if !event.originalEvent && event.pageX
# make a copy of contextmenu_items. thus, we do not change contextmenu_items itself
contextmenu_items_tmp = {}
$.extend(contextmenu_items_tmp,contextmenu_items)
# disable the function to show only products from this category if we are showing only products without a category
contextmenu_items_tmp.contained_products.disabled = !!$("#show_only_empty_categories").is(':checked')
unless event.which? # are we dealing with a drag&drop event?
contextmenu_items_tmp.contained_products.disabled = true # disable the function to show only products from this category
contextmenu_items_tmp.open_category_admin.disabled = true
# enable / disable options according to the selected products
if @article_table.getSelectedRows().length is 0 # if no products are selected, we cannot do anything with the categories
contextmenu_items_tmp.add.disabled = true
contextmenu_items_tmp.delete.disabled = true
contextmenu_items_tmp.redefine.disabled = true
contextmenu_items_tmp.main_category.disabled = true
else
# if @selected.length is 1 then contextmenu_items_tmp.redefine.disabled = true # if only one category is highlighted, you cannot set this one as the only category (since it is exacty this already) DOES NOT WORK
contextmenu_items_tmp.add.disabled = if trigger.hasClass('category_for_all') then true else false
contextmenu_items_tmp.main_category.disabled = if trigger.hasClass('category_for_all main_category') then true else false
if event.button != 2 then delete contextmenu_items_tmp.delete # show the delete option only on right click
else
# disable the delete option if
# 1. the selected category is not highlighted
# 2. the selected category is a main category
contextmenu_items_tmp.delete.disabled = if (@dom_elem.jstree('is_selected',trigger) && !trigger.hasClass('main_category')) then false else true
# finised enabling / disabling
items: contextmenu_items_tmp, callback: (key, options) =>
newcat = options.$trigger.parent("li").attr('id').substr(5)
if key is "quit" then return true
else if key is "contained_products" then @article_table.enableCategoryFiltering newcat
else if key is "open_category_admin"
if document.config.category_admin_path then $.open_in_new_tab "#{document.config.category_admin_path}?cat=#{newcat}"
else $('#modal_buy_category_admin').modal 'toggle' #show the dialog explaining how to buy the Category Admin
else
ids = ( $(elem).attr('id') for elem in @article_table.getSelectedRows() )
$.ajax
type: "POST"
url: "ajax/assign.php"
dataType: "json"
cache: false
data:
mode: key
ids: ids.join(",")
new_cat: newcat
error: (data) =>
@notifications.showError lang.error_categories_updated
success: (data, data2, data3) =>
if data == "false" || data == "" then @notifications.showError lang.error_categories_updated
else
@article_table.updateCategories data
@notifications.showSuccess lang.sucess_categories_updated
# open tree nodes according to the selected rows in the table
#
# @param [Boolean] open_nodes open subnodes if parent node is selected. **Caution**: open_nodes = false needs modified *jstree.js*
# @todo write a method *selectNode* or sth like this to clean up this function
highlightCategories: ( open_nodes = true ) ->
settings = @dom_elem.jstree "get_settings"
if settings.ui.selected_parent_open != open_nodes
settings.ui.selected_parent_open = if open_nodes then true else false
# Caution: needs modified jstree!!!
@dom_elem.jstree("set_settings", settings)
invisible_cats = false
categories = @article_table.getCategoriesOfSelected()
@dom_elem.jstree "deselect_all"
# highlight all categories that are assigned
num_rows_selected = @article_table.getSelectedRows().length
for cat in categories
if cat.length == 0 then continue
node = $("#node_" + $.escapeId(cat))
@dom_elem.jstree("select_node", node)
if node.is(':hidden') then invisible_cats = true
if $.countOccurences(categories,cat) == num_rows_selected then node.children("a").addClass 'category_for_all'
categories_main = @article_table.getMainCategoriesOfSelected()
# now add the highlighting for the main categories
for cat in categories_main
if cat.length == 0 then continue
node = $("#node_" + $.escapeId(cat))
node.children("a").addClass 'main_category'
if $.countOccurences(categories_main,cat) == num_rows_selected then node.children("a").addClass 'category_for_all'
if invisible_cats then $("#search_hidden_cat_warning").show() else $("#search_hidden_cat_warning").hide()
@selected = @dom_elem.jstree 'get_selected'
# get the category names of all parent categories of a category
#
# the category itself will be included in the name listing
# the order is as shown in the category tree, thus the first element returned is the name of topmost category
#
# @param [String] cat_id the OXID of the category
# @return [Array<String>] the names of all parent categories starting with the topmost category
getParentsNames: (cat_id) ->
cat_id = $.escapeId cat_id
names = ($.trim $(node).children("a").text() for node in $("#node_#{cat_id}").add($("#node_#{cat_id}").parents().filter("li")))
# deselect all selected tree elements
deselectAll: ->
@dom_elem.jstree "deselect_all"
| 59671 | # handle both the language selectors.
#
"use strict"
class window.CategoryTree
# Constructor
#
# make a tree using the jQuery jstree plugin
#
# does not call the *initialize* function!
#
# @param [jQuery] dom_elem the DOM element where the tree should be created
# @see http://www.jstree.com
constructor: (@dom_elem) ->
@selected = [] # contains all selected (= highlighted elements in the tree)
@notifications = document.notification_handler
@loading = @notifications.loading
# initialize the category tree
#
# does the whole configuration necessary for the jstree plugin
#
# calls *addListeners* at the end
initialize: ->
@article_table = document.article_table
# initialize the jstree
# must be placed at the bottom
starttime = new Date().getTime()
@loading.category_tree = true
@notifications.checkDisplayLoading()
@dom_elem.jstree
types:
valid_children: [ "root" ]
types:
root:
icon:
image: "jstree/_docs/_drive.png"
ui:
select_multiple_modifier: "on"
disable_selecting_children: false
selected_parent_close: false
selected_parent_open: true
search: show_only_matches: true
core: animation: 100
themes: theme: 'default'
# disable moving tree elements completely
crrm:
move:
check_move: (m) -> false
# use the cookies plugin to save which nodes where opened the last time, but not which were selected
cookies:
save_selected: false
json_data:
ajax:
url: "ajax/categories.php"
cache: false
progressive_render: false
data: (n) ->
id: if n.attr then n.attr("id") else 0
complete: =>
@loading.category_tree = false
@notifications.checkDisplayLoading()
dnd:
open_timeout: 500
drag_finish: (data) ->
$("body").css('cursor', 'auto')
$(data.r).children("a").trigger "contextmenu"
plugins: [ "json_data", "ui", "themes", "search", "dnd", "crrm", "cookies" ]
endtime = new Date().getTime()
# console.log "Time to build the tree: "+(endtime-starttime)+" ms"
@addListeners()
# add listeners
#
# adds lots of listeners to handle click, selection, deselecting etc. of nodes
#
# calls *addContextMenu* at the end
addListeners: ->
# close all button: on click close *all* nodes in the tree
$('#tree_close_all input[type="button"]').bind 'click', =>
@dom_elem.jstree("close_all", -1)
false
# only handle right click on tree nodes
@dom_elem.filter("li a").bind 'mousedown', (event) =>
return if event.button != 2
# modify the default "deselect_all" behaviour of the jstree
# we need to do this because the main_category is sometimes added as a CSS class when selecting nodes, thus it must also be deselected
@dom_elem.bind 'deselect_all.jstree', (event,data) =>
@dom_elem.find(".main_category, .category_for_all").removeClass "main_category category_for_all"
# show long category name - if category name was shortened - on hover
@dom_elem.bind 'hover_node.jstree', (event, data) =>
node = data.args[0]
name_long = $(node).parent("li").data "name_long"
return false if !!name_long && name_long.length == 0
$(node).tooltip title: name_long, placement: 'left'
$(node).tooltip 'show'
# disable selecting nodes (by clicking)
# only selection should be possible by the script, so we disable it here
@dom_elem.bind 'select_node.jstree', (event, data) =>
# data.args.length == 3 only when the node was clicked
# if the node was selected by the script, we have data.args.length == 1
# why? nobody knows :-)
if data.args.length == 3 then @dom_elem.jstree('deselect_node', $(data.args[0]))
# disable deselecting nodes (by clicking)
# only deselection should be possible by the script, so we disable it here
@dom_elem.bind 'deselect_node.jstree', (event, data) =>
# check if the clicked node is already selected
return false unless $(data.args[0]).is 'li' # avoid some strange, annoying error message
contained = false
for elem in @selected
if $(elem).attr('id') == data.args[0].attr('id') then contained = true
# if the clicked node was already selected, reselect it
# this is necessary because the jstree first deselects it, when clicked, and then fires this event
if contained then @dom_elem.jstree('select_node', $(data.args[0]))
# fixes a faulty behaviour occuring in all browsers: if the jstree fits on one page on the screen, sticky scrolling does not work and leads to a "bouncing" datatable
# fix: disable the sticky scrolling selector if the tree is so small that it fits one page on the screen
# filter article list according to category given as a GET parameter
@dom_elem.bind 'after_open.jstree after_close.jstree loaded.jstree', (event,data) =>
# height of the jstree + (absolute) y-position of the jstree - (absolute) y-position of the topmost element when sticky scrolling is enabled
tree_bottom_y = @dom_elem.height() + @dom_elem.offset().top - $("#article_wrapper").position().top
if tree_bottom_y < $(window).height()
$('#sticky_scrolling_selector').hide()
document.sticky_scrolling = false
else
$('#sticky_scrolling_selector').show()
# ugly workaround to trigger the body of the click-handler :-)
# this then determines how to set the variable sticky_scrolling
$('#sticky_scrolling').triggerHandler 'click'
$('#sticky_scrolling').triggerHandler 'click'
# filter the article list to show only products from a certain category if given as a GET parameter
if $.urlParam('only_cat').length > 0 then @article_table.enableCategoryFiltering $.urlParam('only_cat')
# show the subtree of a found element
# default behaviour of jstree is to hide *all* non-matched elements
# @param [jQuery] elem the tree node whose children (and children of children and so on) should be shown
showSubtree = (elem) =>
# correct the appearance of the jstree by adding the jstree-last CSS class to the last elements of each subtree
# needed when manually showing / hiding nodes
correctNode = (elem) ->
last = elem.children("li").eq(-1)
last.addClass("jstree-last")
children = elem.children("li")
correctNode($(child).children("ul:first")) for child in children
elem.siblings("ul:first").find("li").show()
correctNode elem.siblings("ul:first")
# search the jstree
$('#tree_search').typeWatch
callback: (data, elem) =>
@loading.category_tree = true
starttime = new Date().getTime()
@dom_elem.jstree("search", data)
endtime = new Date().getTime()
# console.log "Time to search the tree: "+(endtime-starttime)+" ms"
# treeelem.children("ul").children("li").eq(-1).addClass("jstree-last")
showSubtree $(".jstree-search")
@highlightCategories
@loading.category_tree = false
wait: 600,
highlight: true
captureLength: 0
@addContextMenu()
# add the context menu appearing when dropping elements on the tree or when right clicking on a tree node
#
# using the jQuery ContextMenu plugin
# @see http://medialize.github.com/jQuery-contextMenu/
addContextMenu: ->
contextmenu_items =
"main_category":
name: lang.contextmenu_set_main_category
icon: "heart"
"redefine":
name: lang.contextmenu_redefine
icon: "legal"
"add":
name: lang.contextmenu_add
icon: "plus"
"delete":
name: lang.contextmenu_remove
icon: "trash"
"sep1": "---------"
"contained_products":
name: lang.contextmenu_only_products_from_this_cat
icon: "filter"
"open_category_admin":
name: lang.contextmenu_open_category_admin
icon: "external-link"
"sep2": "---------"
"quit":
name: lang.contextmenu_quit
icon: "remove"
# drag context menu (is shown when dropped)
$.contextMenu
selector: "#{@dom_elem.selector} li a"
build: (trigger, event) =>
# catch some strange behaviour when rightclicking when contextMenu is already enabled
return false if !event.originalEvent && event.pageX
# make a copy of contextmenu_items. thus, we do not change contextmenu_items itself
contextmenu_items_tmp = {}
$.extend(contextmenu_items_tmp,contextmenu_items)
# disable the function to show only products from this category if we are showing only products without a category
contextmenu_items_tmp.contained_products.disabled = !!$("#show_only_empty_categories").is(':checked')
unless event.which? # are we dealing with a drag&drop event?
contextmenu_items_tmp.contained_products.disabled = true # disable the function to show only products from this category
contextmenu_items_tmp.open_category_admin.disabled = true
# enable / disable options according to the selected products
if @article_table.getSelectedRows().length is 0 # if no products are selected, we cannot do anything with the categories
contextmenu_items_tmp.add.disabled = true
contextmenu_items_tmp.delete.disabled = true
contextmenu_items_tmp.redefine.disabled = true
contextmenu_items_tmp.main_category.disabled = true
else
# if @selected.length is 1 then contextmenu_items_tmp.redefine.disabled = true # if only one category is highlighted, you cannot set this one as the only category (since it is exacty this already) DOES NOT WORK
contextmenu_items_tmp.add.disabled = if trigger.hasClass('category_for_all') then true else false
contextmenu_items_tmp.main_category.disabled = if trigger.hasClass('category_for_all main_category') then true else false
if event.button != 2 then delete contextmenu_items_tmp.delete # show the delete option only on right click
else
# disable the delete option if
# 1. the selected category is not highlighted
# 2. the selected category is a main category
contextmenu_items_tmp.delete.disabled = if (@dom_elem.jstree('is_selected',trigger) && !trigger.hasClass('main_category')) then false else true
# finised enabling / disabling
items: contextmenu_items_tmp, callback: (key, options) =>
newcat = options.$trigger.parent("li").attr('id').substr(5)
if key is "<KEY>" then return true
else if key is "<KEY>" then @article_table.enableCategoryFiltering newcat
else if key is "<KEY>"
if document.config.category_admin_path then $.open_in_new_tab "#{document.config.category_admin_path}?cat=#{newcat}"
else $('#modal_buy_category_admin').modal 'toggle' #show the dialog explaining how to buy the Category Admin
else
ids = ( $(elem).attr('id') for elem in @article_table.getSelectedRows() )
$.ajax
type: "POST"
url: "ajax/assign.php"
dataType: "json"
cache: false
data:
mode: key
ids: ids.join(",")
new_cat: newcat
error: (data) =>
@notifications.showError lang.error_categories_updated
success: (data, data2, data3) =>
if data == "false" || data == "" then @notifications.showError lang.error_categories_updated
else
@article_table.updateCategories data
@notifications.showSuccess lang.sucess_categories_updated
# open tree nodes according to the selected rows in the table
#
# @param [Boolean] open_nodes open subnodes if parent node is selected. **Caution**: open_nodes = false needs modified *jstree.js*
# @todo write a method *selectNode* or sth like this to clean up this function
highlightCategories: ( open_nodes = true ) ->
settings = @dom_elem.jstree "get_settings"
if settings.ui.selected_parent_open != open_nodes
settings.ui.selected_parent_open = if open_nodes then true else false
# Caution: needs modified jstree!!!
@dom_elem.jstree("set_settings", settings)
invisible_cats = false
categories = @article_table.getCategoriesOfSelected()
@dom_elem.jstree "deselect_all"
# highlight all categories that are assigned
num_rows_selected = @article_table.getSelectedRows().length
for cat in categories
if cat.length == 0 then continue
node = $("#node_" + $.escapeId(cat))
@dom_elem.jstree("select_node", node)
if node.is(':hidden') then invisible_cats = true
if $.countOccurences(categories,cat) == num_rows_selected then node.children("a").addClass 'category_for_all'
categories_main = @article_table.getMainCategoriesOfSelected()
# now add the highlighting for the main categories
for cat in categories_main
if cat.length == 0 then continue
node = $("#node_" + $.escapeId(cat))
node.children("a").addClass 'main_category'
if $.countOccurences(categories_main,cat) == num_rows_selected then node.children("a").addClass 'category_for_all'
if invisible_cats then $("#search_hidden_cat_warning").show() else $("#search_hidden_cat_warning").hide()
@selected = @dom_elem.jstree 'get_selected'
# get the category names of all parent categories of a category
#
# the category itself will be included in the name listing
# the order is as shown in the category tree, thus the first element returned is the name of topmost category
#
# @param [String] cat_id the OXID of the category
# @return [Array<String>] the names of all parent categories starting with the topmost category
getParentsNames: (cat_id) ->
cat_id = $.escapeId cat_id
names = ($.trim $(node).children("a").text() for node in $("#node_#{cat_id}").add($("#node_#{cat_id}").parents().filter("li")))
# deselect all selected tree elements
deselectAll: ->
@dom_elem.jstree "deselect_all"
| true | # handle both the language selectors.
#
"use strict"
class window.CategoryTree
# Constructor
#
# make a tree using the jQuery jstree plugin
#
# does not call the *initialize* function!
#
# @param [jQuery] dom_elem the DOM element where the tree should be created
# @see http://www.jstree.com
constructor: (@dom_elem) ->
@selected = [] # contains all selected (= highlighted elements in the tree)
@notifications = document.notification_handler
@loading = @notifications.loading
# initialize the category tree
#
# does the whole configuration necessary for the jstree plugin
#
# calls *addListeners* at the end
initialize: ->
@article_table = document.article_table
# initialize the jstree
# must be placed at the bottom
starttime = new Date().getTime()
@loading.category_tree = true
@notifications.checkDisplayLoading()
@dom_elem.jstree
types:
valid_children: [ "root" ]
types:
root:
icon:
image: "jstree/_docs/_drive.png"
ui:
select_multiple_modifier: "on"
disable_selecting_children: false
selected_parent_close: false
selected_parent_open: true
search: show_only_matches: true
core: animation: 100
themes: theme: 'default'
# disable moving tree elements completely
crrm:
move:
check_move: (m) -> false
# use the cookies plugin to save which nodes where opened the last time, but not which were selected
cookies:
save_selected: false
json_data:
ajax:
url: "ajax/categories.php"
cache: false
progressive_render: false
data: (n) ->
id: if n.attr then n.attr("id") else 0
complete: =>
@loading.category_tree = false
@notifications.checkDisplayLoading()
dnd:
open_timeout: 500
drag_finish: (data) ->
$("body").css('cursor', 'auto')
$(data.r).children("a").trigger "contextmenu"
plugins: [ "json_data", "ui", "themes", "search", "dnd", "crrm", "cookies" ]
endtime = new Date().getTime()
# console.log "Time to build the tree: "+(endtime-starttime)+" ms"
@addListeners()
# add listeners
#
# adds lots of listeners to handle click, selection, deselecting etc. of nodes
#
# calls *addContextMenu* at the end
addListeners: ->
# close all button: on click close *all* nodes in the tree
$('#tree_close_all input[type="button"]').bind 'click', =>
@dom_elem.jstree("close_all", -1)
false
# only handle right click on tree nodes
@dom_elem.filter("li a").bind 'mousedown', (event) =>
return if event.button != 2
# modify the default "deselect_all" behaviour of the jstree
# we need to do this because the main_category is sometimes added as a CSS class when selecting nodes, thus it must also be deselected
@dom_elem.bind 'deselect_all.jstree', (event,data) =>
@dom_elem.find(".main_category, .category_for_all").removeClass "main_category category_for_all"
# show long category name - if category name was shortened - on hover
@dom_elem.bind 'hover_node.jstree', (event, data) =>
node = data.args[0]
name_long = $(node).parent("li").data "name_long"
return false if !!name_long && name_long.length == 0
$(node).tooltip title: name_long, placement: 'left'
$(node).tooltip 'show'
# disable selecting nodes (by clicking)
# only selection should be possible by the script, so we disable it here
@dom_elem.bind 'select_node.jstree', (event, data) =>
# data.args.length == 3 only when the node was clicked
# if the node was selected by the script, we have data.args.length == 1
# why? nobody knows :-)
if data.args.length == 3 then @dom_elem.jstree('deselect_node', $(data.args[0]))
# disable deselecting nodes (by clicking)
# only deselection should be possible by the script, so we disable it here
@dom_elem.bind 'deselect_node.jstree', (event, data) =>
# check if the clicked node is already selected
return false unless $(data.args[0]).is 'li' # avoid some strange, annoying error message
contained = false
for elem in @selected
if $(elem).attr('id') == data.args[0].attr('id') then contained = true
# if the clicked node was already selected, reselect it
# this is necessary because the jstree first deselects it, when clicked, and then fires this event
if contained then @dom_elem.jstree('select_node', $(data.args[0]))
# fixes a faulty behaviour occuring in all browsers: if the jstree fits on one page on the screen, sticky scrolling does not work and leads to a "bouncing" datatable
# fix: disable the sticky scrolling selector if the tree is so small that it fits one page on the screen
# filter article list according to category given as a GET parameter
@dom_elem.bind 'after_open.jstree after_close.jstree loaded.jstree', (event,data) =>
# height of the jstree + (absolute) y-position of the jstree - (absolute) y-position of the topmost element when sticky scrolling is enabled
tree_bottom_y = @dom_elem.height() + @dom_elem.offset().top - $("#article_wrapper").position().top
if tree_bottom_y < $(window).height()
$('#sticky_scrolling_selector').hide()
document.sticky_scrolling = false
else
$('#sticky_scrolling_selector').show()
# ugly workaround to trigger the body of the click-handler :-)
# this then determines how to set the variable sticky_scrolling
$('#sticky_scrolling').triggerHandler 'click'
$('#sticky_scrolling').triggerHandler 'click'
# filter the article list to show only products from a certain category if given as a GET parameter
if $.urlParam('only_cat').length > 0 then @article_table.enableCategoryFiltering $.urlParam('only_cat')
# show the subtree of a found element
# default behaviour of jstree is to hide *all* non-matched elements
# @param [jQuery] elem the tree node whose children (and children of children and so on) should be shown
showSubtree = (elem) =>
# correct the appearance of the jstree by adding the jstree-last CSS class to the last elements of each subtree
# needed when manually showing / hiding nodes
correctNode = (elem) ->
last = elem.children("li").eq(-1)
last.addClass("jstree-last")
children = elem.children("li")
correctNode($(child).children("ul:first")) for child in children
elem.siblings("ul:first").find("li").show()
correctNode elem.siblings("ul:first")
# search the jstree
$('#tree_search').typeWatch
callback: (data, elem) =>
@loading.category_tree = true
starttime = new Date().getTime()
@dom_elem.jstree("search", data)
endtime = new Date().getTime()
# console.log "Time to search the tree: "+(endtime-starttime)+" ms"
# treeelem.children("ul").children("li").eq(-1).addClass("jstree-last")
showSubtree $(".jstree-search")
@highlightCategories
@loading.category_tree = false
wait: 600,
highlight: true
captureLength: 0
@addContextMenu()
# add the context menu appearing when dropping elements on the tree or when right clicking on a tree node
#
# using the jQuery ContextMenu plugin
# @see http://medialize.github.com/jQuery-contextMenu/
addContextMenu: ->
contextmenu_items =
"main_category":
name: lang.contextmenu_set_main_category
icon: "heart"
"redefine":
name: lang.contextmenu_redefine
icon: "legal"
"add":
name: lang.contextmenu_add
icon: "plus"
"delete":
name: lang.contextmenu_remove
icon: "trash"
"sep1": "---------"
"contained_products":
name: lang.contextmenu_only_products_from_this_cat
icon: "filter"
"open_category_admin":
name: lang.contextmenu_open_category_admin
icon: "external-link"
"sep2": "---------"
"quit":
name: lang.contextmenu_quit
icon: "remove"
# drag context menu (is shown when dropped)
$.contextMenu
selector: "#{@dom_elem.selector} li a"
build: (trigger, event) =>
# catch some strange behaviour when rightclicking when contextMenu is already enabled
return false if !event.originalEvent && event.pageX
# make a copy of contextmenu_items. thus, we do not change contextmenu_items itself
contextmenu_items_tmp = {}
$.extend(contextmenu_items_tmp,contextmenu_items)
# disable the function to show only products from this category if we are showing only products without a category
contextmenu_items_tmp.contained_products.disabled = !!$("#show_only_empty_categories").is(':checked')
unless event.which? # are we dealing with a drag&drop event?
contextmenu_items_tmp.contained_products.disabled = true # disable the function to show only products from this category
contextmenu_items_tmp.open_category_admin.disabled = true
# enable / disable options according to the selected products
if @article_table.getSelectedRows().length is 0 # if no products are selected, we cannot do anything with the categories
contextmenu_items_tmp.add.disabled = true
contextmenu_items_tmp.delete.disabled = true
contextmenu_items_tmp.redefine.disabled = true
contextmenu_items_tmp.main_category.disabled = true
else
# if @selected.length is 1 then contextmenu_items_tmp.redefine.disabled = true # if only one category is highlighted, you cannot set this one as the only category (since it is exacty this already) DOES NOT WORK
contextmenu_items_tmp.add.disabled = if trigger.hasClass('category_for_all') then true else false
contextmenu_items_tmp.main_category.disabled = if trigger.hasClass('category_for_all main_category') then true else false
if event.button != 2 then delete contextmenu_items_tmp.delete # show the delete option only on right click
else
# disable the delete option if
# 1. the selected category is not highlighted
# 2. the selected category is a main category
contextmenu_items_tmp.delete.disabled = if (@dom_elem.jstree('is_selected',trigger) && !trigger.hasClass('main_category')) then false else true
# finised enabling / disabling
items: contextmenu_items_tmp, callback: (key, options) =>
newcat = options.$trigger.parent("li").attr('id').substr(5)
if key is "PI:KEY:<KEY>END_PI" then return true
else if key is "PI:KEY:<KEY>END_PI" then @article_table.enableCategoryFiltering newcat
else if key is "PI:KEY:<KEY>END_PI"
if document.config.category_admin_path then $.open_in_new_tab "#{document.config.category_admin_path}?cat=#{newcat}"
else $('#modal_buy_category_admin').modal 'toggle' #show the dialog explaining how to buy the Category Admin
else
ids = ( $(elem).attr('id') for elem in @article_table.getSelectedRows() )
$.ajax
type: "POST"
url: "ajax/assign.php"
dataType: "json"
cache: false
data:
mode: key
ids: ids.join(",")
new_cat: newcat
error: (data) =>
@notifications.showError lang.error_categories_updated
success: (data, data2, data3) =>
if data == "false" || data == "" then @notifications.showError lang.error_categories_updated
else
@article_table.updateCategories data
@notifications.showSuccess lang.sucess_categories_updated
# open tree nodes according to the selected rows in the table
#
# @param [Boolean] open_nodes open subnodes if parent node is selected. **Caution**: open_nodes = false needs modified *jstree.js*
# @todo write a method *selectNode* or sth like this to clean up this function
highlightCategories: ( open_nodes = true ) ->
settings = @dom_elem.jstree "get_settings"
if settings.ui.selected_parent_open != open_nodes
settings.ui.selected_parent_open = if open_nodes then true else false
# Caution: needs modified jstree!!!
@dom_elem.jstree("set_settings", settings)
invisible_cats = false
categories = @article_table.getCategoriesOfSelected()
@dom_elem.jstree "deselect_all"
# highlight all categories that are assigned
num_rows_selected = @article_table.getSelectedRows().length
for cat in categories
if cat.length == 0 then continue
node = $("#node_" + $.escapeId(cat))
@dom_elem.jstree("select_node", node)
if node.is(':hidden') then invisible_cats = true
if $.countOccurences(categories,cat) == num_rows_selected then node.children("a").addClass 'category_for_all'
categories_main = @article_table.getMainCategoriesOfSelected()
# now add the highlighting for the main categories
for cat in categories_main
if cat.length == 0 then continue
node = $("#node_" + $.escapeId(cat))
node.children("a").addClass 'main_category'
if $.countOccurences(categories_main,cat) == num_rows_selected then node.children("a").addClass 'category_for_all'
if invisible_cats then $("#search_hidden_cat_warning").show() else $("#search_hidden_cat_warning").hide()
@selected = @dom_elem.jstree 'get_selected'
# get the category names of all parent categories of a category
#
# the category itself will be included in the name listing
# the order is as shown in the category tree, thus the first element returned is the name of topmost category
#
# @param [String] cat_id the OXID of the category
# @return [Array<String>] the names of all parent categories starting with the topmost category
getParentsNames: (cat_id) ->
cat_id = $.escapeId cat_id
names = ($.trim $(node).children("a").text() for node in $("#node_#{cat_id}").add($("#node_#{cat_id}").parents().filter("li")))
# deselect all selected tree elements
deselectAll: ->
@dom_elem.jstree "deselect_all"
|
[
{
"context": "ch ->\n Team.save([\n {_id: '1', name: 'foo1', local: false}\n {_id: '2', name: 'foo2',",
"end": 155,
"score": 0.7174509763717651,
"start": 152,
"tag": "NAME",
"value": "foo"
},
{
"context": "->\n Team.save([\n {_id: '1', name: 'foo1', loc... | application/api/test/commands/transfer-item-test.coffee | CHU-BURA/clone-app-kobito-oss | 215 | describe "src/commands/transfer-item", ->
stubDatabases()
context '#transferItem', ->
beforeEach ->
Team.save([
{_id: '1', name: 'foo1', local: false}
{_id: '2', name: 'foo2', local: false}
])
.then ->
Item.save [
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i1'}
]
it "should transfer item to other team", ->
Item.find('i1')
.then (item) ->
kobito.commands.transferItem(item._id, '2')
.then -> Item.find('i1')
.then (item) ->
assert item.teamId, '2'
context '#transferItems', ->
context 'send to local', ->
beforeEach ->
Team.save([
{_id: '1', name: 'foo1', local: false}
{_id: '2', name: 'foo2', local: true}
])
.then ->
Item.save {
_id: 'i1'
title: 'a'
body: 'a'
tags: []
teamId: '1'
syncedItemId: 'a'
local_updated_at: 3
remote_updated_at: 2
synced_at: 2
}
it "should sanitize remote annotations", ->
kobito.commands.transferItems('1', '2')
.then -> Item.find('i1')
.then (item) ->
assert item.local_updated_at isnt null
assert item.syncedItemId is null
assert item.remote_updated_at is null
assert item.synced_at is null
context 'send items to team', ->
beforeEach ->
Team.save([
{_id: '1', name: 'foo1', local: false}
{_id: '2', name: 'foo2', local: false}
])
.then ->
Item.save [
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i1'}
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i2'}
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i3'}
{title: 'a', body: 'a', tags: [], teamId: '2', _id: 'i4'}
]
it "should be written", ->
Item.select((i) => i.teamId is '2')
.then (items) ->
assert items.length is 1
kobito.commands.transferItems('1', '2')
.then -> Item.select((i) => i.teamId is '2')
.then (items) ->
assert items.length is 4
Team.all()
.then (teams) =>
assert teams.length is 1
| 20522 | describe "src/commands/transfer-item", ->
stubDatabases()
context '#transferItem', ->
beforeEach ->
Team.save([
{_id: '1', name: '<NAME>1', local: false}
{_id: '2', name: '<NAME>2', local: false}
])
.then ->
Item.save [
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i1'}
]
it "should transfer item to other team", ->
Item.find('i1')
.then (item) ->
kobito.commands.transferItem(item._id, '2')
.then -> Item.find('i1')
.then (item) ->
assert item.teamId, '2'
context '#transferItems', ->
context 'send to local', ->
beforeEach ->
Team.save([
{_id: '1', name: '<NAME>1', local: false}
{_id: '2', name: '<NAME>2', local: true}
])
.then ->
Item.save {
_id: 'i1'
title: 'a'
body: 'a'
tags: []
teamId: '1'
syncedItemId: 'a'
local_updated_at: 3
remote_updated_at: 2
synced_at: 2
}
it "should sanitize remote annotations", ->
kobito.commands.transferItems('1', '2')
.then -> Item.find('i1')
.then (item) ->
assert item.local_updated_at isnt null
assert item.syncedItemId is null
assert item.remote_updated_at is null
assert item.synced_at is null
context 'send items to team', ->
beforeEach ->
Team.save([
{_id: '1', name: '<NAME>1', local: false}
{_id: '2', name: '<NAME>2', local: false}
])
.then ->
Item.save [
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i1'}
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i2'}
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i3'}
{title: 'a', body: 'a', tags: [], teamId: '2', _id: 'i4'}
]
it "should be written", ->
Item.select((i) => i.teamId is '2')
.then (items) ->
assert items.length is 1
kobito.commands.transferItems('1', '2')
.then -> Item.select((i) => i.teamId is '2')
.then (items) ->
assert items.length is 4
Team.all()
.then (teams) =>
assert teams.length is 1
| true | describe "src/commands/transfer-item", ->
stubDatabases()
context '#transferItem', ->
beforeEach ->
Team.save([
{_id: '1', name: 'PI:NAME:<NAME>END_PI1', local: false}
{_id: '2', name: 'PI:NAME:<NAME>END_PI2', local: false}
])
.then ->
Item.save [
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i1'}
]
it "should transfer item to other team", ->
Item.find('i1')
.then (item) ->
kobito.commands.transferItem(item._id, '2')
.then -> Item.find('i1')
.then (item) ->
assert item.teamId, '2'
context '#transferItems', ->
context 'send to local', ->
beforeEach ->
Team.save([
{_id: '1', name: 'PI:NAME:<NAME>END_PI1', local: false}
{_id: '2', name: 'PI:NAME:<NAME>END_PI2', local: true}
])
.then ->
Item.save {
_id: 'i1'
title: 'a'
body: 'a'
tags: []
teamId: '1'
syncedItemId: 'a'
local_updated_at: 3
remote_updated_at: 2
synced_at: 2
}
it "should sanitize remote annotations", ->
kobito.commands.transferItems('1', '2')
.then -> Item.find('i1')
.then (item) ->
assert item.local_updated_at isnt null
assert item.syncedItemId is null
assert item.remote_updated_at is null
assert item.synced_at is null
context 'send items to team', ->
beforeEach ->
Team.save([
{_id: '1', name: 'PI:NAME:<NAME>END_PI1', local: false}
{_id: '2', name: 'PI:NAME:<NAME>END_PI2', local: false}
])
.then ->
Item.save [
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i1'}
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i2'}
{title: 'a', body: 'a', tags: [], teamId: '1', _id: 'i3'}
{title: 'a', body: 'a', tags: [], teamId: '2', _id: 'i4'}
]
it "should be written", ->
Item.select((i) => i.teamId is '2')
.then (items) ->
assert items.length is 1
kobito.commands.transferItems('1', '2')
.then -> Item.select((i) => i.teamId is '2')
.then (items) ->
assert items.length is 4
Team.all()
.then (teams) =>
assert teams.length is 1
|
[
{
"context": "ull,\n \"updated_at\": null,\n \"name\": \"User One\",\n \"email\": \"user1@nylas.com\"\n },\n ",
"end": 1466,
"score": 0.9801584482192993,
"start": 1458,
"tag": "NAME",
"value": "User One"
},
{
"context": "ll,\n \"name\": \"User One\"... | packages/client-app/internal_packages/thread-list/spec/thread-list-spec.cjsx | cnheider/nylas-mail | 24,369 |
return
moment = require "moment"
_ = require 'underscore'
React = require "react"
ReactTestUtils = require('react-addons-test-utils')
ReactTestUtils = _.extend ReactTestUtils, require "jasmine-react-helpers"
{Thread,
Actions,
Account,
DatabaseStore,
WorkspaceStore,
NylasTestUtils,
AccountStore,
ComponentRegistry} = require "nylas-exports"
{ListTabular} = require 'nylas-component-kit'
ThreadStore = require "../lib/thread-store"
ThreadList = require "../lib/thread-list"
test_threads = -> [
(new Thread).fromJSON({
"id": "111",
"object": "thread",
"created_at": null,
"updated_at": null,
"account_id": TEST_ACCOUNT_ID,
"snippet": "snippet 111",
"subject": "Subject 111",
"tags": [
{
"id": "unseen",
"created_at": null,
"updated_at": null,
"name": "unseen"
},
{
"id": "all",
"created_at": null,
"updated_at": null,
"name": "all"
},
{
"id": "inbox",
"created_at": null,
"updated_at": null,
"name": "inbox"
},
{
"id": "unread",
"created_at": null,
"updated_at": null,
"name": "unread"
},
{
"id": "attachment",
"created_at": null,
"updated_at": null,
"name": "attachment"
}
],
"participants": [
{
"created_at": null,
"updated_at": null,
"name": "User One",
"email": "user1@nylas.com"
},
{
"created_at": null,
"updated_at": null,
"name": "User Two",
"email": "user2@nylas.com"
}
],
"last_message_received_timestamp": 1415742036
}),
(new Thread).fromJSON({
"id": "222",
"object": "thread",
"created_at": null,
"updated_at": null,
"account_id": TEST_ACCOUNT_ID,
"snippet": "snippet 222",
"subject": "Subject 222",
"tags": [
{
"id": "unread",
"created_at": null,
"updated_at": null,
"name": "unread"
},
{
"id": "all",
"created_at": null,
"updated_at": null,
"name": "all"
},
{
"id": "unseen",
"created_at": null,
"updated_at": null,
"name": "unseen"
},
{
"id": "inbox",
"created_at": null,
"updated_at": null,
"name": "inbox"
}
],
"participants": [
{
"created_at": null,
"updated_at": null,
"name": "User One",
"email": "user1@nylas.com"
},
{
"created_at": null,
"updated_at": null,
"name": "User Three",
"email": "user3@nylas.com"
}
],
"last_message_received_timestamp": 1415741913
}),
(new Thread).fromJSON({
"id": "333",
"object": "thread",
"created_at": null,
"updated_at": null,
"account_id": TEST_ACCOUNT_ID,
"snippet": "snippet 333",
"subject": "Subject 333",
"tags": [
{
"id": "inbox",
"created_at": null,
"updated_at": null,
"name": "inbox"
},
{
"id": "all",
"created_at": null,
"updated_at": null,
"name": "all"
},
{
"id": "unseen",
"created_at": null,
"updated_at": null,
"name": "unseen"
}
],
"participants": [
{
"created_at": null,
"updated_at": null,
"name": "User One",
"email": "user1@nylas.com"
},
{
"created_at": null,
"updated_at": null,
"name": "User Four",
"email": "user4@nylas.com"
}
],
"last_message_received_timestamp": 1415741837
})
]
cjsxSubjectResolver = (thread) ->
<div>
<span>Subject {thread.id}</span>
<span className="snippet">Snippet</span>
</div>
describe "ThreadList", ->
Foo = React.createClass({render: -> <div>{@props.children}</div>})
c1 = new ListTabular.Column
name: "Name"
flex: 1
resolver: (thread) -> "#{thread.id} Test Name"
c2 = new ListTabular.Column
name: "Subject"
flex: 3
resolver: cjsxSubjectResolver
c3 = new ListTabular.Column
name: "Date"
resolver: (thread) -> <Foo>{thread.id}</Foo>
columns = [c1,c2,c3]
beforeEach ->
NylasTestUtils.loadKeymap("internal_packages/thread-list/keymaps/thread-list")
spyOn(ThreadStore, "_onAccountChanged")
spyOn(DatabaseStore, "findAll").andCallFake ->
new Promise (resolve, reject) -> resolve(test_threads())
ReactTestUtils.spyOnClass(ThreadList, "_prepareColumns").andCallFake ->
@_columns = columns
ThreadStore._resetInstanceVars()
@thread_list = ReactTestUtils.renderIntoDocument(
<ThreadList />
)
it "renders into the document", ->
expect(ReactTestUtils.isCompositeComponentWithType(@thread_list,
ThreadList)).toBe true
it "has the expected columns", ->
expect(@thread_list._columns).toEqual columns
it "by default has zero children", ->
items = ReactTestUtils.scryRenderedComponentsWithType(@thread_list, ListTabular.Item)
expect(items.length).toBe 0
describe "when the workspace is in list mode", ->
beforeEach ->
spyOn(WorkspaceStore, "layoutMode").andReturn "list"
@thread_list.setState focusedId: "t111"
it "allows reply only when the sheet type is 'Thread'", ->
spyOn(WorkspaceStore, "sheet").andCallFake -> {type: "Thread"}
spyOn(Actions, "composeReply")
@thread_list._onReply()
expect(Actions.composeReply).toHaveBeenCalled()
expect(@thread_list._actionInVisualScope()).toBe true
it "doesn't reply only when the sheet type isnt 'Thread'", ->
spyOn(WorkspaceStore, "sheet").andCallFake -> {type: "Root"}
spyOn(Actions, "composeReply")
@thread_list._onReply()
expect(Actions.composeReply).not.toHaveBeenCalled()
expect(@thread_list._actionInVisualScope()).toBe false
describe "when the workspace is in split mode", ->
beforeEach ->
spyOn(WorkspaceStore, "layoutMode").andReturn "split"
@thread_list.setState focusedId: "t111"
it "allows reply and reply-all regardless of sheet type", ->
spyOn(WorkspaceStore, "sheet").andCallFake -> {type: "anything"}
spyOn(Actions, "composeReply")
@thread_list._onReply()
expect(Actions.composeReply).toHaveBeenCalled()
expect(@thread_list._actionInVisualScope()).toBe true
describe "Populated thread list", ->
beforeEach ->
view =
loaded: -> true
get: (i) -> test_threads()[i]
count: -> test_threads().length
setRetainedRange: ->
ThreadStore._view = view
ThreadStore._focusedId = null
ThreadStore.trigger(ThreadStore)
@thread_list_node = ReactDOM.findDOMNode(@thread_list)
spyOn(@thread_list, "setState").andCallThrough()
it "renders all of the thread list items", ->
advanceClock(100)
items = ReactTestUtils.scryRenderedComponentsWithType(@thread_list, ListTabular.Item)
expect(items.length).toBe(test_threads().length)
| 51645 |
return
moment = require "moment"
_ = require 'underscore'
React = require "react"
ReactTestUtils = require('react-addons-test-utils')
ReactTestUtils = _.extend ReactTestUtils, require "jasmine-react-helpers"
{Thread,
Actions,
Account,
DatabaseStore,
WorkspaceStore,
NylasTestUtils,
AccountStore,
ComponentRegistry} = require "nylas-exports"
{ListTabular} = require 'nylas-component-kit'
ThreadStore = require "../lib/thread-store"
ThreadList = require "../lib/thread-list"
test_threads = -> [
(new Thread).fromJSON({
"id": "111",
"object": "thread",
"created_at": null,
"updated_at": null,
"account_id": TEST_ACCOUNT_ID,
"snippet": "snippet 111",
"subject": "Subject 111",
"tags": [
{
"id": "unseen",
"created_at": null,
"updated_at": null,
"name": "unseen"
},
{
"id": "all",
"created_at": null,
"updated_at": null,
"name": "all"
},
{
"id": "inbox",
"created_at": null,
"updated_at": null,
"name": "inbox"
},
{
"id": "unread",
"created_at": null,
"updated_at": null,
"name": "unread"
},
{
"id": "attachment",
"created_at": null,
"updated_at": null,
"name": "attachment"
}
],
"participants": [
{
"created_at": null,
"updated_at": null,
"name": "<NAME>",
"email": "<EMAIL>"
},
{
"created_at": null,
"updated_at": null,
"name": "<NAME>",
"email": "<EMAIL>"
}
],
"last_message_received_timestamp": 1415742036
}),
(new Thread).fromJSON({
"id": "222",
"object": "thread",
"created_at": null,
"updated_at": null,
"account_id": TEST_ACCOUNT_ID,
"snippet": "snippet 222",
"subject": "Subject 222",
"tags": [
{
"id": "unread",
"created_at": null,
"updated_at": null,
"name": "unread"
},
{
"id": "all",
"created_at": null,
"updated_at": null,
"name": "all"
},
{
"id": "unseen",
"created_at": null,
"updated_at": null,
"name": "unseen"
},
{
"id": "inbox",
"created_at": null,
"updated_at": null,
"name": "inbox"
}
],
"participants": [
{
"created_at": null,
"updated_at": null,
"name": "<NAME>",
"email": "<EMAIL>"
},
{
"created_at": null,
"updated_at": null,
"name": "<NAME>",
"email": "<EMAIL>"
}
],
"last_message_received_timestamp": 1415741913
}),
(new Thread).fromJSON({
"id": "333",
"object": "thread",
"created_at": null,
"updated_at": null,
"account_id": TEST_ACCOUNT_ID,
"snippet": "snippet 333",
"subject": "Subject 333",
"tags": [
{
"id": "inbox",
"created_at": null,
"updated_at": null,
"name": "inbox"
},
{
"id": "all",
"created_at": null,
"updated_at": null,
"name": "all"
},
{
"id": "unseen",
"created_at": null,
"updated_at": null,
"name": "unseen"
}
],
"participants": [
{
"created_at": null,
"updated_at": null,
"name": "<NAME>",
"email": "<EMAIL>"
},
{
"created_at": null,
"updated_at": null,
"name": "<NAME>",
"email": "<EMAIL>"
}
],
"last_message_received_timestamp": 1415741837
})
]
cjsxSubjectResolver = (thread) ->
<div>
<span>Subject {thread.id}</span>
<span className="snippet">Snippet</span>
</div>
describe "ThreadList", ->
Foo = React.createClass({render: -> <div>{@props.children}</div>})
c1 = new ListTabular.Column
name: "Name"
flex: 1
resolver: (thread) -> "#{thread.id} Test Name"
c2 = new ListTabular.Column
name: "Subject"
flex: 3
resolver: cjsxSubjectResolver
c3 = new ListTabular.Column
name: "Date"
resolver: (thread) -> <Foo>{thread.id}</Foo>
columns = [c1,c2,c3]
beforeEach ->
NylasTestUtils.loadKeymap("internal_packages/thread-list/keymaps/thread-list")
spyOn(ThreadStore, "_onAccountChanged")
spyOn(DatabaseStore, "findAll").andCallFake ->
new Promise (resolve, reject) -> resolve(test_threads())
ReactTestUtils.spyOnClass(ThreadList, "_prepareColumns").andCallFake ->
@_columns = columns
ThreadStore._resetInstanceVars()
@thread_list = ReactTestUtils.renderIntoDocument(
<ThreadList />
)
it "renders into the document", ->
expect(ReactTestUtils.isCompositeComponentWithType(@thread_list,
ThreadList)).toBe true
it "has the expected columns", ->
expect(@thread_list._columns).toEqual columns
it "by default has zero children", ->
items = ReactTestUtils.scryRenderedComponentsWithType(@thread_list, ListTabular.Item)
expect(items.length).toBe 0
describe "when the workspace is in list mode", ->
beforeEach ->
spyOn(WorkspaceStore, "layoutMode").andReturn "list"
@thread_list.setState focusedId: "t111"
it "allows reply only when the sheet type is 'Thread'", ->
spyOn(WorkspaceStore, "sheet").andCallFake -> {type: "Thread"}
spyOn(Actions, "composeReply")
@thread_list._onReply()
expect(Actions.composeReply).toHaveBeenCalled()
expect(@thread_list._actionInVisualScope()).toBe true
it "doesn't reply only when the sheet type isnt 'Thread'", ->
spyOn(WorkspaceStore, "sheet").andCallFake -> {type: "Root"}
spyOn(Actions, "composeReply")
@thread_list._onReply()
expect(Actions.composeReply).not.toHaveBeenCalled()
expect(@thread_list._actionInVisualScope()).toBe false
describe "when the workspace is in split mode", ->
beforeEach ->
spyOn(WorkspaceStore, "layoutMode").andReturn "split"
@thread_list.setState focusedId: "t111"
it "allows reply and reply-all regardless of sheet type", ->
spyOn(WorkspaceStore, "sheet").andCallFake -> {type: "anything"}
spyOn(Actions, "composeReply")
@thread_list._onReply()
expect(Actions.composeReply).toHaveBeenCalled()
expect(@thread_list._actionInVisualScope()).toBe true
describe "Populated thread list", ->
beforeEach ->
view =
loaded: -> true
get: (i) -> test_threads()[i]
count: -> test_threads().length
setRetainedRange: ->
ThreadStore._view = view
ThreadStore._focusedId = null
ThreadStore.trigger(ThreadStore)
@thread_list_node = ReactDOM.findDOMNode(@thread_list)
spyOn(@thread_list, "setState").andCallThrough()
it "renders all of the thread list items", ->
advanceClock(100)
items = ReactTestUtils.scryRenderedComponentsWithType(@thread_list, ListTabular.Item)
expect(items.length).toBe(test_threads().length)
| true |
return
moment = require "moment"
_ = require 'underscore'
React = require "react"
ReactTestUtils = require('react-addons-test-utils')
ReactTestUtils = _.extend ReactTestUtils, require "jasmine-react-helpers"
{Thread,
Actions,
Account,
DatabaseStore,
WorkspaceStore,
NylasTestUtils,
AccountStore,
ComponentRegistry} = require "nylas-exports"
{ListTabular} = require 'nylas-component-kit'
ThreadStore = require "../lib/thread-store"
ThreadList = require "../lib/thread-list"
test_threads = -> [
(new Thread).fromJSON({
"id": "111",
"object": "thread",
"created_at": null,
"updated_at": null,
"account_id": TEST_ACCOUNT_ID,
"snippet": "snippet 111",
"subject": "Subject 111",
"tags": [
{
"id": "unseen",
"created_at": null,
"updated_at": null,
"name": "unseen"
},
{
"id": "all",
"created_at": null,
"updated_at": null,
"name": "all"
},
{
"id": "inbox",
"created_at": null,
"updated_at": null,
"name": "inbox"
},
{
"id": "unread",
"created_at": null,
"updated_at": null,
"name": "unread"
},
{
"id": "attachment",
"created_at": null,
"updated_at": null,
"name": "attachment"
}
],
"participants": [
{
"created_at": null,
"updated_at": null,
"name": "PI:NAME:<NAME>END_PI",
"email": "PI:EMAIL:<EMAIL>END_PI"
},
{
"created_at": null,
"updated_at": null,
"name": "PI:NAME:<NAME>END_PI",
"email": "PI:EMAIL:<EMAIL>END_PI"
}
],
"last_message_received_timestamp": 1415742036
}),
(new Thread).fromJSON({
"id": "222",
"object": "thread",
"created_at": null,
"updated_at": null,
"account_id": TEST_ACCOUNT_ID,
"snippet": "snippet 222",
"subject": "Subject 222",
"tags": [
{
"id": "unread",
"created_at": null,
"updated_at": null,
"name": "unread"
},
{
"id": "all",
"created_at": null,
"updated_at": null,
"name": "all"
},
{
"id": "unseen",
"created_at": null,
"updated_at": null,
"name": "unseen"
},
{
"id": "inbox",
"created_at": null,
"updated_at": null,
"name": "inbox"
}
],
"participants": [
{
"created_at": null,
"updated_at": null,
"name": "PI:NAME:<NAME>END_PI",
"email": "PI:EMAIL:<EMAIL>END_PI"
},
{
"created_at": null,
"updated_at": null,
"name": "PI:NAME:<NAME>END_PI",
"email": "PI:EMAIL:<EMAIL>END_PI"
}
],
"last_message_received_timestamp": 1415741913
}),
(new Thread).fromJSON({
"id": "333",
"object": "thread",
"created_at": null,
"updated_at": null,
"account_id": TEST_ACCOUNT_ID,
"snippet": "snippet 333",
"subject": "Subject 333",
"tags": [
{
"id": "inbox",
"created_at": null,
"updated_at": null,
"name": "inbox"
},
{
"id": "all",
"created_at": null,
"updated_at": null,
"name": "all"
},
{
"id": "unseen",
"created_at": null,
"updated_at": null,
"name": "unseen"
}
],
"participants": [
{
"created_at": null,
"updated_at": null,
"name": "PI:NAME:<NAME>END_PI",
"email": "PI:EMAIL:<EMAIL>END_PI"
},
{
"created_at": null,
"updated_at": null,
"name": "PI:NAME:<NAME>END_PI",
"email": "PI:EMAIL:<EMAIL>END_PI"
}
],
"last_message_received_timestamp": 1415741837
})
]
cjsxSubjectResolver = (thread) ->
<div>
<span>Subject {thread.id}</span>
<span className="snippet">Snippet</span>
</div>
describe "ThreadList", ->
Foo = React.createClass({render: -> <div>{@props.children}</div>})
c1 = new ListTabular.Column
name: "Name"
flex: 1
resolver: (thread) -> "#{thread.id} Test Name"
c2 = new ListTabular.Column
name: "Subject"
flex: 3
resolver: cjsxSubjectResolver
c3 = new ListTabular.Column
name: "Date"
resolver: (thread) -> <Foo>{thread.id}</Foo>
columns = [c1,c2,c3]
beforeEach ->
NylasTestUtils.loadKeymap("internal_packages/thread-list/keymaps/thread-list")
spyOn(ThreadStore, "_onAccountChanged")
spyOn(DatabaseStore, "findAll").andCallFake ->
new Promise (resolve, reject) -> resolve(test_threads())
ReactTestUtils.spyOnClass(ThreadList, "_prepareColumns").andCallFake ->
@_columns = columns
ThreadStore._resetInstanceVars()
@thread_list = ReactTestUtils.renderIntoDocument(
<ThreadList />
)
it "renders into the document", ->
expect(ReactTestUtils.isCompositeComponentWithType(@thread_list,
ThreadList)).toBe true
it "has the expected columns", ->
expect(@thread_list._columns).toEqual columns
it "by default has zero children", ->
items = ReactTestUtils.scryRenderedComponentsWithType(@thread_list, ListTabular.Item)
expect(items.length).toBe 0
describe "when the workspace is in list mode", ->
beforeEach ->
spyOn(WorkspaceStore, "layoutMode").andReturn "list"
@thread_list.setState focusedId: "t111"
it "allows reply only when the sheet type is 'Thread'", ->
spyOn(WorkspaceStore, "sheet").andCallFake -> {type: "Thread"}
spyOn(Actions, "composeReply")
@thread_list._onReply()
expect(Actions.composeReply).toHaveBeenCalled()
expect(@thread_list._actionInVisualScope()).toBe true
it "doesn't reply only when the sheet type isnt 'Thread'", ->
spyOn(WorkspaceStore, "sheet").andCallFake -> {type: "Root"}
spyOn(Actions, "composeReply")
@thread_list._onReply()
expect(Actions.composeReply).not.toHaveBeenCalled()
expect(@thread_list._actionInVisualScope()).toBe false
describe "when the workspace is in split mode", ->
beforeEach ->
spyOn(WorkspaceStore, "layoutMode").andReturn "split"
@thread_list.setState focusedId: "t111"
it "allows reply and reply-all regardless of sheet type", ->
spyOn(WorkspaceStore, "sheet").andCallFake -> {type: "anything"}
spyOn(Actions, "composeReply")
@thread_list._onReply()
expect(Actions.composeReply).toHaveBeenCalled()
expect(@thread_list._actionInVisualScope()).toBe true
describe "Populated thread list", ->
beforeEach ->
view =
loaded: -> true
get: (i) -> test_threads()[i]
count: -> test_threads().length
setRetainedRange: ->
ThreadStore._view = view
ThreadStore._focusedId = null
ThreadStore.trigger(ThreadStore)
@thread_list_node = ReactDOM.findDOMNode(@thread_list)
spyOn(@thread_list, "setState").andCallThrough()
it "renders all of the thread list items", ->
advanceClock(100)
items = ReactTestUtils.scryRenderedComponentsWithType(@thread_list, ListTabular.Item)
expect(items.length).toBe(test_threads().length)
|
[
{
"context": "#\n# Name : Cylinder\n# Author : Thomas Stachl, http://stachl.me/, @thomasstachl\n# Version : 1.0",
"end": 48,
"score": 0.9998860359191895,
"start": 35,
"tag": "NAME",
"value": "Thomas Stachl"
},
{
"context": "nder\n# Author : Thomas Stachl, http://stachl.me/, @tho... | js/cylinder.coffee | tstachl/cylinder | 1 | #
# Name : Cylinder
# Author : Thomas Stachl, http://stachl.me/, @thomasstachl
# Version : 1.0
# Repo : git@github.com:tstachl/cylinder.git
# Website : https://github.com/tstachl/cylinder
#
jQuery ->
$.cylinder = (element, options) ->
# current state
state = ''
# plugin settings
@settings = {}
# jQuery version of DOM element attached to the plugin
@$element = $ element
# set current state
@setState = (_state) -> state = _state
#get current state
@getState = -> state
# get particular plugin setting
@getSetting = (key) ->
@settings[key]
# call one of the plugin setting functions
@callSettingFunction = (name, args = []) ->
@settings[name].apply this, args
@init = ->
@settings = $.extend yes, {}, @defaults, options
@paper = Raphael element, @settings.width, @settings.height
@paper.setViewBox 0, 0, 110, 235, yes
@paper.rect(0, 21, 110, 193).attr @settings.colors.container
@paper.ellipse(55, 213, 55, 21).attr @settings.colors.container
@paper.ellipse(55, 23, 55, 21).attr @settings.colors.container
@fluid = @paper.rect(0, 193, 110, 0).attr @settings.colors.fluid
@paper.ellipse(55, 213, 55, 21).attr @settings.colors.fluid
@fluidTop = @paper.ellipse(55, 213, 55, 21).attr @settings.colors.accent
@glow = @paper.path('M 12 35.665 L 12 211 C 12 211.5 17.125 214.375 23.125 216.25 C 28.294 217.8653 36.875 219.25 37 219 L 37 42.75 C 28.4056 41.56 19.8109 39.536 12 35.665 L 12 35.665 Z')
@glow.attr @settings.colors.glow
@paper.ellipse(55, 23, 55, 21).attr $.extend { 'fill-opacity': .4 }, @settings.colors.container
@_value @settings.value
@setState 'ready'
@_value = (value) ->
height = 190 * parseFloat(value)
y = 210 - height
@fluid.attr
height: height
y: y
@fluidTop.attr 'cy', y
@value = (newValue) ->
unless @settings.value == newValue
@_value @settings.value = newValue
# initialise the plugin
@init()
# make the plugin chainable
@
# default plugin settings
$.cylinder::defaults =
colors:
container:
fill: '#e5e5e5'
stroke: '#dcdada'
'stroke-width': 1
fluid:
fill: '#0051A6'
stroke: '#003974'
'stroke-width': 1
accent:
fill: '#5d98d7'
stroke: '#4483c4'
'stroke-width': 1
glow:
fill: '#ffffff'
stroke: '#e9e9e9'
'stroke-width': 1
opacity: .4
height: 235
width: 110
value: .3
$.fn.cylinder = (options) ->
args = Array.prototype.slice.call(arguments, 1)
@each ->
if $(@).data('cylinder') is undefined
plugin = new $.cylinder(@, options)
$(@).data('cylinder', plugin)
else
if $(@).data('cylinder')[options]
$(@).data('cylinder')[options].apply($(@).data('cylinder'), args) | 196111 | #
# Name : Cylinder
# Author : <NAME>, http://stachl.me/, @thomasstachl
# Version : 1.0
# Repo : <EMAIL>:tstachl/cylinder.git
# Website : https://github.com/tstachl/cylinder
#
jQuery ->
$.cylinder = (element, options) ->
# current state
state = ''
# plugin settings
@settings = {}
# jQuery version of DOM element attached to the plugin
@$element = $ element
# set current state
@setState = (_state) -> state = _state
#get current state
@getState = -> state
# get particular plugin setting
@getSetting = (key) ->
@settings[key]
# call one of the plugin setting functions
@callSettingFunction = (name, args = []) ->
@settings[name].apply this, args
@init = ->
@settings = $.extend yes, {}, @defaults, options
@paper = Raphael element, @settings.width, @settings.height
@paper.setViewBox 0, 0, 110, 235, yes
@paper.rect(0, 21, 110, 193).attr @settings.colors.container
@paper.ellipse(55, 213, 55, 21).attr @settings.colors.container
@paper.ellipse(55, 23, 55, 21).attr @settings.colors.container
@fluid = @paper.rect(0, 193, 110, 0).attr @settings.colors.fluid
@paper.ellipse(55, 213, 55, 21).attr @settings.colors.fluid
@fluidTop = @paper.ellipse(55, 213, 55, 21).attr @settings.colors.accent
@glow = @paper.path('M 12 35.665 L 12 211 C 12 211.5 17.125 214.375 23.125 216.25 C 28.294 217.8653 36.875 219.25 37 219 L 37 42.75 C 28.4056 41.56 19.8109 39.536 12 35.665 L 12 35.665 Z')
@glow.attr @settings.colors.glow
@paper.ellipse(55, 23, 55, 21).attr $.extend { 'fill-opacity': .4 }, @settings.colors.container
@_value @settings.value
@setState 'ready'
@_value = (value) ->
height = 190 * parseFloat(value)
y = 210 - height
@fluid.attr
height: height
y: y
@fluidTop.attr 'cy', y
@value = (newValue) ->
unless @settings.value == newValue
@_value @settings.value = newValue
# initialise the plugin
@init()
# make the plugin chainable
@
# default plugin settings
$.cylinder::defaults =
colors:
container:
fill: '#e5e5e5'
stroke: '#dcdada'
'stroke-width': 1
fluid:
fill: '#0051A6'
stroke: '#003974'
'stroke-width': 1
accent:
fill: '#5d98d7'
stroke: '#4483c4'
'stroke-width': 1
glow:
fill: '#ffffff'
stroke: '#e9e9e9'
'stroke-width': 1
opacity: .4
height: 235
width: 110
value: .3
$.fn.cylinder = (options) ->
args = Array.prototype.slice.call(arguments, 1)
@each ->
if $(@).data('cylinder') is undefined
plugin = new $.cylinder(@, options)
$(@).data('cylinder', plugin)
else
if $(@).data('cylinder')[options]
$(@).data('cylinder')[options].apply($(@).data('cylinder'), args) | true | #
# Name : Cylinder
# Author : PI:NAME:<NAME>END_PI, http://stachl.me/, @thomasstachl
# Version : 1.0
# Repo : PI:EMAIL:<EMAIL>END_PI:tstachl/cylinder.git
# Website : https://github.com/tstachl/cylinder
#
jQuery ->
$.cylinder = (element, options) ->
# current state
state = ''
# plugin settings
@settings = {}
# jQuery version of DOM element attached to the plugin
@$element = $ element
# set current state
@setState = (_state) -> state = _state
#get current state
@getState = -> state
# get particular plugin setting
@getSetting = (key) ->
@settings[key]
# call one of the plugin setting functions
@callSettingFunction = (name, args = []) ->
@settings[name].apply this, args
@init = ->
@settings = $.extend yes, {}, @defaults, options
@paper = Raphael element, @settings.width, @settings.height
@paper.setViewBox 0, 0, 110, 235, yes
@paper.rect(0, 21, 110, 193).attr @settings.colors.container
@paper.ellipse(55, 213, 55, 21).attr @settings.colors.container
@paper.ellipse(55, 23, 55, 21).attr @settings.colors.container
@fluid = @paper.rect(0, 193, 110, 0).attr @settings.colors.fluid
@paper.ellipse(55, 213, 55, 21).attr @settings.colors.fluid
@fluidTop = @paper.ellipse(55, 213, 55, 21).attr @settings.colors.accent
@glow = @paper.path('M 12 35.665 L 12 211 C 12 211.5 17.125 214.375 23.125 216.25 C 28.294 217.8653 36.875 219.25 37 219 L 37 42.75 C 28.4056 41.56 19.8109 39.536 12 35.665 L 12 35.665 Z')
@glow.attr @settings.colors.glow
@paper.ellipse(55, 23, 55, 21).attr $.extend { 'fill-opacity': .4 }, @settings.colors.container
@_value @settings.value
@setState 'ready'
@_value = (value) ->
height = 190 * parseFloat(value)
y = 210 - height
@fluid.attr
height: height
y: y
@fluidTop.attr 'cy', y
@value = (newValue) ->
unless @settings.value == newValue
@_value @settings.value = newValue
# initialise the plugin
@init()
# make the plugin chainable
@
# default plugin settings
$.cylinder::defaults =
colors:
container:
fill: '#e5e5e5'
stroke: '#dcdada'
'stroke-width': 1
fluid:
fill: '#0051A6'
stroke: '#003974'
'stroke-width': 1
accent:
fill: '#5d98d7'
stroke: '#4483c4'
'stroke-width': 1
glow:
fill: '#ffffff'
stroke: '#e9e9e9'
'stroke-width': 1
opacity: .4
height: 235
width: 110
value: .3
$.fn.cylinder = (options) ->
args = Array.prototype.slice.call(arguments, 1)
@each ->
if $(@).data('cylinder') is undefined
plugin = new $.cylinder(@, options)
$(@).data('cylinder', plugin)
else
if $(@).data('cylinder')[options]
$(@).data('cylinder')[options].apply($(@).data('cylinder'), args) |
[
{
"context": "n('click', (event) =>\n username = this.$('#username').val()\n password = this.$('#password').va",
"end": 365,
"score": 0.6093173027038574,
"start": 357,
"tag": "USERNAME",
"value": "username"
},
{
"context": "rd, redirectUrl) ->\n user =\n use... | client/src/signup/signup-view.coffee | Studyokee/studyokee-youtube | 0 | define [
'backbone',
'handlebars',
'purl',
'templates'
], (Backbone, Handlebars, Purl) ->
SignupView = Backbone.View.extend(
className: "container"
initialize: () ->
render: () ->
this.$el.html(Handlebars.templates['signup'](this.model.toJSON()))
this.$('#submit').on('click', (event) =>
username = this.$('#username').val()
password = this.$('#password').val()
this.signup(username, password, $.url(document.location).param().redirectUrl)
event.preventDefault()
)
return this
signup: (username, password, redirectUrl) ->
user =
username: username
password: password
$('.registerWarning .alert').alert('close')
$.ajax(
type: 'POST'
url: '/signup'
data: user
success: (response, s, t) =>
if redirectUrl
document.location = redirectUrl
else
document.location = '/classrooms/1'
error: (err) =>
if err.responseText.indexOf('User already exists') > 0
$('.registerWarning').html(this.getAlert('Username already exists!'))
else if err.responseText.indexOf('User signup limit') > 0
$('.registerWarning').html(this.getAlert('User signup limit reached!'))
)
getAlert: (text) ->
return '<div class="alert alert-warning alert-dismissible fade in" role="alert"> <button type="button" class="close" data-dismiss="alert" aria-label="Close"><span aria-hidden="true">×</span></button> ' + text + '</div>'
)
return SignupView | 174278 | define [
'backbone',
'handlebars',
'purl',
'templates'
], (Backbone, Handlebars, Purl) ->
SignupView = Backbone.View.extend(
className: "container"
initialize: () ->
render: () ->
this.$el.html(Handlebars.templates['signup'](this.model.toJSON()))
this.$('#submit').on('click', (event) =>
username = this.$('#username').val()
password = this.$('#password').val()
this.signup(username, password, $.url(document.location).param().redirectUrl)
event.preventDefault()
)
return this
signup: (username, password, redirectUrl) ->
user =
username: username
password: <PASSWORD>
$('.registerWarning .alert').alert('close')
$.ajax(
type: 'POST'
url: '/signup'
data: user
success: (response, s, t) =>
if redirectUrl
document.location = redirectUrl
else
document.location = '/classrooms/1'
error: (err) =>
if err.responseText.indexOf('User already exists') > 0
$('.registerWarning').html(this.getAlert('Username already exists!'))
else if err.responseText.indexOf('User signup limit') > 0
$('.registerWarning').html(this.getAlert('User signup limit reached!'))
)
getAlert: (text) ->
return '<div class="alert alert-warning alert-dismissible fade in" role="alert"> <button type="button" class="close" data-dismiss="alert" aria-label="Close"><span aria-hidden="true">×</span></button> ' + text + '</div>'
)
return SignupView | true | define [
'backbone',
'handlebars',
'purl',
'templates'
], (Backbone, Handlebars, Purl) ->
SignupView = Backbone.View.extend(
className: "container"
initialize: () ->
render: () ->
this.$el.html(Handlebars.templates['signup'](this.model.toJSON()))
this.$('#submit').on('click', (event) =>
username = this.$('#username').val()
password = this.$('#password').val()
this.signup(username, password, $.url(document.location).param().redirectUrl)
event.preventDefault()
)
return this
signup: (username, password, redirectUrl) ->
user =
username: username
password: PI:PASSWORD:<PASSWORD>END_PI
$('.registerWarning .alert').alert('close')
$.ajax(
type: 'POST'
url: '/signup'
data: user
success: (response, s, t) =>
if redirectUrl
document.location = redirectUrl
else
document.location = '/classrooms/1'
error: (err) =>
if err.responseText.indexOf('User already exists') > 0
$('.registerWarning').html(this.getAlert('Username already exists!'))
else if err.responseText.indexOf('User signup limit') > 0
$('.registerWarning').html(this.getAlert('User signup limit reached!'))
)
getAlert: (text) ->
return '<div class="alert alert-warning alert-dismissible fade in" role="alert"> <button type="button" class="close" data-dismiss="alert" aria-label="Close"><span aria-hidden="true">×</span></button> ' + text + '</div>'
)
return SignupView |
[
{
"context": "Index], headers))\n keyDiv = div('hx-data-table-cell-key')\n getColumnOpt",
"end": 49249,
"score": 0.6289083361625671,
"start": 49249,
"tag": "KEY",
"value": ""
},
{
"context": "], headers))\n keyDiv = div('hx-data-table-cell... | src/components/data-table/index.coffee | p-koscielniak/hexagonjs | 61 | import { userFacingText } from 'utils/user-facing-text'
import { div, select, detached, button, i, span } from 'utils/selection'
import {
debounce,
defined,
find,
flatten,
identity,
isArray,
isFunction,
isString,
merge,
randomId,
unique
} from 'utils/utils'
import { Set as HSet } from 'utils/set'
import {compare } from 'utils/sort'
import { EventEmitter } from 'utils/event-emitter'
import * as filter from 'utils/filter'
import logger from 'utils/logger'
import { json } from 'utils/request'
import { pickerBase as picker, PickerBase as Picker } from 'components/picker'
import { Toggle } from 'components/toggle'
import { StickyTableHeaders } from 'components/sticky-table-headers'
userFacingText({
dataTable: {
addFilter: 'Add Filter',
advancedSearch: 'Advanced Search',
and: 'and',
anyColumn: 'Any column'
clearFilters: 'Clear Filters',
clearSelection: 'clear selection',
loading: 'Loading',
noData: 'No Data',
noSort: 'No Sort',
or: 'or',
rowsPerPage: 'Rows Per Page',
search: 'Search',
selectedRows: '$selected of $total selected.',
sortBy: 'Sort By',
contains: 'contains',
excludes: 'does not contain',
startsWith: 'starts with',
fuzzy: 'fuzzy matches',
regex: 'matches regex',
exact: 'is exactly',
greater: 'is greater than',
less: 'is less than'
}
})
fullWidthColSpan = 999 # the colspan used to make a cell display as an entire row
collapseBreakPoint = 480
columnOptionLookup = (options, name, id) ->
if options.columns isnt undefined and options.columns[id] isnt undefined and options.columns[id][name] isnt undefined
options.columns[id][name]
else
options[name]
toCriteriaItems = (list) ->
unique(list).map (item) ->
{
value: item,
text: userFacingText('dataTable', item)
}
advancedSearchCriteriaValidate = (value) ->
allowedTypes = filter.filterTypes()
if (isArray(value) and value.every((c) -> ~allowedTypes.indexOf(c))) or value is undefined
value or []
else if isArray(value)
invalidTypes = value.filter((c) -> not ~allowedTypes.indexOf(c))
logger.warn('Invalid Filter Criteria Specified:', invalidTypes, '\nPlease select a value from filterStringTypes()', allowedTypes)
[]
else
logger.warn('Expected an array of filter criteria but was passed:', value)
[]
splitArray = (array, index) ->
left = if index is 0 then [] else array[0...index]
right = if index is array.length - 1 then [] else array[index+1...array.length]
[left, array[index], right]
# pagination block (the page selector and the rows per page selector)
createPaginationBlock = (table) ->
container = div('hx-data-table-paginator')
pickerNode = container.append('button').class('hx-data-table-paginator-picker hx-btn hx-btn-invisible').node()
dtPicker = new Picker(pickerNode, { dropdownOptions: { align: 'rbrt' } })
.on 'change', 'hx.data-table', (d) =>
if d.cause is 'user'
table.page(d.value.value, undefined, d.cause)
totalRows = container.append('span').class('hx-data-table-paginator-total-rows')
back = container.append('button').class('hx-data-table-paginator-back hx-btn hx-btn-invisible')
back.append('i').class('hx-icon hx-icon-chevron-left')
back.on 'click', 'hx.data-table', => if not back.classed('hx-data-table-btn-disabled') then table.page(table.page()-1)
forward = container.append('button').class('hx-data-table-paginator-forward hx-btn hx-btn-invisible')
forward.append('i').class('hx-icon hx-icon-chevron-right')
forward.on 'click', 'hx.data-table', => if not forward.classed('hx-data-table-btn-disabled') then table.page(table.page()+1)
[container, dtPicker]
# pageSizeOptions select
createPageSizeBlock = (table, options) ->
container = div('hx-data-table-page-size')
container.append('span').text(options.rowsPerPageText + ': ')
node = container.append('button').class('hx-data-table-page-size-picker hx-btn hx-btn-invisible').node()
dtPicker = new Picker(node, { dropdownOptions: { align: 'rbrt' } })
.on 'change', 'hx.data-table', (d) ->
if d.cause is 'user'
table.pageSize(d.value.value, undefined, 'user')
table.page(1, undefined, 'user')
[container, dtPicker]
spacer = -> div('hx-data-table-spacer')
createAdvancedSearchView = (selection, dataTable, options) ->
# Render individual row
advancedSearchRowEnter = (filterGroup, filterGroupIndex) ->
(filterRow, index, trueIndex) ->
typePickerOptions =
items: [
{ text: userFacingText('dataTable', 'and'), value: 'and' }
{ text: userFacingText('dataTable', 'or'), value: 'or' }
]
fullWidth: true
typePickerSel = picker(typePickerOptions)
.classed('hx-btn-outline hx-data-table-advanced-search-type hx-section hx-fixed', true)
typePickerSel.api('picker')
.on 'change', (data) ->
if data.cause is 'user'
prevFilters = dataTable.advancedSearch()
[leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
newFilters = if data.value.value is 'or'
[leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
[leftFilterGroups..., leftFilters, [currFilter, rightFilters...], rightFilterGroups...]
else
[leftAllButLast..., leftLast] = leftFilterGroups
[leftAllButLast..., [leftLast..., filterGroup...], rightFilterGroups...]
dataTable.advancedSearch(newFilters)
anyColumn = {
text: options.anyColumnText
value: 'any'
anyColumn: true
}
columnItems = filterRow.headers.map (header) ->
value: header.id
orig: header
# XXX Breaking: Renderer
# columnRenderer = (cell) ->
# if cell.anyColumn then span().text(cell.text)
# else columnOptionLookup(options, 'headerCellRenderer', cell.orig.id)(cell.orig, filterRow.headers)
columnRenderer = (element, cell) ->
if cell.anyColumn then select(element).text(cell.text)
else columnOptionLookup(options, 'headerCellRenderer', cell.orig.id)(element, cell.orig, filterRow.headers)
columnPickerOptions =
items: [anyColumn, columnItems...]
renderer: columnRenderer
fullWidth: true
columnPickerSel = picker(columnPickerOptions)
.classed('hx-btn-outline hx-data-table-advanced-search-column hx-section hx-fixed', true)
columnPickerSel.api('picker')
.on 'change', (data) ->
if data.cause is 'user'
prevFilters = dataTable.advancedSearch()
[leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
[leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
newFilter = merge(currFilter, {
column: data.value.value
})
delete newFilter.criteria
columnCriteria = columnOptionLookup(options, 'advancedSearchCriteria', data.value.value) || []
criteriaItems = ['contains', advancedSearchCriteriaValidate(columnCriteria)...]
criteriaPickerSel.api('picker')
.items(toCriteriaItems(criteriaItems))
dataTable.advancedSearch([leftFilterGroups..., [leftFilters..., newFilter, rightFilters...], rightFilterGroups...])
criteriaPickerOptions =
items: toCriteriaItems(['contains', advancedSearchCriteriaValidate(options.advancedSearchCriteria)...])
fullWidth: true
criteriaPickerSel = picker(criteriaPickerOptions)
.classed('hx-btn-outline hx-data-table-advanced-search-criteria hx-section hx-fixed', true)
criteriaPickerSel.api('picker')
.on 'change', (data) ->
if data.cause is 'user'
prevFilters = dataTable.advancedSearch()
[leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
[leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
newFilter = merge(currFilter, {
criteria: data.value.value
})
dataTable.advancedSearch([leftFilterGroups..., [leftFilters..., newFilter, rightFilters...], rightFilterGroups...])
criteriaAnyPlaceholder = div('hx-data-table-advanced-search-criteria-placeholder hx-text-disabled hx-background-disabled')
.text(userFacingText('dataTable', 'contains'))
debouncedInput = debounce 200, (e) ->
prevFilters = dataTable.advancedSearch()
[leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
[leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
newFilter = merge(currFilter, {
term: e.target.value
})
dataTable.advancedSearch([leftFilterGroups..., [leftFilters..., newFilter, rightFilters...], rightFilterGroups...])
termInput = detached('input').attr('placeholder', options.advancedSearchPlaceholder)
.class('hx-data-table-advanced-search-input hx-section')
.attr('required', 'required')
.on 'input', debouncedInput
removeBtn = button('hx-btn hx-negative hx-btn-outline hx-data-table-advanced-search-remove')
.add(i('hx-icon hx-icon-close'))
.on 'click', ->
prevFilters = dataTable.advancedSearch()
[leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
[leftFilters, _, rightFilters] = splitArray(filterGroup, trueIndex)
newFilters = if trueIndex is 0 and filterGroupIndex is 0
[rightFilters, rightFilterGroups...]
else if trueIndex is 0
[leftFilterGroup..., leftFilterGroupLast] = leftFilterGroups
[_, filters...] = filterGroup
[leftFilterGroup..., [leftFilterGroupLast..., filters...], rightFilterGroups...]
else
[leftFilterGroups..., [leftFilters..., rightFilters...], rightFilterGroups...]
filterToUse = newFilters.filter((group) => group.length)
dataTable.advancedSearch(if filterToUse.length then filterToUse else undefined)
@append('div').class('hx-data-table-advanced-search-filter hx-section hx-input-group hx-input-group-full-width')
.add(typePickerSel)
.add(columnPickerSel)
.add(criteriaAnyPlaceholder)
.add(criteriaPickerSel)
.add(div('hx-data-table-advanced-search-filter-input-container hx-input-group hx-no-pad hx-no-border')
.add(termInput)
.add(removeBtn))
.node()
advancedSearchRowUpdate = ({term, column, criteria}, element, index) ->
filterRowSel = select(element)
validContext = if not term then 'negative' else undefined
filterRowSel.select('.hx-data-table-advanced-search-type').api('picker')
.value(if index is 0 then 'or' else 'and')
trueColumn = column or 'any'
filterRowSel.select('.hx-data-table-advanced-search-column').api('picker')
.value(trueColumn)
columnCriteria = columnOptionLookup(options, 'advancedSearchCriteria', column) || []
criteriaItems = if trueColumn is 'any' then ['contains'] else ['contains', advancedSearchCriteriaValidate(columnCriteria)...]
filterRowSel.select('.hx-data-table-advanced-search-criteria')
.style('display', if criteriaItems.length is 1 then 'none' else 'block')
.api('picker')
.items(toCriteriaItems(criteriaItems))
.value(criteria || 'contains')
filterRowSel.select('.hx-data-table-advanced-search-criteria-placeholder')
.style('display', if criteriaItems.length is 1 then 'block' else 'none')
filterRowSel.select('.hx-data-table-advanced-search-input')
.value(term or '')
# Render grouped filters
advancedSearchGroupEnter = (filterGroup, index, trueIndex) ->
filterGroupSel = div('hx-data-table-advanced-search-filter-group')
filterGroupView = filterGroupSel.view('.hx-data-table-advanced-search-filter')
.enter(advancedSearchRowEnter(filterGroup, trueIndex))
.update(advancedSearchRowUpdate)
filterGroupSel.api('data-table.group', {
filterGroupView
})
@append(filterGroupSel).node()
advancedSearchGroupUpdate = (filterGroup, element, index) ->
select(element).api('data-table.group').filterGroupView.apply(filterGroup)
selection.view('.hx-data-table-advanced-search-filter-group')
.enter(advancedSearchGroupEnter)
.update(advancedSearchGroupUpdate)
class DataTable extends EventEmitter
constructor: (selector, options) ->
super()
resolvedOptions = merge({
allowHeaderWrap: false
compact: 'auto' # 'auto', true, false
displayMode: 'paginate' # 'paginate', 'all'
feed: undefined
showSearchAboveTable: false
filter: undefined
filterEnabled: true
showAdvancedSearch: false
advancedSearchEnabled: false
advancedSearchCriteria: undefined
advancedSearch: undefined
pageSize: 15
pageSizeOptions: undefined # supply an array of numbers to show the user
retainHorizontalScrollOnRender: true
retainVerticalScrollOnRender: false
selectEnabled: false
singleSelection: false
sort: undefined
sortEnabled: true
highlightOnHover: true
useStickyHeaders: true
selectedRows: []
expandedRows: []
# functions used for getting row state
rowIDLookup: (row) -> row.id
rowEnabledLookup: (row) -> not row.disabled
rowSelectableLookup: (row) -> true
rowCollapsibleLookup: (row) -> false
# functions for rendering
collapsibleRenderer: undefined
# XXX Breaking: Renderer
# cellRenderer: (cell, row) -> span().text(cell)
# headerCellRenderer: (cell, headers) -> span().text(cell.name)
cellRenderer: (element, cell, row) -> select(element).text(cell)
headerCellRenderer: (element, cell, headers) -> select(element).text(cell.name)
# per column options (headerCellRenderer, cellRenderer, sortEnabled)
columns: {}
clearSelectionText: userFacingText('dataTable','clearSelection')
loadingText: userFacingText('dataTable','loading')
noDataMessage: userFacingText('dataTable','noData')
noSortText: userFacingText('dataTable', 'noSort')
rowsPerPageText: userFacingText('dataTable','rowsPerPage')
searchPlaceholder: userFacingText('dataTable','search')
selectedRowsText: userFacingText('dataTable', 'selectedRows', true)
sortByText: userFacingText('dataTable','sortBy')
addFilterText: userFacingText('dataTable', 'addFilter')
clearFiltersText: userFacingText('dataTable', 'clearFilters')
anyColumnText: userFacingText('dataTable', 'anyColumn')
advancedSearchText: userFacingText('dataTable','advancedSearch')
advancedSearchPlaceholder: userFacingText('dataTable', 'search')
}, options)
resolvedOptions.pageSize = Math.min resolvedOptions.pageSize, 1000
resolvedOptions.advancedSearchEnabled = true if resolvedOptions.advancedSearch
resolvedOptions.showAdvancedSearch = true if resolvedOptions.advancedSearchEnabled
selection = select(selector)
.classed('hx-data-table', true)
.api('data-table', this)
.api(this)
content = div('hx-data-table-content')
# loading div
loadingDiv = div('hx-data-table-loading')
.add(div('hx-data-table-loading-inner')
.add(div('hx-spinner'))
.add(span().text(' ' + resolvedOptions.loadingText)))
statusBar = div('hx-data-table-status-bar')
statusBarText = span('hx-data-table-status-bar-text')
statusBarClear = span('hx-data-table-status-bar-clear')
.text(" (#{resolvedOptions.clearSelectionText})")
.on 'click', 'hx.data-table', =>
@_.selectedRows.clear()
selection.select('.hx-data-table-content').selectAll('.hx-data-table-row-selected').classed('hx-data-table-row-selected', false)
@updateSelected()
@emit 'selectedrowsclear'
controlPanelCompact = div('hx-data-table-control-panel-compact')
controlPanelCompactToggle = button('hx-data-table-control-panel-compact-toggle hx-btn hx-btn-invisible')
.add(i('hx-icon hx-icon-bars'))
.on 'click', ->
toggleElem = controlPanel
if toggleElem.classed('hx-data-table-compact-hide')
toggleElem.classed('hx-data-table-compact-hide', false)
.style('height', '0px')
.morph().with('expandv', 150)
.then ->
controlPanelCompact.classed('hx-data-table-control-panel-compact-open', true)
.go()
else
toggleElem.morph().with('collapsev', 50)
.then ->
toggleElem.classed('hx-data-table-compact-hide', true)
controlPanelCompact.classed('hx-data-table-control-panel-compact-open', false)
.thenStyle('display', '')
.go()
controlPanel = div('hx-data-table-control-panel hx-data-table-compact-hide')
controlPanelInner = div('hx-data-table-control-panel-inner')
# compact sort - always on the page, only visible in compact mode (so we can just change the class and everything will work)
compactSort = div('hx-data-table-sort')
.classed('hx-data-table-sort-visible', resolvedOptions.sortEnabled)
.add(span().text(resolvedOptions.sortByText + ': '))
sortColPicker = new Picker(compactSort.append('button').class('hx-btn hx-btn-invisible').node())
sortColPicker.on 'change', 'hx.data-table', (d) =>
if d.cause is 'user' then @sort({column: sortColPicker.value().column, direction: sortColPicker.value().direction})
filterContainer = div('hx-data-table-filter-container')
onInput = debounce 200, => @filter(filterInput.value(), undefined, 'user')
filterInput = detached('input').class('hx-data-table-filter')
.attr('placeholder', resolvedOptions.searchPlaceholder)
.classed('hx-data-table-filter-visible', resolvedOptions.filterEnabled)
.on 'input', 'hx.data-table', onInput
advancedSearchContainer = div('hx-data-table-advanced-search-container')
advancedSearchToggle = button('hx-data-table-advanced-search-toggle hx-btn hx-btn-invisible')
.text(resolvedOptions.advancedSearchText)
advancedSearchToggleButton = new Toggle(advancedSearchToggle.node())
advancedSearchToggleButton.on 'change', (data) => @advancedSearchEnabled(data)
advancedSearch = div('hx-data-table-advanced-search')
advancedSearchView = createAdvancedSearchView(advancedSearch, this, resolvedOptions)
advancedSearchButtons = div('hx-data-table-advanced-search-buttons')
addFilter = =>
currentFilters = @advancedSearch() or [[]]
[previousFilterGroups..., lastFilterGroup] = currentFilters
newLastFilterGroup = [lastFilterGroup..., {
column: 'any',
term: ''
}]
@advancedSearch([previousFilterGroups..., newLastFilterGroup])
clearFilters = => @advancedSearch(undefined)
advancedSearchAddFilterButton = button('hx-btn hx-positive hx-data-table-advanced-search-add-filter hx-data-table-advanced-search-button hx-btn-outline')
.add(i('hx-data-table-advanced-search-icon hx-icon hx-icon-plus hx-text-positive'))
.add(span().text(resolvedOptions.addFilterText))
.on('click', addFilter)
advancedSearchClearFilterButton = button('hx-btn hx-negative hx-data-table-advanced-search-clear-filters hx-data-table-advanced-search-button hx-btn-outline')
.add(i('hx-data-table-advanced-search-icon hx-icon hx-icon-close hx-text-negative'))
.add(span().text(resolvedOptions.clearFiltersText))
.on('click', clearFilters)
# We create multiple copies of these to show in different places
# This makes it easier to change the UI as we can show/hide instead of moving them
[pageSize, pageSizePicker] = createPageSizeBlock(this, resolvedOptions)
[pageSizeBottom, pageSizePickerBottom] = createPageSizeBlock(this, resolvedOptions)
[pagination, pagePicker] = createPaginationBlock(this)
[paginationBottom, pagePickerBottom] = createPaginationBlock(this)
[paginationCompact, pagePickerCompact] = createPaginationBlock(this)
# The main pagination is hidden as the compact control panel contains a version of it
pagination.classed('hx-data-table-compact-hide', true)
controlPanelBottom = div('hx-data-table-control-panel-bottom')
# Create the structure in one place
# Some entities still make sense to be built individually (e.g. the loading div)
selection
.add(content)
.add(statusBar
.add(statusBarText)
.add(statusBarClear))
# Control panel displayed at the top for compact mode
.add(controlPanelCompact
.add(paginationCompact)
.add(spacer())
.add(controlPanelCompactToggle))
# Main control panel - contains all the components
.add(controlPanel
.add(controlPanelInner
.add(compactSort)
.add(pagination)
.add(pageSize)
.add(spacer())
.add(filterContainer
.add(advancedSearchToggle)
.add(filterInput)))
# The advanced search container isn't in the main control panel as it is easier to style outside
.add(advancedSearchContainer
.add(advancedSearch)
.add(advancedSearchButtons
.add(advancedSearchAddFilterButton)
.add(advancedSearchClearFilterButton))))
# Bottom control panel - shown in compact mode and when the search is at the top
.add(controlPanelBottom
.add(spacer())
.add(pageSizeBottom)
.add(paginationBottom))
# Add the loading div last - helps keep it on top of everything
.add(loadingDiv)
# 'private' variables
@_ = {
selection: selection
options: resolvedOptions
page: 1
pagePickers: [pagePicker, pagePickerCompact, pagePickerBottom]
pageSizePickers: [pageSizePicker, pageSizePickerBottom]
statusBar: statusBar
sortColPicker: sortColPicker
selectedRows: new HSet(resolvedOptions.selectedRows) # holds the ids of the selected rows
expandedRows: new HSet(resolvedOptions.expandedRows)
renderedCollapsibles: {}
compactState: (resolvedOptions.compact is 'auto' and selection.width() < collapseBreakPoint) or resolvedOptions.compact is true
advancedSearchView: advancedSearchView
advancedSearchToggleButton: advancedSearchToggleButton
}
# responsive page resize when compact is 'auto'
selection.on 'resize', 'hx.data-table', =>
selection.selectAll('.hx-data-table-collapsible-content-container').map (e) =>
e.style('max-width', (parseInt(selection.style('width')) - @_.collapsibleSizeDiff) + 'px')
state = (@compact() is 'auto' and selection.width() < collapseBreakPoint) or @compact() is true
selection.classed 'hx-data-table-compact', state
if @_.compactState isnt state
@_.compactState = state
@emit('compactchange', {value: @compact(), state: state, cause: 'user'})
dtRandomId = randomId()
# deal with shift being down - prevents the text in the table being selected when shift
# selecting multiple rows (as it looks bad) but also means that data can be selected if required
# XXX: make this work better / come up with a better solution
select('body').on 'keydown', 'hx.data-table.shift.' + dtRandomId, (e) =>
if e.shiftKey and @selectEnabled()
selection.classed('hx-data-table-disable-text-selection', true)
select('body').on 'keyup', 'hx.data-table.shift.' + dtRandomId, (e) =>
if not e.shiftKey and @selectEnabled()
selection.classed('hx-data-table-disable-text-selection', false)
# Methods for changing the options
#---------------------------------
# general purpose function for setting / getting an option
option = (name) ->
(value, cb, cause) ->
options = @_.options
if arguments.length > 0
options[name] = value
@emit(name.toLowerCase() + 'change', {value: value, cause: (cause or 'api')})
@render(cb)
this
else options[name]
  # Getter/setter accessors generated by `option`: call with no arguments to
  # read the current value, or with (value[, cb[, cause]]) to set it, emit a
  # '<name>change' event and re-render the table.
  collapsibleRenderer: option('collapsibleRenderer')
  compact: option('compact')
  displayMode: option('displayMode')
  feed: option('feed')
  filter: option('filter')
  advancedSearch: option('advancedSearch')
  showAdvancedSearch: option('showAdvancedSearch')
  advancedSearchEnabled: option('advancedSearchEnabled')
  showSearchAboveTable: option('showSearchAboveTable')
  filterEnabled: option('filterEnabled')
  noDataMessage: option('noDataMessage')
  pageSize: option('pageSize')
  pageSizeOptions: option('pageSizeOptions')
  retainHorizontalScrollOnRender: option('retainHorizontalScrollOnRender')
  retainVerticalScrollOnRender: option('retainVerticalScrollOnRender')
  rowCollapsibleLookup: option('rowCollapsibleLookup')
  rowEnabledLookup: option('rowEnabledLookup')
  rowIDLookup: option('rowIDLookup')
  rowSelectableLookup: option('rowSelectableLookup')
  selectEnabled: option('selectEnabled')
  highlightOnHover: option('highlightOnHover')
  singleSelection: option('singleSelection')
  useStickyHeaders: option('useStickyHeaders')
  sort: option('sort')
# general purpose function for setting / getting a column option (or the default option of the column id is not specified)
columnOption = (name) ->
(columnId, value, cb) ->
options = @_.options
if arguments.length > 1 and isString(columnId)
options.columns[columnId] ?= {}
options.columns[columnId][name] = value
@emit(name.toLowerCase() + 'change', {column: columnId, value: value, cause: 'api'})
@render(cb)
this
else if arguments.length > 0
if isString(columnId) and options.columns[columnId]
options.columns[columnId][name]
else
options[name] = arguments[0]
@emit(name.toLowerCase() + 'change', {value: value, cause: 'api'})
@render(arguments[1])
this
else options[name]
  # Per-column getter/setters generated by `columnOption`: call with
  # (columnId, value) to set one column's option, with a non-string value to
  # set the table-wide default, or with (columnId) / () to read.
  advancedSearchCriteria: columnOption('advancedSearchCriteria')
  allowHeaderWrap: columnOption('allowHeaderWrap')
  cellRenderer: columnOption('cellRenderer')
  headerCellRenderer: columnOption('headerCellRenderer')
  sortEnabled: columnOption('sortEnabled')
# function for setting / getting options that are only column specific and cannot be set for the whole table
columnOnlyOption = (name) ->
(columnId, value, cb) ->
options = @_.options
if isString(columnId)
if arguments.length > 1
options.columns[columnId] ?= {}
options.columns[columnId][name] = value
@emit(name.toLowerCase() + 'change', {column: columnId, value: value, cause: 'api'})
@render(cb)
this
else if options.columns[columnId]
options.columns[columnId][name]
maxWidth: columnOnlyOption('maxWidth')
# Methods for changing the state of the table
# -------------------------------------------
page: (value, cb, cause) ->
if arguments.length > 0
@_.page = Math.max(1, value)
if @_.numPages?
@_.page = Math.min @_.page, @_.numPages
@emit('pagechange', {value: @_.page, cause: cause or 'api'})
@render(cb)
this
else @_.page
selectedRows: (value, cb) ->
if arguments.length > 0 and not isFunction(value)
# Deal with single select mode when setting the selected rows
if @singleSelection() and isArray(value) and value.length
value = [value[0]]
@_.selectedRows = new HSet(value)
newSelectedRows = @_.selectedRows.values()
@emit('selectedrowschange', {value: newSelectedRows, cause: 'api'})
@_.userLastSelectedIndex = undefined
@render(cb)
this
else
@_.selectedRows.values()
expandedRows: (value, cb) ->
if arguments.length > 0 and not isFunction(value)
@_.expandedRows = new HSet(value)
@render(cb)
@emit('expandedrowschange', {value: @_.expandedRows.values(), cause: 'api'})
this
else
@_.expandedRows.values()
rowsForIds: (ids, cb) ->
if cb? then @feed().rowsForIds(ids, @rowIDLookup(), cb)
this
# Methods that perform an action on the table
# -------------------------------------------
renderSuppressed: (value) ->
if arguments.length > 0
@_.renderSuppressed = value
this
else @_.renderSuppressed
# redraws the table
  # redraws the table
  render: (cb) ->
    # Rebuilds the whole table from the current feed and options. The work is
    # asynchronous: headers, total count and rows are requested from the feed
    # via callbacks, and the freshly built content is swapped in once the row
    # data arrives. `cb` (optional) is invoked after the swap completes.
    # Returns `this` immediately, before the async work finishes.
    if @_.renderSuppressed then return
    feed = @feed()
    # check that the feed has been defined - if it hasn't then there is no point in continuing
    if feed is undefined or (feed.headers is undefined or feed.totalCount is undefined or feed.rows is undefined)
      logger.warn('No feed specified when rendering data table')
      return
    selection = @_.selection
    options = @_.options
    # some utility functions
    getColumnOption = (name, id) -> columnOptionLookup(options, name, id)
    rowToArray = (headers, obj) -> headers.map (header) -> obj.cells[header.id]
    # build the main structure of the table in a detached container
    container = div('hx-data-table-content')
    table = container.append('table').class('hx-data-table-table hx-table')
      .classed('hx-table-no-hover', not options.highlightOnHover)
    thead = table.append('thead').class('hx-data-table-head')
    tbody = table.append('tbody').class('hx-data-table-body')
    headerRow = thead.append('tr').class('hx-data-table-row')
    # make the loading div visible
    selection.select('.hx-data-table-loading').style('display', '')
    # the advanced search UI takes the place of the plain filter input
    advancedSearchVisibleAndEnabled = (not options.filterEnabled or options.showAdvancedSearch) and options.advancedSearchEnabled
    filterSel = selection.select('.hx-data-table-filter')
      .classed('hx-data-table-filter-visible', options.filterEnabled and not advancedSearchVisibleAndEnabled)
    nextFilterValue = @filter()
    prevFilterValue = filterSel.value()
    # only push the filter value into the input when it actually changed
    if nextFilterValue isnt prevFilterValue
      filterSel.value(nextFilterValue)
    @_.advancedSearchToggleButton.value(options.advancedSearchEnabled)
    selection.select('.hx-data-table-advanced-search-toggle')
      .classed('hx-data-table-advanced-search-visible', options.filterEnabled and options.showAdvancedSearch)
    selection.select('.hx-data-table-advanced-search-container')
      .classed('hx-data-table-advanced-search-visible', advancedSearchVisibleAndEnabled)
    selection.select('.hx-data-table-control-panel')
      .classed('hx-data-table-filter-enabled', options.filterEnabled)
    showCompactControlPanelToggle = options.filterEnabled or options.sortEnabled or options.advancedSearchEnabled or options.pageSizeOptions?.length
    selection.select('.hx-data-table-control-panel-compact-toggle')
      .classed('hx-data-table-control-panel-compact-toggle-visible', showCompactControlPanelToggle)
    # load in the data needed
    # XXX: how much of this could be split out so it's not re-defined every time render is called?
    feed.headers (headers) =>
      if advancedSearchVisibleAndEnabled
        currentFilters = @advancedSearch() or []
        # give each advanced-search row access to the headers and column options
        @_.advancedSearchView.apply currentFilters.filter((x) -> x.length).map (filterGroup) ->
          filterGroup.map (filterRow) ->
            merge(filterRow, {
              headers,
              getColumnOption
            })
      selection.select('.hx-data-table-sort')
        .classed('hx-data-table-sort-visible', options.sortEnabled or headers.some((header) -> getColumnOption('sortEnabled', header.id)))
      feed.totalCount (totalCount) =>
        if options.displayMode is 'paginate'
          start = (@page() - 1) * options.pageSize
          end = @page() * options.pageSize - 1
        else
          start = undefined
          end = undefined
        range = {
          start: start,
          end: end,
          sort: @sort(),
          filter: @filter(),
          advancedSearch: @advancedSearch(),
          useAdvancedSearch: options.showAdvancedSearch and options.advancedSearchEnabled
        }
        feed.rows range, ({rows, filteredCount}) =>
          # filteredCount is undefined for 'infinite' feeds that cannot count their rows
          if options.displayMode is 'paginate'
            multiPage = false
            selection.classed('hx-data-table-infinite', filteredCount is undefined)
            if filteredCount is undefined
              @_.numPages = undefined
              numText = (start+1) + ' - ' + (end+1)
              multiPage = true
            else
              @_.numPages = Math.max(1, Math.ceil(filteredCount / options.pageSize))
              if @page() > @_.numPages then @page(@_.numPages)
              multiPage = @_.numPages > 1
              if filteredCount > 0 and @_.numPages > 1
                numText = 'of ' + filteredCount
              items = for idx in [1..@_.numPages] by 1
                num = idx * options.pageSize
                text: (num + 1 - options.pageSize) + ' - ' + Math.min(num, filteredCount) # e.g. 1 - 15
                value: idx
              @_.pagePickers.forEach (picker) =>
                picker
                  .items(items)
                  .value(@page())
            selection.selectAll('.hx-data-table-paginator').classed('hx-data-table-paginator-visible', multiPage)
            selection.selectAll('.hx-data-table-paginator-total-rows').text(numText or '')
            selection.selectAll('.hx-data-table-paginator-back').classed('hx-data-table-btn-disabled', @page() is 1)
            selection.selectAll('.hx-data-table-paginator-forward').classed('hx-data-table-btn-disabled', @page() is @_.numPages)
            selection.select('.hx-data-table-control-panel-compact')
              .classed('hx-data-table-control-panel-compact-visible', multiPage or showCompactControlPanelToggle)
            selection.select('.hx-data-table-control-panel-bottom')
              .classed('hx-data-table-control-panel-bottom-visible', multiPage or options.pageSizeOptions?.length)
            selection.select('.hx-data-table-control-panel')
              .classed('hx-data-table-control-panel-visible', multiPage or showCompactControlPanelToggle)
          if headers.some((header) -> getColumnOption('sortEnabled', header.id))
            currentSort = (@sort() or {})
            # filter out columns that are not sortable so they don't show in the list for compact mode
            sortColumns = flatten(headers
              .map((header) -> if getColumnOption('sortEnabled', header.id)
                [
                  {text: header.name, value: header.id + 'asc', column: header.id, direction: 'asc', cell: header}
                  {text: header.name, value: header.id + 'desc', column: header.id, direction: 'desc', cell: header}
                ])
              .filter(defined))
            # set the values for the compact sort control
            @_.sortColPicker
              # XXX Breaking: Renderer
              # .renderer((option) ->
              #   if option.value
              #     getColumnOption('headerCellRenderer', option.cell.id)(option.cell, headers)
              #       .add(i('hx-data-table-compact-sort-arrow hx-icon hx-icon-chevron-' + (if option.direction is 'asc' then 'up' else 'down')))
              #   else
              #     span().text(option.text)
              # )
              .renderer((element, option) ->
                if option.value
                  getColumnOption('headerCellRenderer', option.cell.id)(element, option.cell, headers)
                  select(element).append('i')
                    .class('hx-data-table-compact-sort-arrow hx-icon hx-icon-chevron-' + (if option.direction is 'asc' then 'up' else 'down'))
                else
                  select(element).text(option.text)
              )
              .items([{text: options.noSortText, value: undefined}].concat sortColumns)
            if currentSort.column and @_.sortColPicker.value().value isnt (currentSort.column + currentSort.direction)
              @_.sortColPicker.value({value: currentSort.column + currentSort.direction})
          # populate the page size picker if there are options set
          selectPageSize = options.pageSizeOptions? and options.pageSizeOptions.length > 0
          selection.selectAll('.hx-data-table-page-size').classed('hx-data-table-page-size-visible', selectPageSize)
          if selectPageSize
            # make sure the current page size is always offered
            if options.pageSizeOptions.indexOf(options.pageSize) is -1
              options.pageSizeOptions.push options.pageSize
            pageSizeOptions = options.pageSizeOptions
              .sort(compare)
              .map((item) -> {text: item, value: item})
            @_.pageSizePickers.forEach (picker) ->
              picker
                .items(pageSizeOptions)
                .value(options.pageSize)
          # build the grouped header
          if headers.some((header) -> header.groups?)
            relevantHeaders = headers.filter((e) -> e.groups?).map((e) -> e.groups.length)
            maxHeaderDepth = Math.max.apply(null, relevantHeaders)
            # Map over to populate columns with groups of '' where not included
            headerGroups = headers.map (e) ->
              groups = e.groups or []
              groups.push '' while groups.length < maxHeaderDepth
              groups
            for row in [maxHeaderDepth-1..0] by -1
              groupedRow = headerRow.insertBefore 'tr'
              groupedRow.append('th').class('hx-data-table-control') if options.selectEnabled or options.collapsibleRenderer?
              count = 1
              for column in [1..headerGroups.length] by 1
                col = headerGroups[column]
                prevCol = headerGroups[column-1]
                if col? and prevCol?
                  parent = col.slice(row, maxHeaderDepth).toString()
                  prevParent = prevCol.slice(row, maxHeaderDepth).toString()
                # emit a grouped cell when the run of equal group names ends.
                # NOTE(review): relies on short-circuit - on the final
                # iteration `col` is undefined and only the first clause runs.
                if column is headerGroups.length or col[row] isnt prevCol[row] or parent isnt prevParent
                  groupedRow.append('th')
                    .attr('colspan', count)
                    .class('hx-data-table-cell-grouped')
                    .text(prevCol[row])
                  count = 0
                count++
          # add the 'select all' checkbox to the header
          if options.selectEnabled or options.collapsibleRenderer?
            headerControlBox = headerRow.append('th').class('hx-data-table-control hx-table-head-no-border')
            if options.selectEnabled and not options.singleSelection
              headerCheckBox = headerControlBox.append('div').class('hx-data-table-checkbox')
                .on 'click', 'hx.data-table', =>
                  if rows.length > 0
                    enabledRows = rows.filter (row) -> options.rowEnabledLookup(row)
                    selectMulti(0, rows.length - 1, not enabledRows.every((row) => @_.selectedRows.has(options.rowIDLookup(row))))
              headerCheckBox.append('i').class('hx-icon hx-icon-check')
          # build the header
          headers.forEach (header, i) =>
            cellDiv = headerRow.append('th').class('hx-data-table-cell')
              .classed('hx-table-header-allow-wrap', getColumnOption('allowHeaderWrap', header.id))
            cellDivContent = cellDiv.append('div').class('hx-data-table-cell-inner')
            # XXX Breaking: Renderer
            # cellDivContent
            #   .add(div('hx-data-table-title')
            #     .add(getColumnOption('headerCellRenderer', header.id)(header, headers)))
            getColumnOption('headerCellRenderer', header.id)(
              cellDivContent.append('span').class('hx-data-table-title').node(),
              header,
              headers,
            )
            if getColumnOption('sortEnabled', header.id)
              cellDiv.classed('hx-data-table-cell-sort-enabled', true)
              currentSort = @sort()
              dirClass = if currentSort and currentSort.column is header.id
                'hx-icon-sort-' + currentSort.direction + ' hx-data-table-sort-on'
              else 'hx-icon-sort'
              cellDivContent.append('i').class('hx-icon ' + dirClass + ' hx-data-table-sort-icon')
              # clicking a sortable header cycles asc -> desc -> no sort
              cellDiv.on 'click', 'hx.data-table', =>
                currentSort = @sort() or {}
                direction = if currentSort.column is header.id
                  if currentSort.direction is 'asc' then 'desc'
                else 'asc'
                column = if direction isnt undefined then header.id
                @sort({column: column, direction: direction}, undefined, 'user')
          # re-applies the selected / has-selection classes and status bar text
          @updateSelected = =>
            parentFilter = (parent) ->
              (sel) -> sel.node().parentNode is parent.node()
            getSelectableRows = (parent) ->
              parent
                .selectAll('.hx-data-table-row')
                .filter(parentFilter(parent))
                .classed('hx-data-table-row-selected', false)
            rowDivs = getSelectableRows(tbody)
            leftHeaderBody = container.select('.hx-sticky-table-header-left').select('tbody')
            checkBoxDivs = getSelectableRows(leftHeaderBody)
            if @_.selectedRows.size > 0
              for row, rowIndex in rows
                if @_.selectedRows.has(options.rowIDLookup(row))
                  select(rowDivs.nodes[rowIndex]).classed('hx-data-table-row-selected', true)
                  if checkBoxDivs.nodes[rowIndex]?
                    select(checkBoxDivs.nodes[rowIndex]).classed('hx-data-table-row-selected', true)
            pageHasSelection = tbody.selectAll('.hx-data-table-row-selected').size() > 0
            selection.classed('hx-data-table-has-page-selection', pageHasSelection and not options.singleSelection)
            selection.classed('hx-data-table-has-selection', @_.selectedRows.size > 0 and not options.singleSelection)
            if totalCount isnt undefined
              @_.statusBar
                .select('.hx-data-table-status-bar-text')
                .text(userFacingText.format(options.selectedRowsText, { selected: @_.selectedRows.size, total: totalCount }))
          # handles multi row selection ('select all' and shift selection)
          selectMulti = (start, end, force) =>
            newRows = []
            newRows.push rows[idx] for idx in [start..end] by 1
            for row in newRows
              if options.rowEnabledLookup(row) and options.rowSelectableLookup(row)
                id = options.rowIDLookup(row)
                @_.selectedRows[if force then 'add' else 'delete'](id)
                @emit 'selectedrowschange', {row: row, rowValue: @_.selectedRows.has(id), value: @selectedRows(), cause: 'user'}
            @updateSelected()
          # handles row selection.
          selectRow = (row, index, shiftDown) =>
            if @_.userLastSelectedIndex?
              if options.singleSelection and index isnt @_.userLastSelectedIndex
                @_.selectedRows.clear()
              else
                # does the check for whether we're shift selecting and calls into selectMulti if we are
                if shiftDown and index isnt @_.userLastSelectedIndex
                  force = @_.selectedRows.has(options.rowIDLookup(rows[@_.userLastSelectedIndex]))
                  if index > @_.userLastSelectedIndex then selectMulti(@_.userLastSelectedIndex + 1, index, force)
                  else selectMulti(index, @_.userLastSelectedIndex, force)
                  return
            @_.userLastSelectedIndex = index
            if options.rowSelectableLookup(row)
              id = options.rowIDLookup(row)
              deleteOrAdd = if @_.selectedRows.has(id) then 'delete' else 'add'
              @_.selectedRows[deleteOrAdd](id)
              @emit 'selectedrowschange', {row: row, rowValue: @_.selectedRows.has(id), value: @selectedRows(), cause: 'user'}
            @updateSelected()
          # Deal with collapsible rows
          buildCollapsible = ->
            contentRow = detached('tr').class('hx-data-table-collapsible-content-row')
            hiddenRow = detached('tr').class('hx-data-table-collapsible-row-spacer')
            # Add an empty cell so the sticky headers display correctly
            contentRow.append('td').class('hx-data-table-collapsible-cell hx-data-table-collapsible-cell-empty')
            # The div that the user will populate with the collapsibleRender function
            contentDiv = contentRow.append('td').class('hx-data-table-collapsible-cell')
              .attr('colspan',fullWidthColSpan)
              .append('div').class('hx-data-table-collapsible-content-container')
              .append('div').class('hx-data-table-collapsible-content')
            {contentRow: contentRow, hiddenRow: hiddenRow, contentDiv: contentDiv}
          toggleCollapsible = (node, row, force) =>
            # once rows have been clicked once, the nodes are stored in the _.renderedCollapsibles object for re-use
            rowId = options.rowIDLookup(row)
            cc = @_.renderedCollapsibles[rowId] or buildCollapsible(row)
            @_.renderedCollapsibles[rowId] = cc
            # We always insert after here to make sure the nodes are added when setting the collapsible rows with the API
            node.insertAfter(cc.hiddenRow).insertAfter(cc.contentRow)
            currentVis = if force? then force else !cc.contentRow.classed('hx-data-table-collapsible-row-visible')
            cc.contentRow.classed('hx-data-table-collapsible-row-visible', currentVis)
            node.classed('hx-data-table-collapsible-row-visible', currentVis)
            node.select('.hx-data-table-collapsible-toggle').select('i').class(if currentVis then 'hx-icon hx-icon-minus' else 'hx-icon hx-icon-plus')
            # XXX Breaking: Renderer
            # if currentVis then cc.contentDiv.append(options.collapsibleRenderer(row))
            if currentVis
              options.collapsibleRenderer(cc.contentDiv.node(), row)
            else
              # collapsing drops the cached nodes so the content is re-rendered next open
              @_.renderedCollapsibles[rowId].contentRow.remove()
              @_.renderedCollapsibles[rowId].hiddenRow.remove()
              delete @_.renderedCollapsibles[rowId]
            @_.expandedRows[if currentVis then 'add' else 'delete'](rowId)
            @_.stickyHeaders?.render()
            # remember the width difference so the resize handler can size the content
            @_.collapsibleSizeDiff = parseInt(selection.style('width')) - parseInt(select(cc.contentDiv.node().parentNode).style('width'))
            currentVis
          # build the rows
          if filteredCount is undefined or filteredCount > 0
            rows.forEach (row, rowIndex) =>
              tr = tbody.append('tr').class('hx-data-table-row')
                .classed('hx-data-table-row-selected', @_.selectedRows.has(options.rowIDLookup(row)))
                .classed('hx-data-table-row-disabled', not options.rowEnabledLookup(row))
              tr.on 'click', 'hx.data-table', (e) => @emit 'rowclick', {data: row, node: tr.node()}
              rowIsCollapsible = options.rowCollapsibleLookup(row) # stored as we use it more than once
              # used in compact mode to display the tick correctly without letting text flow behind it.
              tr.classed('hx-data-table-row-select-enabled', options.selectEnabled)
              if options.selectEnabled or options.collapsibleRenderer?
                controlDiv = tr.append('th').class('hx-data-table-control')
                if options.selectEnabled
                  checkbox = controlDiv.append('div').class('hx-data-table-checkbox')
                  checkbox.append('i').class('hx-icon hx-icon-check')
                  if options.rowEnabledLookup(row)
                    checkbox.on 'click', 'hx.data-table', (e) ->
                      e.stopPropagation() # prevent collapsibles being toggled by tick selection in compact mode
                      selectRow(row, rowIndex, e.shiftKey)
                if options.collapsibleRenderer?
                  collapsibleControl = controlDiv.append('div')
                    .class('hx-data-table-collapsible-toggle')
                    .classed('hx-data-table-collapsible-disabled', not rowIsCollapsible)
                  collapsibleControl.append('i').class('hx-icon hx-icon-plus')
                  if rowIsCollapsible
                    # restore open collapsibles on render
                    if @_.expandedRows.has(options.rowIDLookup(row)) then toggleCollapsible(tr, row, true)
                    collapsibleControl.on 'click', 'hx.data-table.collapse-row', (e) =>
                      currentVis = toggleCollapsible(tr, row)
                      @emit('expandedrowschange', {value: @_.expandedRows.values(), row: row, rowValue: currentVis, cause: 'user'})
              # populate the row
              for cell, columnIndex in rowToArray(headers, row)
                # Render the 'key' value using the headerCellRenderer
                # XXX Breaking: Renderer
                # keyDiv = div('hx-data-table-cell-key')
                #   .add(getColumnOption('headerCellRenderer', headers[columnIndex].id)(headers[columnIndex], headers))
                keyDiv = div('hx-data-table-cell-key')
                getColumnOption('headerCellRenderer', headers[columnIndex].id)(keyDiv.node(), headers[columnIndex], headers)
                cellElem = tr.append('td').class('hx-data-table-cell')
                columnMaxWidth = getColumnOption('maxWidth', headers[columnIndex].id)
                if columnMaxWidth?
                  # fix the column to the configured pixel width
                  columnMaxWidth = parseInt(columnMaxWidth) + 'px'
                  cellElem
                    .style('max-width', columnMaxWidth)
                    .style('width', columnMaxWidth)
                    .style('min-width', columnMaxWidth)
                # XXX Breaking: Renderer
                # cellDiv = cellElem.add(keyDiv)
                #   .append('div').class('hx-data-table-cell-value')
                #   .add(getColumnOption('cellRenderer', headers[columnIndex].id)(cell, row)).node()
                cellDiv = cellElem.add(keyDiv)
                  .append('div').class('hx-data-table-cell-value').node()
                getColumnOption('cellRenderer', headers[columnIndex].id)(cellDiv, cell, row)
          else # append the 'No Data' row.
            tbody.append('tr').class('hx-data-table-row-no-data').append('td').attr('colspan', fullWidthColSpan).text(options.noDataMessage)
          @updateSelected()
          # retain the horizontal scroll unless the page has been changed.
          # We only retain the horizontal scroll as when sorting/filtering on
          # the first page it retains the vertical scroll which looks weird.
          if options.useStickyHeaders and @page() is @_.oldPage
            wrapperNode = selection.select('.hx-data-table-content > .hx-sticky-table-wrapper').node()
            scrollLeft = wrapperNode.scrollLeft if options.retainHorizontalScrollOnRender
            scrollTop = wrapperNode.scrollTop if options.retainVerticalScrollOnRender
          # store the old page - only used for retaining the scroll positions
          @_.oldPage = @page()
          # remove the old content div, and slot in the new one
          selection.select('.hx-data-table-content').insertAfter(container)
          selection.select('.hx-data-table-content').remove()
          selection.classed('hx-data-table-compact', ((options.compact is 'auto') and (selection.width() < collapseBreakPoint)) or (options.compact is true))
            .classed('hx-data-table-show-search-above-content', options.showSearchAboveTable)
          # set up the sticky headers
          if options.useStickyHeaders
            stickFirstColumn = options.selectEnabled or options.collapsibleRenderer?
            stickyOpts = {stickFirstColumn: stickFirstColumn and (filteredCount is undefined or filteredCount > 0), fullWidth: true}
            @_.stickyHeaders = new StickyTableHeaders(container.node(), stickyOpts)
          # restore horizontal scroll position
          selection.select('.hx-data-table-content > .hx-sticky-table-wrapper').node().scrollLeft = scrollLeft if scrollLeft?
          selection.select('.hx-data-table-content > .hx-sticky-table-wrapper').node().scrollTop = scrollTop if scrollTop?
          # hide the loading spinner as we're done rendering
          selection.shallowSelect('.hx-data-table-loading').style('display', 'none')
          @emit 'render'
          cb?()
    this
###
Feeds
A feed should be an object with the following functions:
{
headers: (cb) -> # returns a list of header objects ({name, id})
totalCount: (cb) -> # returns the total number of rows in the data set
    rows: (range, cb) -> # returns the row data for the range object specified (range = { start, end, sort, filter, advancedSearch, useAdvancedSearch }) along with the filtered count
rowsForIds: (ids, lookupRow, cb) -> # returns the rows for the ids supplied
}
There are predefined feeds for objects and urls.
###
# Splits a search term into its whitespace-separated parts.
whitespaceSplitRegex = /\s+/
# Strips leading and trailing whitespace from a search term.
stripLeadingAndTrailingWhitespaceRegex = /^\s+|\s+$/g
# Builds the lowercase haystack string for a row: every cell value is passed
# through `cellValueLookup` and the results joined with single spaces.
getRowSearchTerm = (cellValueLookup, row) ->
  parts = []
  for key, cellValue of row.cells
    parts.push cellValueLookup(cellValue)
  parts.join(' ').toLowerCase()
# Upper-cases the first character of `str`, leaving the rest untouched
# (safe on the empty string).
capitalize = (str) ->
  head = str.charAt(0)
  head.toUpperCase() + str.slice(1)
# Default matcher for the filter / advanced search. Returns true when at
# least one whitespace-separated part of `term` matches `rowSearchTerm`
# (a string or an array of strings) using the named fuzzy-filter criteria
# ('contains' by default, dispatched through the `filter` module).
defaultTermLookup = (term, rowSearchTerm, criteria = 'contains') ->
  haystacks = if isString(rowSearchTerm) then [rowSearchTerm] else rowSearchTerm
  filterName = "filter" + capitalize(criteria)
  parts = term.replace(stripLeadingAndTrailingWhitespaceRegex, '').split(whitespaceSplitRegex)
  matchingPart = find parts, (part) -> filter[filterName](haystacks, part.toLowerCase()).length
  defined matchingPart
# Builds a predicate (filters, row) -> bool for the advanced search.
# `filters` is an array of OR-ed groups, each group an array of AND-ed
# {column, term, criteria} conditions. A row passes when some group contains
# no failing condition (conditions with an empty term never fail).
getAdvancedSearchFilter = (cellValueLookup = identity, termLookup = defaultTermLookup) ->
  (filters, row) ->
    # Pre-compute the whole-row haystack once per row
    wholeRowText = (v for k, v of row.cells).map(cellValueLookup).join(' ').toLowerCase()
    # If term is empty this will return false
    matchingGroup = find filters, (groupedFilters) ->
      failingCondition = find groupedFilters, (condition) ->
        haystack = if condition.column is 'any' then wholeRowText else (cellValueLookup(row.cells[condition.column]) + '').toLowerCase()
        condition.term and not termLookup(condition.term.toLowerCase(), haystack, condition.criteria)
      not defined failingCondition
    defined matchingGroup
# Returns the filtered row set, re-using `filterCache` where possible.
# `term` can be a string (regular filter) or an array (advanced search):
#  - a non-empty term with no usable cache -> filter the rows with `fn`
#  - no cache or an empty term            -> a fresh copy of all rows
#  - otherwise                            -> the cached result as-is
getFiltered = (rows, term, filterCache, filterCacheTerm, fn) ->
  hasTerm = term?.length
  if hasTerm and (filterCache is undefined or filterCacheTerm isnt term)
    rows.filter fn
  else if filterCache is undefined or not hasTerm
    rows.slice()
  else
    filterCache
# Creates an in-memory feed over `data` ({headers, rows}) implementing the
# feed contract (headers / totalCount / rows / rowsForIds). Filter results
# are cached between calls where the term has not changed.
objectFeed = (data, options) ->
  options = merge({
    cellValueLookup: identity
    termLookup: defaultTermLookup
    #XXX: should this provide more information - like the column id being sorted on?
    compare: compare
  }, options)
  # default plain-text filter: match the term against the whole-row search string
  options.filter ?= (term, row) -> options.termLookup(term.toLowerCase(), getRowSearchTerm(options.cellValueLookup, row))
  options.advancedSearch ?= getAdvancedSearchFilter(options.cellValueLookup, options.termLookup)
  # cached values
  filterCache = undefined      # last filtered row array
  filterCacheTerm = undefined  # the term filterCache was built for
  sorted = undefined           # last sorted row array
  sortCacheTerm = {}           # {column, direction} the sort was built for
  rowsByIdMap = undefined      # lazily-built id -> row lookup for rowsForIds
  {
    data: data # for debugging
    headers: (cb) -> cb(data.headers)
    totalCount: (cb) -> cb(data.rows.length)
    rows: (range, cb) ->
      # a different sort column invalidates the filter cache (the cached
      # array may have been sorted in place below)
      if range.sort?.column isnt sortCacheTerm.column
        filterCache = undefined
      if range.useAdvancedSearch
        advancedSearchFilterFn = (row) -> options.advancedSearch(range.advancedSearch, row)
        filterCache = getFiltered(data.rows, range.advancedSearch, filterCache, filterCacheTerm, advancedSearchFilterFn)
        filterCacheTerm = range.advancedSearch
        sorted = undefined
      else
        filterFn = (row) -> options.filter(range.filter, row)
        filterCache = getFiltered(data.rows, range.filter, filterCache, filterCacheTerm, filterFn)
        filterCacheTerm = range.filter
        sorted = undefined
      # NOTE(review): `sorted` is reset on every call above, so this branch
      # always runs - confirm whether the sort cache was meant to survive.
      if sorted is undefined or sortCacheTerm.column isnt range.sort?.column or sortCacheTerm.direction isnt range.sort?.direction
        sorted = if range.sort and range.sort.column
          direction = if range.sort.direction is 'asc' then 1 else -1
          column = range.sort.column
          # sorts filterCache in place, then reuses it as the sorted result
          filterCache.sort (r1, r2) -> direction * options.compare(r1.cells[column], r2.cells[column])
          filterCache
        else filterCache
      sortCacheTerm.column = range.sort?.column
      sortCacheTerm.direction = range.sort?.direction
      # an undefined start/end slices to the full array
      cb({rows: sorted[range.start..range.end], filteredCount: sorted.length})
    rowsForIds: (ids, lookupRow, cb) ->
      if rowsByIdMap is undefined
        rowsByIdMap = {}
        for row in data.rows
          rowsByIdMap[lookupRow(row)] = row
      cb(rowsByIdMap[id] for id in ids)
  }
# XXX Deprecated: alongside request
# Creates a data-table feed backed by a JSON endpoint. Each feed method posts
# a { type, ... } payload to `url` via json(). options.cache caches the
# headers/totalCount responses for the lifetime of the feed.
urlFeed = (url, options) ->
  #XXX: when new calls come in, ignore the ongoing request if there is one / cancel the request if possible
  options = merge({
    extra: undefined,
    cache: false
  }, options)
  # creates a function that might perform caching, depending on the options.cache value
  maybeCached = (fetcher) ->
    if options.cache
      value = undefined
      (cb) ->
        if value
          cb(value)
        else
          fetcher (res) ->
            value = res
            cb(value)
    else
      (cb) -> fetcher(cb)
  # Wraps a feed callback: logs request errors, then forwards the value
  jsonCallback = (cb) ->
    (err, value) ->
      logger.warn(err) if err
      cb(value)
  {
    url: url # for debugging
    headers: maybeCached (cb) ->
      json url, { type: 'headers', extra: options.extra }, jsonCallback(cb)
    totalCount: maybeCached (cb) ->
      json url, { type: 'totalCount', extra: options.extra }, (err, res) ->
        # Guard with ?.: on a request error res is undefined, and the bare
        # res.count access used to throw before jsonCallback could log err.
        jsonCallback(cb)(err, res?.count)
    rows: (range, cb) ->
      json url, { type: 'rows', range: range, extra: options.extra }, jsonCallback(cb)
    rowsForIds: (ids, lookupRow, cb) ->
      json url, { type: 'rowsForIds', ids: ids, extra: options.extra }, jsonCallback(cb)
  }
# Fluid api entry point: builds a detached div, attaches a DataTable to it
# and returns the selection. Renders immediately only when a feed is supplied.
dataTable = (options) ->
  selection = div()
  table = new DataTable(selection, options)
  table.render() if options?.feed
  selection
# Attach the feed factories to the fluid api entry point so they are
# reachable as dataTable.objectFeed / dataTable.urlFeed as well as by import.
dataTable.objectFeed = objectFeed
dataTable.urlFeed = urlFeed
export {
  dataTable,
  DataTable,
  objectFeed,
  urlFeed,
  getAdvancedSearchFilter,
}
import { userFacingText } from 'utils/user-facing-text'
import { div, select, detached, button, i, span } from 'utils/selection'
import {
debounce,
defined,
find,
flatten,
identity,
isArray,
isFunction,
isString,
merge,
randomId,
unique
} from 'utils/utils'
import { Set as HSet } from 'utils/set'
import {compare } from 'utils/sort'
import { EventEmitter } from 'utils/event-emitter'
import * as filter from 'utils/filter'
import logger from 'utils/logger'
import { json } from 'utils/request'
import { pickerBase as picker, PickerBase as Picker } from 'components/picker'
import { Toggle } from 'components/toggle'
import { StickyTableHeaders } from 'components/sticky-table-headers'
# Register the default user-facing strings for the data table; callers can
# override/localise them through the userFacingText utility.
userFacingText({
  dataTable: {
    addFilter: 'Add Filter',
    advancedSearch: 'Advanced Search',
    and: 'and',
    anyColumn: 'Any column'
    clearFilters: 'Clear Filters',
    clearSelection: 'clear selection',
    loading: 'Loading',
    noData: 'No Data',
    noSort: 'No Sort',
    or: 'or',
    rowsPerPage: 'Rows Per Page',
    search: 'Search',
    # $selected / $total are substituted at display time
    selectedRows: '$selected of $total selected.',
    sortBy: 'Sort By',
    # Labels for the advanced-search filter criteria
    contains: 'contains',
    excludes: 'does not contain',
    startsWith: 'starts with',
    fuzzy: 'fuzzy matches',
    regex: 'matches regex',
    exact: 'is exactly',
    greater: 'is greater than',
    less: 'is less than'
  }
})
fullWidthColSpan = 999 # the colspan used to make a cell display as an entire row
# width (px) below which the table switches to compact layout when compact: 'auto'
collapseBreakPoint = 480
# Resolves an option for a specific column: a value set under
# options.columns[id][name] wins, otherwise the table-wide options[name].
columnOptionLookup = (options, name, id) ->
  { columns } = options
  if columns isnt undefined and columns[id] isnt undefined and columns[id][name] isnt undefined
    columns[id][name]
  else
    options[name]
# Maps a list of raw criteria names to de-duplicated picker items carrying
# the localised display text for each criterion.
toCriteriaItems = (list) ->
  unique(list).map (item) ->
    value: item
    text: userFacingText('dataTable', item)
# Validates a user-supplied list of advanced-search criteria against the
# known filter types. Returns the list when fully valid, otherwise warns and
# falls back to an empty array (undefined is treated as "none supplied").
advancedSearchCriteriaValidate = (value) ->
  allowedTypes = filter.filterTypes()
  return [] if value is undefined
  unless isArray(value)
    logger.warn('Expected an array of filter criteria but was passed:', value)
    return []
  invalidTypes = value.filter((c) -> not ~allowedTypes.indexOf(c))
  if invalidTypes.length
    logger.warn('Invalid Filter Criteria Specified:', invalidTypes, '\nPlease select a value from filterStringTypes()', allowedTypes)
    return []
  value or []
# Splits an array around the element at `index`, returning
# [elementsBefore, element, elementsAfter]. Neither side includes the pivot.
splitArray = (array, index) ->
  before = array.slice(0, index)
  after = array.slice(index + 1)
  [before, array[index], after]
# pagination block (the page selector and the rows per page selector)
# Returns [containerElement, pickerInstance]; the caller populates the
# picker's items and toggles the back/forward disabled classes on render.
createPaginationBlock = (table) ->
  container = div('hx-data-table-paginator')
  pickerNode = container.append('button').class('hx-data-table-paginator-picker hx-btn hx-btn-invisible').node()
  dtPicker = new Picker(pickerNode, { dropdownOptions: { align: 'rbrt' } })
    .on 'change', 'hx.data-table', (d) =>
      # only react to user interaction, not programmatic value updates
      if d.cause is 'user'
        table.page(d.value.value, undefined, d.cause)
  # text node populated elsewhere via the .hx-data-table-paginator-total-rows selector
  totalRows = container.append('span').class('hx-data-table-paginator-total-rows')
  back = container.append('button').class('hx-data-table-paginator-back hx-btn hx-btn-invisible')
  back.append('i').class('hx-icon hx-icon-chevron-left')
  back.on 'click', 'hx.data-table', => if not back.classed('hx-data-table-btn-disabled') then table.page(table.page()-1)
  forward = container.append('button').class('hx-data-table-paginator-forward hx-btn hx-btn-invisible')
  forward.append('i').class('hx-icon hx-icon-chevron-right')
  forward.on 'click', 'hx.data-table', => if not forward.classed('hx-data-table-btn-disabled') then table.page(table.page()+1)
  [container, dtPicker]
# pageSizeOptions select
# Builds the "Rows Per Page" selector: a label plus a picker whose chosen
# value sets table.pageSize. Returns [containerElement, pickerInstance] so
# the caller can populate the picker's items later.
createPageSizeBlock = (table, options) ->
  container = div('hx-data-table-page-size')
  container.append('span').text(options.rowsPerPageText + ': ')
  node = container.append('button').class('hx-data-table-page-size-picker hx-btn hx-btn-invisible').node()
  dtPicker = new Picker(node, { dropdownOptions: { align: 'rbrt' } })
    .on 'change', 'hx.data-table', (d) ->
      if d.cause is 'user'
        # apply the new page size, then jump back to the first page
        table.pageSize(d.value.value, undefined, 'user')
        table.page(1, undefined, 'user')
  [container, dtPicker]
# Flexible spacer element used to push control-panel sections apart.
spacer = ->
  div('hx-data-table-spacer')
# Builds the advanced-search UI as a data-bound view over `selection`.
# The bound data is an array of filter groups (OR-ed), each an array of
# {column, term, criteria} filters (AND-ed). All edit handlers rebuild the
# whole filter structure immutably with splats and call
# dataTable.advancedSearch(...) which re-renders the view.
createAdvancedSearchView = (selection, dataTable, options) ->
  # Render individual row
  advancedSearchRowEnter = (filterGroup, filterGroupIndex) ->
    # NOTE: thin arrow - inside, @ is the view's enter context (@append below)
    (filterRow, index, trueIndex) ->
      typePickerOptions =
        items: [
          { text: userFacingText('dataTable', 'and'), value: 'and' }
          { text: userFacingText('dataTable', 'or'), value: 'or' }
        ]
        fullWidth: true
      typePickerSel = picker(typePickerOptions)
        .classed('hx-btn-outline hx-data-table-advanced-search-type hx-section hx-fixed', true)
      typePickerSel.api('picker')
        .on 'change', (data) ->
          if data.cause is 'user'
            prevFilters = dataTable.advancedSearch()
            [leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
            newFilters = if data.value.value is 'or'
              # 'or': split this filter out into the start of a new group
              [leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
              [leftFilterGroups..., leftFilters, [currFilter, rightFilters...], rightFilterGroups...]
            else
              # 'and': merge this whole group into the previous group
              [leftAllButLast..., leftLast] = leftFilterGroups
              [leftAllButLast..., [leftLast..., filterGroup...], rightFilterGroups...]
            dataTable.advancedSearch(newFilters)
      anyColumn = {
        text: options.anyColumnText
        value: 'any'
        anyColumn: true
      }
      columnItems = filterRow.headers.map (header) ->
        value: header.id
        orig: header
      # XXX Breaking: Renderer
      # columnRenderer = (cell) ->
      #   if cell.anyColumn then span().text(cell.text)
      #   else columnOptionLookup(options, 'headerCellRenderer', cell.orig.id)(cell.orig, filterRow.headers)
      columnRenderer = (element, cell) ->
        if cell.anyColumn then select(element).text(cell.text)
        else columnOptionLookup(options, 'headerCellRenderer', cell.orig.id)(element, cell.orig, filterRow.headers)
      columnPickerOptions =
        items: [anyColumn, columnItems...]
        renderer: columnRenderer
        fullWidth: true
      columnPickerSel = picker(columnPickerOptions)
        .classed('hx-btn-outline hx-data-table-advanced-search-column hx-section hx-fixed', true)
      columnPickerSel.api('picker')
        .on 'change', (data) ->
          if data.cause is 'user'
            prevFilters = dataTable.advancedSearch()
            [leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
            [leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
            newFilter = merge(currFilter, {
              column: data.value.value
            })
            # changing column invalidates the previously chosen criteria
            delete newFilter.criteria
            columnCriteria = columnOptionLookup(options, 'advancedSearchCriteria', data.value.value) || []
            criteriaItems = ['contains', advancedSearchCriteriaValidate(columnCriteria)...]
            criteriaPickerSel.api('picker')
              .items(toCriteriaItems(criteriaItems))
            dataTable.advancedSearch([leftFilterGroups..., [leftFilters..., newFilter, rightFilters...], rightFilterGroups...])
      criteriaPickerOptions =
        items: toCriteriaItems(['contains', advancedSearchCriteriaValidate(options.advancedSearchCriteria)...])
        fullWidth: true
      criteriaPickerSel = picker(criteriaPickerOptions)
        .classed('hx-btn-outline hx-data-table-advanced-search-criteria hx-section hx-fixed', true)
      criteriaPickerSel.api('picker')
        .on 'change', (data) ->
          if data.cause is 'user'
            prevFilters = dataTable.advancedSearch()
            [leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
            [leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
            newFilter = merge(currFilter, {
              criteria: data.value.value
            })
            dataTable.advancedSearch([leftFilterGroups..., [leftFilters..., newFilter, rightFilters...], rightFilterGroups...])
      # shown instead of the criteria picker when only 'contains' is available
      criteriaAnyPlaceholder = div('hx-data-table-advanced-search-criteria-placeholder hx-text-disabled hx-background-disabled')
        .text(userFacingText('dataTable', 'contains'))
      debouncedInput = debounce 200, (e) ->
        prevFilters = dataTable.advancedSearch()
        [leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
        [leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
        newFilter = merge(currFilter, {
          term: e.target.value
        })
        dataTable.advancedSearch([leftFilterGroups..., [leftFilters..., newFilter, rightFilters...], rightFilterGroups...])
      termInput = detached('input').attr('placeholder', options.advancedSearchPlaceholder)
        .class('hx-data-table-advanced-search-input hx-section')
        .attr('required', 'required')
        .on 'input', debouncedInput
      removeBtn = button('hx-btn hx-negative hx-btn-outline hx-data-table-advanced-search-remove')
        .add(i('hx-icon hx-icon-close'))
        .on 'click', ->
          prevFilters = dataTable.advancedSearch()
          [leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
          [leftFilters, _, rightFilters] = splitArray(filterGroup, trueIndex)
          newFilters = if trueIndex is 0 and filterGroupIndex is 0
            # removing the very first filter: drop it, keep its siblings as the first group
            [rightFilters, rightFilterGroups...]
          else if trueIndex is 0
            # removing a group's first filter: merge the remainder into the previous group
            [leftFilterGroup..., leftFilterGroupLast] = leftFilterGroups
            [_, filters...] = filterGroup
            [leftFilterGroup..., [leftFilterGroupLast..., filters...], rightFilterGroups...]
          else
            [leftFilterGroups..., [leftFilters..., rightFilters...], rightFilterGroups...]
          # drop empty groups; an empty filter set becomes undefined (search off)
          filterToUse = newFilters.filter((group) => group.length)
          dataTable.advancedSearch(if filterToUse.length then filterToUse else undefined)
      @append('div').class('hx-data-table-advanced-search-filter hx-section hx-input-group hx-input-group-full-width')
        .add(typePickerSel)
        .add(columnPickerSel)
        .add(criteriaAnyPlaceholder)
        .add(criteriaPickerSel)
        .add(div('hx-data-table-advanced-search-filter-input-container hx-input-group hx-no-pad hx-no-border')
          .add(termInput)
          .add(removeBtn))
        .node()
  # Pushes the current filter state into an already-rendered row
  advancedSearchRowUpdate = ({term, column, criteria}, element, index) ->
    filterRowSel = select(element)
    # NOTE(review): validContext is computed but never used below - confirm
    # whether it was meant to style the input when the term is empty.
    validContext = if not term then 'negative' else undefined
    filterRowSel.select('.hx-data-table-advanced-search-type').api('picker')
      .value(if index is 0 then 'or' else 'and')
    trueColumn = column or 'any'
    filterRowSel.select('.hx-data-table-advanced-search-column').api('picker')
      .value(trueColumn)
    columnCriteria = columnOptionLookup(options, 'advancedSearchCriteria', column) || []
    criteriaItems = if trueColumn is 'any' then ['contains'] else ['contains', advancedSearchCriteriaValidate(columnCriteria)...]
    filterRowSel.select('.hx-data-table-advanced-search-criteria')
      .style('display', if criteriaItems.length is 1 then 'none' else 'block')
      .api('picker')
      .items(toCriteriaItems(criteriaItems))
      .value(criteria || 'contains')
    filterRowSel.select('.hx-data-table-advanced-search-criteria-placeholder')
      .style('display', if criteriaItems.length is 1 then 'block' else 'none')
    filterRowSel.select('.hx-data-table-advanced-search-input')
      .value(term or '')
  # Render grouped filters
  advancedSearchGroupEnter = (filterGroup, index, trueIndex) ->
    filterGroupSel = div('hx-data-table-advanced-search-filter-group')
    filterGroupView = filterGroupSel.view('.hx-data-table-advanced-search-filter')
      .enter(advancedSearchRowEnter(filterGroup, trueIndex))
      .update(advancedSearchRowUpdate)
    # stash the nested view so the group update can re-apply row data
    filterGroupSel.api('data-table.group', {
      filterGroupView
    })
    @append(filterGroupSel).node()
  advancedSearchGroupUpdate = (filterGroup, element, index) ->
    select(element).api('data-table.group').filterGroupView.apply(filterGroup)
  selection.view('.hx-data-table-advanced-search-filter-group')
    .enter(advancedSearchGroupEnter)
    .update(advancedSearchGroupUpdate)
class DataTable extends EventEmitter
constructor: (selector, options) ->
super()
resolvedOptions = merge({
allowHeaderWrap: false
compact: 'auto' # 'auto', true, false
displayMode: 'paginate' # 'paginate', 'all'
feed: undefined
showSearchAboveTable: false
filter: undefined
filterEnabled: true
showAdvancedSearch: false
advancedSearchEnabled: false
advancedSearchCriteria: undefined
advancedSearch: undefined
pageSize: 15
pageSizeOptions: undefined # supply an array of numbers to show the user
retainHorizontalScrollOnRender: true
retainVerticalScrollOnRender: false
selectEnabled: false
singleSelection: false
sort: undefined
sortEnabled: true
highlightOnHover: true
useStickyHeaders: true
selectedRows: []
expandedRows: []
# functions used for getting row state
rowIDLookup: (row) -> row.id
rowEnabledLookup: (row) -> not row.disabled
rowSelectableLookup: (row) -> true
rowCollapsibleLookup: (row) -> false
# functions for rendering
collapsibleRenderer: undefined
# XXX Breaking: Renderer
# cellRenderer: (cell, row) -> span().text(cell)
# headerCellRenderer: (cell, headers) -> span().text(cell.name)
cellRenderer: (element, cell, row) -> select(element).text(cell)
headerCellRenderer: (element, cell, headers) -> select(element).text(cell.name)
# per column options (headerCellRenderer, cellRenderer, sortEnabled)
columns: {}
clearSelectionText: userFacingText('dataTable','clearSelection')
loadingText: userFacingText('dataTable','loading')
noDataMessage: userFacingText('dataTable','noData')
noSortText: userFacingText('dataTable', 'noSort')
rowsPerPageText: userFacingText('dataTable','rowsPerPage')
searchPlaceholder: userFacingText('dataTable','search')
selectedRowsText: userFacingText('dataTable', 'selectedRows', true)
sortByText: userFacingText('dataTable','sortBy')
addFilterText: userFacingText('dataTable', 'addFilter')
clearFiltersText: userFacingText('dataTable', 'clearFilters')
anyColumnText: userFacingText('dataTable', 'anyColumn')
advancedSearchText: userFacingText('dataTable','advancedSearch')
advancedSearchPlaceholder: userFacingText('dataTable', 'search')
}, options)
resolvedOptions.pageSize = Math.min resolvedOptions.pageSize, 1000
resolvedOptions.advancedSearchEnabled = true if resolvedOptions.advancedSearch
resolvedOptions.showAdvancedSearch = true if resolvedOptions.advancedSearchEnabled
selection = select(selector)
.classed('hx-data-table', true)
.api('data-table', this)
.api(this)
content = div('hx-data-table-content')
# loading div
loadingDiv = div('hx-data-table-loading')
.add(div('hx-data-table-loading-inner')
.add(div('hx-spinner'))
.add(span().text(' ' + resolvedOptions.loadingText)))
statusBar = div('hx-data-table-status-bar')
statusBarText = span('hx-data-table-status-bar-text')
statusBarClear = span('hx-data-table-status-bar-clear')
.text(" (#{resolvedOptions.clearSelectionText})")
.on 'click', 'hx.data-table', =>
@_.selectedRows.clear()
selection.select('.hx-data-table-content').selectAll('.hx-data-table-row-selected').classed('hx-data-table-row-selected', false)
@updateSelected()
@emit 'selectedrowsclear'
controlPanelCompact = div('hx-data-table-control-panel-compact')
controlPanelCompactToggle = button('hx-data-table-control-panel-compact-toggle hx-btn hx-btn-invisible')
.add(i('hx-icon hx-icon-bars'))
.on 'click', ->
toggleElem = controlPanel
if toggleElem.classed('hx-data-table-compact-hide')
toggleElem.classed('hx-data-table-compact-hide', false)
.style('height', '0px')
.morph().with('expandv', 150)
.then ->
controlPanelCompact.classed('hx-data-table-control-panel-compact-open', true)
.go()
else
toggleElem.morph().with('collapsev', 50)
.then ->
toggleElem.classed('hx-data-table-compact-hide', true)
controlPanelCompact.classed('hx-data-table-control-panel-compact-open', false)
.thenStyle('display', '')
.go()
controlPanel = div('hx-data-table-control-panel hx-data-table-compact-hide')
controlPanelInner = div('hx-data-table-control-panel-inner')
# compact sort - always on the page, only visible in compact mode (so we can just change the class and everything will work)
compactSort = div('hx-data-table-sort')
.classed('hx-data-table-sort-visible', resolvedOptions.sortEnabled)
.add(span().text(resolvedOptions.sortByText + ': '))
sortColPicker = new Picker(compactSort.append('button').class('hx-btn hx-btn-invisible').node())
sortColPicker.on 'change', 'hx.data-table', (d) =>
if d.cause is 'user' then @sort({column: sortColPicker.value().column, direction: sortColPicker.value().direction})
filterContainer = div('hx-data-table-filter-container')
onInput = debounce 200, => @filter(filterInput.value(), undefined, 'user')
filterInput = detached('input').class('hx-data-table-filter')
.attr('placeholder', resolvedOptions.searchPlaceholder)
.classed('hx-data-table-filter-visible', resolvedOptions.filterEnabled)
.on 'input', 'hx.data-table', onInput
advancedSearchContainer = div('hx-data-table-advanced-search-container')
advancedSearchToggle = button('hx-data-table-advanced-search-toggle hx-btn hx-btn-invisible')
.text(resolvedOptions.advancedSearchText)
advancedSearchToggleButton = new Toggle(advancedSearchToggle.node())
advancedSearchToggleButton.on 'change', (data) => @advancedSearchEnabled(data)
advancedSearch = div('hx-data-table-advanced-search')
advancedSearchView = createAdvancedSearchView(advancedSearch, this, resolvedOptions)
advancedSearchButtons = div('hx-data-table-advanced-search-buttons')
addFilter = =>
currentFilters = @advancedSearch() or [[]]
[previousFilterGroups..., lastFilterGroup] = currentFilters
newLastFilterGroup = [lastFilterGroup..., {
column: 'any',
term: ''
}]
@advancedSearch([previousFilterGroups..., newLastFilterGroup])
clearFilters = => @advancedSearch(undefined)
advancedSearchAddFilterButton = button('hx-btn hx-positive hx-data-table-advanced-search-add-filter hx-data-table-advanced-search-button hx-btn-outline')
.add(i('hx-data-table-advanced-search-icon hx-icon hx-icon-plus hx-text-positive'))
.add(span().text(resolvedOptions.addFilterText))
.on('click', addFilter)
advancedSearchClearFilterButton = button('hx-btn hx-negative hx-data-table-advanced-search-clear-filters hx-data-table-advanced-search-button hx-btn-outline')
.add(i('hx-data-table-advanced-search-icon hx-icon hx-icon-close hx-text-negative'))
.add(span().text(resolvedOptions.clearFiltersText))
.on('click', clearFilters)
# We create multiple copies of these to show in different places
# This makes it easier to change the UI as we can show/hide instead of moving them
[pageSize, pageSizePicker] = createPageSizeBlock(this, resolvedOptions)
[pageSizeBottom, pageSizePickerBottom] = createPageSizeBlock(this, resolvedOptions)
[pagination, pagePicker] = createPaginationBlock(this)
[paginationBottom, pagePickerBottom] = createPaginationBlock(this)
[paginationCompact, pagePickerCompact] = createPaginationBlock(this)
# The main pagination is hidden as the compact control panel contains a version of it
pagination.classed('hx-data-table-compact-hide', true)
controlPanelBottom = div('hx-data-table-control-panel-bottom')
# Create the structure in one place
# Some entities still make sense to be built individually (e.g. the loading div)
selection
.add(content)
.add(statusBar
.add(statusBarText)
.add(statusBarClear))
# Control panel displayed at the top for compact mode
.add(controlPanelCompact
.add(paginationCompact)
.add(spacer())
.add(controlPanelCompactToggle))
# Main control panel - contains all the components
.add(controlPanel
.add(controlPanelInner
.add(compactSort)
.add(pagination)
.add(pageSize)
.add(spacer())
.add(filterContainer
.add(advancedSearchToggle)
.add(filterInput)))
# The advanced search container isn't in the main control panel as it is easier to style outside
.add(advancedSearchContainer
.add(advancedSearch)
.add(advancedSearchButtons
.add(advancedSearchAddFilterButton)
.add(advancedSearchClearFilterButton))))
# Bottom control panel - shown in compact mode and when the search is at the top
.add(controlPanelBottom
.add(spacer())
.add(pageSizeBottom)
.add(paginationBottom))
# Add the loading div last - helps keep it on top of everything
.add(loadingDiv)
# 'private' variables
@_ = {
selection: selection
options: resolvedOptions
page: 1
pagePickers: [pagePicker, pagePickerCompact, pagePickerBottom]
pageSizePickers: [pageSizePicker, pageSizePickerBottom]
statusBar: statusBar
sortColPicker: sortColPicker
selectedRows: new HSet(resolvedOptions.selectedRows) # holds the ids of the selected rows
expandedRows: new HSet(resolvedOptions.expandedRows)
renderedCollapsibles: {}
compactState: (resolvedOptions.compact is 'auto' and selection.width() < collapseBreakPoint) or resolvedOptions.compact is true
advancedSearchView: advancedSearchView
advancedSearchToggleButton: advancedSearchToggleButton
}
# responsive page resize when compact is 'auto'
selection.on 'resize', 'hx.data-table', =>
selection.selectAll('.hx-data-table-collapsible-content-container').map (e) =>
e.style('max-width', (parseInt(selection.style('width')) - @_.collapsibleSizeDiff) + 'px')
state = (@compact() is 'auto' and selection.width() < collapseBreakPoint) or @compact() is true
selection.classed 'hx-data-table-compact', state
if @_.compactState isnt state
@_.compactState = state
@emit('compactchange', {value: @compact(), state: state, cause: 'user'})
dtRandomId = randomId()
# deal with shift being down - prevents the text in the table being selected when shift
# selecting multiple rows (as it looks bad) but also means that data can be selected if required
# XXX: make this work better / come up with a better solution
select('body').on 'keydown', 'hx.data-table.shift.' + dtRandomId, (e) =>
if e.shiftKey and @selectEnabled()
selection.classed('hx-data-table-disable-text-selection', true)
select('body').on 'keyup', 'hx.data-table.shift.' + dtRandomId, (e) =>
if not e.shiftKey and @selectEnabled()
selection.classed('hx-data-table-disable-text-selection', false)
# Methods for changing the options
#---------------------------------
# general purpose function for setting / getting an option
option = (name) ->
(value, cb, cause) ->
options = @_.options
if arguments.length > 0
options[name] = value
@emit(name.toLowerCase() + 'change', {value: value, cause: (cause or 'api')})
@render(cb)
this
else options[name]
collapsibleRenderer: option('collapsibleRenderer')
compact: option('compact')
displayMode: option('displayMode')
feed: option('feed')
filter: option('filter')
advancedSearch: option('advancedSearch')
showAdvancedSearch: option('showAdvancedSearch')
advancedSearchEnabled: option('advancedSearchEnabled')
showSearchAboveTable: option('showSearchAboveTable')
filterEnabled: option('filterEnabled')
noDataMessage: option('noDataMessage')
pageSize: option('pageSize')
pageSizeOptions: option('pageSizeOptions')
retainHorizontalScrollOnRender: option('retainHorizontalScrollOnRender')
retainVerticalScrollOnRender: option('retainVerticalScrollOnRender')
rowCollapsibleLookup: option('rowCollapsibleLookup')
rowEnabledLookup: option('rowEnabledLookup')
rowIDLookup: option('rowIDLookup')
rowSelectableLookup: option('rowSelectableLookup')
selectEnabled: option('selectEnabled')
highlightOnHover: option('highlightOnHover')
singleSelection: option('singleSelection')
useStickyHeaders: option('useStickyHeaders')
sort: option('sort')
# general purpose function for setting / getting a column option (or the default option of the column id is not specified)
columnOption = (name) ->
(columnId, value, cb) ->
options = @_.options
if arguments.length > 1 and isString(columnId)
options.columns[columnId] ?= {}
options.columns[columnId][name] = value
@emit(name.toLowerCase() + 'change', {column: columnId, value: value, cause: 'api'})
@render(cb)
this
else if arguments.length > 0
if isString(columnId) and options.columns[columnId]
options.columns[columnId][name]
else
options[name] = arguments[0]
@emit(name.toLowerCase() + 'change', {value: value, cause: 'api'})
@render(arguments[1])
this
else options[name]
advancedSearchCriteria: columnOption('advancedSearchCriteria')
allowHeaderWrap: columnOption('allowHeaderWrap')
cellRenderer: columnOption('cellRenderer')
headerCellRenderer: columnOption('headerCellRenderer')
sortEnabled: columnOption('sortEnabled')
# function for setting / getting options that are only column specific and cannot be set for the whole table
columnOnlyOption = (name) ->
(columnId, value, cb) ->
options = @_.options
if isString(columnId)
if arguments.length > 1
options.columns[columnId] ?= {}
options.columns[columnId][name] = value
@emit(name.toLowerCase() + 'change', {column: columnId, value: value, cause: 'api'})
@render(cb)
this
else if options.columns[columnId]
options.columns[columnId][name]
maxWidth: columnOnlyOption('maxWidth')
# Methods for changing the state of the table
# -------------------------------------------
page: (value, cb, cause) ->
if arguments.length > 0
@_.page = Math.max(1, value)
if @_.numPages?
@_.page = Math.min @_.page, @_.numPages
@emit('pagechange', {value: @_.page, cause: cause or 'api'})
@render(cb)
this
else @_.page
selectedRows: (value, cb) ->
if arguments.length > 0 and not isFunction(value)
# Deal with single select mode when setting the selected rows
if @singleSelection() and isArray(value) and value.length
value = [value[0]]
@_.selectedRows = new HSet(value)
newSelectedRows = @_.selectedRows.values()
@emit('selectedrowschange', {value: newSelectedRows, cause: 'api'})
@_.userLastSelectedIndex = undefined
@render(cb)
this
else
@_.selectedRows.values()
expandedRows: (value, cb) ->
if arguments.length > 0 and not isFunction(value)
@_.expandedRows = new HSet(value)
@render(cb)
@emit('expandedrowschange', {value: @_.expandedRows.values(), cause: 'api'})
this
else
@_.expandedRows.values()
rowsForIds: (ids, cb) ->
if cb? then @feed().rowsForIds(ids, @rowIDLookup(), cb)
this
# Methods that perform an action on the table
# -------------------------------------------
renderSuppressed: (value) ->
if arguments.length > 0
@_.renderSuppressed = value
this
else @_.renderSuppressed
# redraws the table
render: (cb) ->
if @_.renderSuppressed then return
feed = @feed()
# check that the feed has been defined - if it hasn't then there is no point in continuing
if feed is undefined or (feed.headers is undefined or feed.totalCount is undefined or feed.rows is undefined)
logger.warn('No feed specified when rendering data table')
return
selection = @_.selection
options = @_.options
# some utility functions
getColumnOption = (name, id) -> columnOptionLookup(options, name, id)
rowToArray = (headers, obj) -> headers.map (header) -> obj.cells[header.id]
# build the main structure of the table in a detached container
container = div('hx-data-table-content')
table = container.append('table').class('hx-data-table-table hx-table')
.classed('hx-table-no-hover', not options.highlightOnHover)
thead = table.append('thead').class('hx-data-table-head')
tbody = table.append('tbody').class('hx-data-table-body')
headerRow = thead.append('tr').class('hx-data-table-row')
# make the loading div visible
selection.select('.hx-data-table-loading').style('display', '')
advancedSearchVisibleAndEnabled = (not options.filterEnabled or options.showAdvancedSearch) and options.advancedSearchEnabled
filterSel = selection.select('.hx-data-table-filter')
.classed('hx-data-table-filter-visible', options.filterEnabled and not advancedSearchVisibleAndEnabled)
nextFilterValue = @filter()
prevFilterValue = filterSel.value()
if nextFilterValue isnt prevFilterValue
filterSel.value(nextFilterValue)
@_.advancedSearchToggleButton.value(options.advancedSearchEnabled)
selection.select('.hx-data-table-advanced-search-toggle')
.classed('hx-data-table-advanced-search-visible', options.filterEnabled and options.showAdvancedSearch)
selection.select('.hx-data-table-advanced-search-container')
.classed('hx-data-table-advanced-search-visible', advancedSearchVisibleAndEnabled)
selection.select('.hx-data-table-control-panel')
.classed('hx-data-table-filter-enabled', options.filterEnabled)
showCompactControlPanelToggle = options.filterEnabled or options.sortEnabled or options.advancedSearchEnabled or options.pageSizeOptions?.length
selection.select('.hx-data-table-control-panel-compact-toggle')
.classed('hx-data-table-control-panel-compact-toggle-visible', showCompactControlPanelToggle)
# load in the data needed
# XXX: how much of this could be split out so it's not re-defined every time render is called?
feed.headers (headers) =>
if advancedSearchVisibleAndEnabled
currentFilters = @advancedSearch() or []
@_.advancedSearchView.apply currentFilters.filter((x) -> x.length).map (filterGroup) ->
filterGroup.map (filterRow) ->
merge(filterRow, {
headers,
getColumnOption
})
selection.select('.hx-data-table-sort')
.classed('hx-data-table-sort-visible', options.sortEnabled or headers.some((header) -> getColumnOption('sortEnabled', header.id)))
feed.totalCount (totalCount) =>
if options.displayMode is 'paginate'
start = (@page() - 1) * options.pageSize
end = @page() * options.pageSize - 1
else
start = undefined
end = undefined
range = {
start: start,
end: end,
sort: @sort(),
filter: @filter(),
advancedSearch: @advancedSearch(),
useAdvancedSearch: options.showAdvancedSearch and options.advancedSearchEnabled
}
feed.rows range, ({rows, filteredCount}) =>
if options.displayMode is 'paginate'
multiPage = false
selection.classed('hx-data-table-infinite', filteredCount is undefined)
if filteredCount is undefined
@_.numPages = undefined
numText = (start+1) + ' - ' + (end+1)
multiPage = true
else
@_.numPages = Math.max(1, Math.ceil(filteredCount / options.pageSize))
if @page() > @_.numPages then @page(@_.numPages)
multiPage = @_.numPages > 1
if filteredCount > 0 and @_.numPages > 1
numText = 'of ' + filteredCount
items = for idx in [1..@_.numPages] by 1
num = idx * options.pageSize
text: (num + 1 - options.pageSize) + ' - ' + Math.min(num, filteredCount) # e.g. 1 - 15
value: idx
@_.pagePickers.forEach (picker) =>
picker
.items(items)
.value(@page())
selection.selectAll('.hx-data-table-paginator').classed('hx-data-table-paginator-visible', multiPage)
selection.selectAll('.hx-data-table-paginator-total-rows').text(numText or '')
selection.selectAll('.hx-data-table-paginator-back').classed('hx-data-table-btn-disabled', @page() is 1)
selection.selectAll('.hx-data-table-paginator-forward').classed('hx-data-table-btn-disabled', @page() is @_.numPages)
selection.select('.hx-data-table-control-panel-compact')
.classed('hx-data-table-control-panel-compact-visible', multiPage or showCompactControlPanelToggle)
selection.select('.hx-data-table-control-panel-bottom')
.classed('hx-data-table-control-panel-bottom-visible', multiPage or options.pageSizeOptions?.length)
selection.select('.hx-data-table-control-panel')
.classed('hx-data-table-control-panel-visible', multiPage or showCompactControlPanelToggle)
if headers.some((header) -> getColumnOption('sortEnabled', header.id))
currentSort = (@sort() or {})
# filter out columns that are not sortable so they don't show in the list for compact mode
sortColumns = flatten(headers
.map((header) -> if getColumnOption('sortEnabled', header.id)
[
{text: header.name, value: header.id + 'asc', column: header.id, direction: 'asc', cell: header}
{text: header.name, value: header.id + 'desc', column: header.id, direction: 'desc', cell: header}
])
.filter(defined))
# set the values for the compact sort control
@_.sortColPicker
# XXX Breaking: Renderer
# .renderer((option) ->
# if option.value
# getColumnOption('headerCellRenderer', option.cell.id)(option.cell, headers)
# .add(i('hx-data-table-compact-sort-arrow hx-icon hx-icon-chevron-' + (if option.direction is 'asc' then 'up' else 'down')))
# else
# span().text(option.text)
# )
.renderer((element, option) ->
if option.value
getColumnOption('headerCellRenderer', option.cell.id)(element, option.cell, headers)
select(element).append('i')
.class('hx-data-table-compact-sort-arrow hx-icon hx-icon-chevron-' + (if option.direction is 'asc' then 'up' else 'down'))
else
select(element).text(option.text)
)
.items([{text: options.noSortText, value: undefined}].concat sortColumns)
if currentSort.column and @_.sortColPicker.value().value isnt (currentSort.column + currentSort.direction)
@_.sortColPicker.value({value: currentSort.column + currentSort.direction})
# populate the page size picker if there are options set
selectPageSize = options.pageSizeOptions? and options.pageSizeOptions.length > 0
selection.selectAll('.hx-data-table-page-size').classed('hx-data-table-page-size-visible', selectPageSize)
if selectPageSize
if options.pageSizeOptions.indexOf(options.pageSize) is -1
options.pageSizeOptions.push options.pageSize
pageSizeOptions = options.pageSizeOptions
.sort(compare)
.map((item) -> {text: item, value: item})
@_.pageSizePickers.forEach (picker) ->
picker
.items(pageSizeOptions)
.value(options.pageSize)
# build the grouped header
if headers.some((header) -> header.groups?)
relevantHeaders = headers.filter((e) -> e.groups?).map((e) -> e.groups.length)
maxHeaderDepth = Math.max.apply(null, relevantHeaders)
# Map over to populate columns with groups of '' where not included
headerGroups = headers.map (e) ->
groups = e.groups or []
groups.push '' while groups.length < maxHeaderDepth
groups
for row in [maxHeaderDepth-1..0] by -1
groupedRow = headerRow.insertBefore 'tr'
groupedRow.append('th').class('hx-data-table-control') if options.selectEnabled or options.collapsibleRenderer?
count = 1
for column in [1..headerGroups.length] by 1
col = headerGroups[column]
prevCol = headerGroups[column-1]
if col? and prevCol?
parent = col.slice(row, maxHeaderDepth).toString()
prevParent = prevCol.slice(row, maxHeaderDepth).toString()
if column is headerGroups.length or col[row] isnt prevCol[row] or parent isnt prevParent
groupedRow.append('th')
.attr('colspan', count)
.class('hx-data-table-cell-grouped')
.text(prevCol[row])
count = 0
count++
# add the 'select all' checkbox to the header
if options.selectEnabled or options.collapsibleRenderer?
headerControlBox = headerRow.append('th').class('hx-data-table-control hx-table-head-no-border')
if options.selectEnabled and not options.singleSelection
headerCheckBox = headerControlBox.append('div').class('hx-data-table-checkbox')
.on 'click', 'hx.data-table', =>
if rows.length > 0
enabledRows = rows.filter (row) -> options.rowEnabledLookup(row)
selectMulti(0, rows.length - 1, not enabledRows.every((row) => @_.selectedRows.has(options.rowIDLookup(row))))
headerCheckBox.append('i').class('hx-icon hx-icon-check')
# build the header
headers.forEach (header, i) =>
cellDiv = headerRow.append('th').class('hx-data-table-cell')
.classed('hx-table-header-allow-wrap', getColumnOption('allowHeaderWrap', header.id))
cellDivContent = cellDiv.append('div').class('hx-data-table-cell-inner')
# XXX Breaking: Renderer
# cellDivContent
# .add(div('hx-data-table-title')
# .add(getColumnOption('headerCellRenderer', header.id)(header, headers)))
getColumnOption('headerCellRenderer', header.id)(
cellDivContent.append('span').class('hx-data-table-title').node(),
header,
headers,
)
if getColumnOption('sortEnabled', header.id)
cellDiv.classed('hx-data-table-cell-sort-enabled', true)
currentSort = @sort()
dirClass = if currentSort and currentSort.column is header.id
'hx-icon-sort-' + currentSort.direction + ' hx-data-table-sort-on'
else 'hx-icon-sort'
cellDivContent.append('i').class('hx-icon ' + dirClass + ' hx-data-table-sort-icon')
cellDiv.on 'click', 'hx.data-table', =>
currentSort = @sort() or {}
direction = if currentSort.column is header.id
if currentSort.direction is 'asc' then 'desc'
else 'asc'
column = if direction isnt undefined then header.id
@sort({column: column, direction: direction}, undefined, 'user')
@updateSelected = =>
parentFilter = (parent) ->
(sel) -> sel.node().parentNode is parent.node()
getSelectableRows = (parent) ->
parent
.selectAll('.hx-data-table-row')
.filter(parentFilter(parent))
.classed('hx-data-table-row-selected', false)
rowDivs = getSelectableRows(tbody)
leftHeaderBody = container.select('.hx-sticky-table-header-left').select('tbody')
checkBoxDivs = getSelectableRows(leftHeaderBody)
if @_.selectedRows.size > 0
for row, rowIndex in rows
if @_.selectedRows.has(options.rowIDLookup(row))
select(rowDivs.nodes[rowIndex]).classed('hx-data-table-row-selected', true)
if checkBoxDivs.nodes[rowIndex]?
select(checkBoxDivs.nodes[rowIndex]).classed('hx-data-table-row-selected', true)
pageHasSelection = tbody.selectAll('.hx-data-table-row-selected').size() > 0
selection.classed('hx-data-table-has-page-selection', pageHasSelection and not options.singleSelection)
selection.classed('hx-data-table-has-selection', @_.selectedRows.size > 0 and not options.singleSelection)
if totalCount isnt undefined
@_.statusBar
.select('.hx-data-table-status-bar-text')
.text(userFacingText.format(options.selectedRowsText, { selected: @_.selectedRows.size, total: totalCount }))
# handles multi row selection ('select all' and shift selection)
selectMulti = (start, end, force) =>
newRows = []
newRows.push rows[idx] for idx in [start..end] by 1
for row in newRows
if options.rowEnabledLookup(row) and options.rowSelectableLookup(row)
id = options.rowIDLookup(row)
@_.selectedRows[if force then 'add' else 'delete'](id)
@emit 'selectedrowschange', {row: row, rowValue: @_.selectedRows.has(id), value: @selectedRows(), cause: 'user'}
@updateSelected()
# handles row selection.
selectRow = (row, index, shiftDown) =>
if @_.userLastSelectedIndex?
if options.singleSelection and index isnt @_.userLastSelectedIndex
@_.selectedRows.clear()
else
# does the check for whether we're shift selecting and calls into selectMulti if we are
if shiftDown and index isnt @_.userLastSelectedIndex
force = @_.selectedRows.has(options.rowIDLookup(rows[@_.userLastSelectedIndex]))
if index > @_.userLastSelectedIndex then selectMulti(@_.userLastSelectedIndex + 1, index, force)
else selectMulti(index, @_.userLastSelectedIndex, force)
return
@_.userLastSelectedIndex = index
if options.rowSelectableLookup(row)
id = options.rowIDLookup(row)
deleteOrAdd = if @_.selectedRows.has(id) then 'delete' else 'add'
@_.selectedRows[deleteOrAdd](id)
@emit 'selectedrowschange', {row: row, rowValue: @_.selectedRows.has(id), value: @selectedRows(), cause: 'user'}
@updateSelected()
# Deal with collapsible rows
buildCollapsible = ->
contentRow = detached('tr').class('hx-data-table-collapsible-content-row')
hiddenRow = detached('tr').class('hx-data-table-collapsible-row-spacer')
# Add an empty cell so the sticky headers display correctly
contentRow.append('td').class('hx-data-table-collapsible-cell hx-data-table-collapsible-cell-empty')
# The div that the user will populate with the collapsibleRender function
contentDiv = contentRow.append('td').class('hx-data-table-collapsible-cell')
.attr('colspan',fullWidthColSpan)
.append('div').class('hx-data-table-collapsible-content-container')
.append('div').class('hx-data-table-collapsible-content')
{contentRow: contentRow, hiddenRow: hiddenRow, contentDiv: contentDiv}
toggleCollapsible = (node, row, force) =>
# once rows have been clicked once, the nodes are stored in the _.renderedCollapsibles object for re-use
rowId = options.rowIDLookup(row)
cc = @_.renderedCollapsibles[rowId] or buildCollapsible(row)
@_.renderedCollapsibles[rowId] = cc
# We always insert after here to make sure the nodes are added when setting the collapsible rows with the API
node.insertAfter(cc.hiddenRow).insertAfter(cc.contentRow)
currentVis = if force? then force else !cc.contentRow.classed('hx-data-table-collapsible-row-visible')
cc.contentRow.classed('hx-data-table-collapsible-row-visible', currentVis)
node.classed('hx-data-table-collapsible-row-visible', currentVis)
node.select('.hx-data-table-collapsible-toggle').select('i').class(if currentVis then 'hx-icon hx-icon-minus' else 'hx-icon hx-icon-plus')
# XXX Breaking: Renderer
# if currentVis then cc.contentDiv.append(options.collapsibleRenderer(row))
if currentVis
options.collapsibleRenderer(cc.contentDiv.node(), row)
else
@_.renderedCollapsibles[rowId].contentRow.remove()
@_.renderedCollapsibles[rowId].hiddenRow.remove()
delete @_.renderedCollapsibles[rowId]
@_.expandedRows[if currentVis then 'add' else 'delete'](rowId)
@_.stickyHeaders?.render()
@_.collapsibleSizeDiff = parseInt(selection.style('width')) - parseInt(select(cc.contentDiv.node().parentNode).style('width'))
currentVis
# build the rows
if filteredCount is undefined or filteredCount > 0
rows.forEach (row, rowIndex) =>
tr = tbody.append('tr').class('hx-data-table-row')
.classed('hx-data-table-row-selected', @_.selectedRows.has(options.rowIDLookup(row)))
.classed('hx-data-table-row-disabled', not options.rowEnabledLookup(row))
tr.on 'click', 'hx.data-table', (e) => @emit 'rowclick', {data: row, node: tr.node()}
rowIsCollapsible = options.rowCollapsibleLookup(row) # stored as we use it more than once
# used in compact mode to display the tick correctly without letting text flow behind it.
tr.classed('hx-data-table-row-select-enabled', options.selectEnabled)
if options.selectEnabled or options.collapsibleRenderer?
controlDiv = tr.append('th').class('hx-data-table-control')
if options.selectEnabled
checkbox = controlDiv.append('div').class('hx-data-table-checkbox')
checkbox.append('i').class('hx-icon hx-icon-check')
if options.rowEnabledLookup(row)
checkbox.on 'click', 'hx.data-table', (e) ->
e.stopPropagation() # prevent collapsibles being toggled by tick selection in compact mode
selectRow(row, rowIndex, e.shiftKey)
if options.collapsibleRenderer?
collapsibleControl = controlDiv.append('div')
.class('hx-data-table-collapsible-toggle')
.classed('hx-data-table-collapsible-disabled', not rowIsCollapsible)
collapsibleControl.append('i').class('hx-icon hx-icon-plus')
if rowIsCollapsible
# restore open collapsibles on render
if @_.expandedRows.has(options.rowIDLookup(row)) then toggleCollapsible(tr, row, true)
collapsibleControl.on 'click', 'hx.data-table.collapse-row', (e) =>
currentVis = toggleCollapsible(tr, row)
@emit('expandedrowschange', {value: @_.expandedRows.values(), row: row, rowValue: currentVis, cause: 'user'})
# populate the row
for cell, columnIndex in rowToArray(headers, row)
# Render the 'key' value using the headerCellRenderer
# XXX Breaking: Renderer
# keyDiv = div('hx-data-table-cell-key')
# .add(getColumnOption('headerCellRenderer', headers[columnIndex].id)(headers[columnIndex], headers))
keyDiv = div('hx-data-table-cell-key')
getColumnOption('headerCellRenderer', headers[columnIndex].id)(keyDiv.node(), headers[columnIndex], headers)
cellElem = tr.append('td').class('hx-data-table-cell')
columnMaxWidth = getColumnOption('maxWidth', headers[columnIndex].id)
if columnMaxWidth?
columnMaxWidth = parseInt(columnMaxWidth) + 'px'
cellElem
.style('max-width', columnMaxWidth)
.style('width', columnMaxWidth)
.style('min-width', columnMaxWidth)
# XXX Breaking: Renderer
# cellDiv = cellElem.add(keyDiv)
# .append('div').class('hx-data-table-cell-value')
# .add(getColumnOption('cellRenderer', headers[columnIndex].id)(cell, row)).node()
cellDiv = cellElem.add(keyDiv)
.append('div').class('hx-data-table-cell-value').node()
getColumnOption('cellRenderer', headers[columnIndex].id)(cellDiv, cell, row)
else # append the 'No Data' row.
tbody.append('tr').class('hx-data-table-row-no-data').append('td').attr('colspan', fullWidthColSpan).text(options.noDataMessage)
@updateSelected()
# retain the horizontal scroll unless the page has been changed.
# We only retain the horizontal scroll as when sorting/filtering on
# the first page it retains the vertical scroll which looks weird.
if options.useStickyHeaders and @page() is @_.oldPage
wrapperNode = selection.select('.hx-data-table-content > .hx-sticky-table-wrapper').node()
scrollLeft = wrapperNode.scrollLeft if options.retainHorizontalScrollOnRender
scrollTop = wrapperNode.scrollTop if options.retainVerticalScrollOnRender
# store the old page - only used for retaining the scroll positions
@_.oldPage = @page()
# remove the old content div, and slot in the new one
selection.select('.hx-data-table-content').insertAfter(container)
selection.select('.hx-data-table-content').remove()
selection.classed('hx-data-table-compact', ((options.compact is 'auto') and (selection.width() < collapseBreakPoint)) or (options.compact is true))
.classed('hx-data-table-show-search-above-content', options.showSearchAboveTable)
# set up the sticky headers
if options.useStickyHeaders
stickFirstColumn = options.selectEnabled or options.collapsibleRenderer?
stickyOpts = {stickFirstColumn: stickFirstColumn and (filteredCount is undefined or filteredCount > 0), fullWidth: true}
@_.stickyHeaders = new StickyTableHeaders(container.node(), stickyOpts)
# restore horizontal scroll position
selection.select('.hx-data-table-content > .hx-sticky-table-wrapper').node().scrollLeft = scrollLeft if scrollLeft?
selection.select('.hx-data-table-content > .hx-sticky-table-wrapper').node().scrollTop = scrollTop if scrollTop?
# hide the loading spinner as we're done rendering
selection.shallowSelect('.hx-data-table-loading').style('display', 'none')
@emit 'render'
cb?()
this
###
Feeds
A feed should be an object with the following functions:
{
headers: (cb) -> # returns a list of header objects ({name, id})
totalCount: (cb) -> # returns the total number of rows in the data set
rows: (range, cb) -> # returns the row data for the range object specified (range = { start, end, filter, sort, advancedSearch, useAdvancedSearch }) along with the filtered count
rowsForIds: (ids, lookupRow, cb) -> # returns the rows for the ids supplied
}
There are predefined feeds for objects and urls.
###
# Splits a search string into individual terms on runs of whitespace
whitespaceSplitRegex = /\s+/
# Strips leading and trailing whitespace from user-entered search input
stripLeadingAndTrailingWhitespaceRegex = /^\s+|\s+$/g
# Builds a single lowercase search string for a row: every cell value is
# passed through cellValueLookup and the results are joined with spaces.
getRowSearchTerm = (cellValueLookup, row) ->
  values = (cellValueLookup(v) for k, v of row.cells)
  values.join(' ').toLowerCase()
# Uppercases the first character of a string, leaving the rest untouched
# (safe for the empty string).
capitalize = (word) -> word.charAt(0).toUpperCase() + word.substring(1)
# Default term matcher: splits `term` on whitespace and reports whether any
# part matches `rowSearchTerm` (a string or an array of strings) under the
# given criteria. The criteria name selects the filter function by name,
# e.g. 'contains' dispatches to filter.filterContains.
defaultTermLookup = (term, rowSearchTerm, criteria = 'contains') ->
  lookupArr = if isString(rowSearchTerm) then [rowSearchTerm] else rowSearchTerm
  arr = term.replace(stripLeadingAndTrailingWhitespaceRegex,'')
    .split whitespaceSplitRegex
  # a part matches when the selected filter function returns a non-empty result
  validPart = find arr, (part) -> filter["filter" + capitalize(criteria)](lookupArr, part.toLowerCase()).length
  defined validPart
# Builds the predicate used by advanced search. `filters` is an array of
# groups (OR-ed together); each group is an array of filter rows
# ({column, term, criteria}) that must all match (AND semantics).
getAdvancedSearchFilter = (cellValueLookup = identity, termLookup = defaultTermLookup) ->
  (filters, row) ->
    # reuse the shared helper instead of duplicating its logic inline
    rowSearchTerm = getRowSearchTerm(cellValueLookup, row)
    # If term is empty this will return false
    validFilters = find filters, (groupedFilters) ->
      # a group passes when none of its filters fail to match
      invalidFilter = find groupedFilters, (currFilter) ->
        searchTerm = if currFilter.column is 'any' then rowSearchTerm else (cellValueLookup(row.cells[currFilter.column]) + '').toLowerCase()
        currFilter.term and not termLookup(currFilter.term.toLowerCase(), searchTerm, currFilter.criteria)
      not defined invalidFilter
    defined validFilters
# Returns the filtered row list for `term`, reusing `filterCache` when it
# was built for the same term. `term` is a string (regular filter) or an
# array (advanced search); an empty or absent term yields a copy of `rows`.
getFiltered = (rows, term, filterCache, filterCacheTerm, fn) ->
  hasTerm = !!term?.length
  cacheMissing = filterCache is undefined
  if hasTerm and (cacheMissing or filterCacheTerm isnt term)
    rows.filter fn
  else if cacheMissing or not hasTerm
    rows.slice()
  else
    filterCache
# Creates a data-table feed backed by an in-memory { headers, rows } object.
# Caches the filtered and sorted row arrays so repeated renders with the
# same term/sort are cheap.
objectFeed = (data, options) ->
  options = merge({
    cellValueLookup: identity
    termLookup: defaultTermLookup
    #XXX: should this provide more information - like the column id being sorted on?
    compare: compare
  }, options)
  options.filter ?= (term, row) -> options.termLookup(term.toLowerCase(), getRowSearchTerm(options.cellValueLookup, row))
  options.advancedSearch ?= getAdvancedSearchFilter(options.cellValueLookup, options.termLookup)
  # cached values
  filterCache = undefined
  filterCacheTerm = undefined
  sorted = undefined
  sortCacheTerm = {}
  rowsByIdMap = undefined
  {
    data: data # for debugging
    headers: (cb) -> cb(data.headers)
    totalCount: (cb) -> cb(data.rows.length)
    rows: (range, cb) ->
      # sorting mutates filterCache in place (see below), so a change of
      # sort column invalidates the filter cache as well
      if range.sort?.column isnt sortCacheTerm.column
        filterCache = undefined
      if range.useAdvancedSearch
        advancedSearchFilterFn = (row) -> options.advancedSearch(range.advancedSearch, row)
        filterCache = getFiltered(data.rows, range.advancedSearch, filterCache, filterCacheTerm, advancedSearchFilterFn)
        filterCacheTerm = range.advancedSearch
        sorted = undefined
      else
        filterFn = (row) -> options.filter(range.filter, row)
        filterCache = getFiltered(data.rows, range.filter, filterCache, filterCacheTerm, filterFn)
        filterCacheTerm = range.filter
        sorted = undefined
      # re-sort only when the sort column/direction changed since last call
      if sorted is undefined or sortCacheTerm.column isnt range.sort?.column or sortCacheTerm.direction isnt range.sort?.direction
        sorted = if range.sort and range.sort.column
          direction = if range.sort.direction is 'asc' then 1 else -1
          column = range.sort.column
          # in-place sort of the cached filtered rows
          filterCache.sort (r1, r2) -> direction * options.compare(r1.cells[column], r2.cells[column])
          filterCache
        else filterCache
      sortCacheTerm.column = range.sort?.column
      sortCacheTerm.direction = range.sort?.direction
      cb({rows: sorted[range.start..range.end], filteredCount: sorted.length})
    rowsForIds: (ids, lookupRow, cb) ->
      # lazily build the id -> row map on first use
      if rowsByIdMap is undefined
        rowsByIdMap = {}
        for row in data.rows
          rowsByIdMap[lookupRow(row)] = row
      cb(rowsByIdMap[id] for id in ids)
  }
# XXX Deprecated: alongside request
# Creates a data-table feed that fetches headers/rows/counts from a server
# endpoint via JSON POSTs. Responses for headers/totalCount may be cached
# when options.cache is true.
urlFeed = (url, options) ->
  #XXX: when new calls come in, ignore the ongoing request if there is one / cancel the request if possible
  options = merge({
    extra: undefined,
    cache: false
  }, options)
  # creates a function that might perform caching, depending on the options.cache value
  maybeCached = (fetcher) ->
    if options.cache
      value = undefined
      (cb) ->
        if value
          cb(value)
        else
          fetcher (res) ->
            value = res
            cb(value)
    else
      (cb) -> fetcher(cb)
  # wraps a node-style (err, value) callback: warn on error, always call cb
  jsonCallback = (cb) ->
    (err, value) ->
      logger.warn(err) if err
      cb(value)
  {
    url: url # for debugging
    headers: maybeCached (cb) ->
      json url, { type: 'headers', extra: options.extra }, jsonCallback(cb)
    totalCount: maybeCached (cb) ->
      json url, { type: 'totalCount', extra: options.extra }, (err, res) ->
        # res is undefined when the request errors; guard the access so the
        # error is logged by jsonCallback instead of throwing on res.count
        jsonCallback(cb)(err, res?.count)
    rows: (range, cb) ->
      json url, { type: 'rows', range: range, extra: options.extra }, jsonCallback(cb)
    rowsForIds: (ids, lookupRow, cb) ->
      json url, { type: 'rowsForIds', ids: ids, extra: options.extra }, jsonCallback(cb)
  }
# Fluent entry point: creates a detached div, attaches a DataTable to it and
# returns the selection. Renders immediately when a feed is supplied.
dataTable = (options) ->
  sel = div()
  table = new DataTable(sel, options)
  table.render() if options and options.feed
  sel
dataTable.objectFeed = objectFeed
dataTable.urlFeed = urlFeed
export {
dataTable,
DataTable,
objectFeed,
urlFeed,
getAdvancedSearchFilter,
}
import { userFacingText } from 'utils/user-facing-text'
import { div, select, detached, button, i, span } from 'utils/selection'
import {
debounce,
defined,
find,
flatten,
identity,
isArray,
isFunction,
isString,
merge,
randomId,
unique
} from 'utils/utils'
import { Set as HSet } from 'utils/set'
import {compare } from 'utils/sort'
import { EventEmitter } from 'utils/event-emitter'
import * as filter from 'utils/filter'
import logger from 'utils/logger'
import { json } from 'utils/request'
import { pickerBase as picker, PickerBase as Picker } from 'components/picker'
import { Toggle } from 'components/toggle'
import { StickyTableHeaders } from 'components/sticky-table-headers'
# Register the default user-facing strings for the data table; consumers can
# override any of these through the userFacingText module
userFacingText({
  dataTable: {
    addFilter: 'Add Filter',
    advancedSearch: 'Advanced Search',
    and: 'and',
    anyColumn: 'Any column'
    clearFilters: 'Clear Filters',
    clearSelection: 'clear selection',
    loading: 'Loading',
    noData: 'No Data',
    noSort: 'No Sort',
    or: 'or',
    rowsPerPage: 'Rows Per Page',
    search: 'Search',
    selectedRows: '$selected of $total selected.',
    sortBy: 'Sort By',
    contains: 'contains',
    excludes: 'does not contain',
    startsWith: 'starts with',
    fuzzy: 'fuzzy matches',
    regex: 'matches regex',
    exact: 'is exactly',
    greater: 'is greater than',
    less: 'is less than'
  }
})
fullWidthColSpan = 999 # the colspan used to make a cell display as an entire row
collapseBreakPoint = 480 # width in px below which compact: 'auto' switches to compact mode
# Looks up a per-column option `name` for column `id`, falling back to the
# table-level option when no column-specific value is set.
columnOptionLookup = (options, name, id) ->
  columnOpts = if options.columns isnt undefined then options.columns[id]
  if columnOpts isnt undefined and columnOpts[name] isnt undefined
    columnOpts[name]
  else
    options[name]
# Maps a list of criteria ids to picker items with localised display text,
# de-duplicating the input first.
toCriteriaItems = (list) ->
  unique(list).map (item) ->
    value: item
    text: userFacingText('dataTable', item)
# Validates a user-supplied list of advanced-search criteria against the set
# of known filter types; returns the list (or []) when valid, otherwise
# warns and returns [].
advancedSearchCriteriaValidate = (value) ->
  allowedTypes = filter.filterTypes()
  isAllowed = (c) -> allowedTypes.indexOf(c) isnt -1
  if value is undefined or (isArray(value) and value.every(isAllowed))
    value or []
  else if isArray(value)
    invalidTypes = value.filter (c) -> not isAllowed(c)
    logger.warn('Invalid Filter Criteria Specified:', invalidTypes, '\nPlease select a value from filterStringTypes()', allowedTypes)
    []
  else
    logger.warn('Expected an array of filter criteria but was passed:', value)
    []
# Splits `array` at `index`, returning [itemsBefore, item, itemsAfter].
splitArray = (array, index) ->
  before = array[0...index]
  after = array[index + 1..]
  [before, array[index], after]
# pagination block (the page selector and the rows per page selector)
# Builds the paginator UI (page picker, total-rows label, back/forward
# buttons) for `table`. Returns [containerSelection, pagePicker] so the
# render code can populate the picker's items/value later.
createPaginationBlock = (table) ->
  container = div('hx-data-table-paginator')
  pickerNode = container.append('button').class('hx-data-table-paginator-picker hx-btn hx-btn-invisible').node()
  dtPicker = new Picker(pickerNode, { dropdownOptions: { align: 'rbrt' } })
    .on 'change', 'hx.data-table', (d) =>
      # only react to user interaction, not programmatic value changes
      if d.cause is 'user'
        table.page(d.value.value, undefined, d.cause)
  totalRows = container.append('span').class('hx-data-table-paginator-total-rows')
  back = container.append('button').class('hx-data-table-paginator-back hx-btn hx-btn-invisible')
  back.append('i').class('hx-icon hx-icon-chevron-left')
  # back/forward are no-ops while the corresponding button is disabled
  back.on 'click', 'hx.data-table', => if not back.classed('hx-data-table-btn-disabled') then table.page(table.page()-1)
  forward = container.append('button').class('hx-data-table-paginator-forward hx-btn hx-btn-invisible')
  forward.append('i').class('hx-icon hx-icon-chevron-right')
  forward.on 'click', 'hx.data-table', => if not forward.classed('hx-data-table-btn-disabled') then table.page(table.page()+1)
  [container, dtPicker]
# pageSizeOptions select
# Builds the "rows per page" selector for `table`. Returns
# [containerSelection, pageSizePicker] so the render code can populate the
# picker's items/value later.
createPageSizeBlock = (table, options) ->
  container = div('hx-data-table-page-size')
  container.append('span').text(options.rowsPerPageText + ': ')
  node = container.append('button').class('hx-data-table-page-size-picker hx-btn hx-btn-invisible').node()
  dtPicker = new Picker(node, { dropdownOptions: { align: 'rbrt' } })
    .on 'change', 'hx.data-table', (d) ->
      # only react to user interaction, not programmatic value changes
      if d.cause is 'user'
        # applying a new page size jumps back to the first page
        table.pageSize(d.value.value, undefined, 'user')
        table.page(1, undefined, 'user')
  [container, dtPicker]
# Spacer element used to lay out the control panel sections
spacer = -> div('hx-data-table-spacer')
# Builds the advanced search UI inside `selection`: a view of filter groups
# (OR-ed together), each containing filter rows (AND-ed together) with
# and/or, column and criteria pickers, a debounced term input and a remove
# button. All state changes round-trip through dataTable.advancedSearch().
createAdvancedSearchView = (selection, dataTable, options) ->
  # Render individual row
  advancedSearchRowEnter = (filterGroup, filterGroupIndex) ->
    (filterRow, index, trueIndex) ->
      typePickerOptions =
        items: [
          { text: userFacingText('dataTable', 'and'), value: 'and' }
          { text: userFacingText('dataTable', 'or'), value: 'or' }
        ]
        fullWidth: true
      typePickerSel = picker(typePickerOptions)
        .classed('hx-btn-outline hx-data-table-advanced-search-type hx-section hx-fixed', true)
      typePickerSel.api('picker')
        .on 'change', (data) ->
          if data.cause is 'user'
            prevFilters = dataTable.advancedSearch()
            [leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
            # switching to 'or' splits the current group at this row;
            # switching to 'and' merges this group into the previous one
            newFilters = if data.value.value is 'or'
              [leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
              [leftFilterGroups..., leftFilters, [currFilter, rightFilters...], rightFilterGroups...]
            else
              [leftAllButLast..., leftLast] = leftFilterGroups
              [leftAllButLast..., [leftLast..., filterGroup...], rightFilterGroups...]
            dataTable.advancedSearch(newFilters)
      anyColumn = {
        text: options.anyColumnText
        value: 'any'
        anyColumn: true
      }
      columnItems = filterRow.headers.map (header) ->
        value: header.id
        orig: header
      # XXX Breaking: Renderer
      # columnRenderer = (cell) ->
      #   if cell.anyColumn then span().text(cell.text)
      #   else columnOptionLookup(options, 'headerCellRenderer', cell.orig.id)(cell.orig, filterRow.headers)
      columnRenderer = (element, cell) ->
        if cell.anyColumn then select(element).text(cell.text)
        else columnOptionLookup(options, 'headerCellRenderer', cell.orig.id)(element, cell.orig, filterRow.headers)
      columnPickerOptions =
        items: [anyColumn, columnItems...]
        renderer: columnRenderer
        fullWidth: true
      columnPickerSel = picker(columnPickerOptions)
        .classed('hx-btn-outline hx-data-table-advanced-search-column hx-section hx-fixed', true)
      columnPickerSel.api('picker')
        .on 'change', (data) ->
          if data.cause is 'user'
            prevFilters = dataTable.advancedSearch()
            [leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
            [leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
            newFilter = merge(currFilter, {
              column: data.value.value
            })
            # the previous criteria may not be valid for the new column
            delete newFilter.criteria
            columnCriteria = columnOptionLookup(options, 'advancedSearchCriteria', data.value.value) || []
            criteriaItems = ['contains', advancedSearchCriteriaValidate(columnCriteria)...]
            criteriaPickerSel.api('picker')
              .items(toCriteriaItems(criteriaItems))
            dataTable.advancedSearch([leftFilterGroups..., [leftFilters..., newFilter, rightFilters...], rightFilterGroups...])
      criteriaPickerOptions =
        items: toCriteriaItems(['contains', advancedSearchCriteriaValidate(options.advancedSearchCriteria)...])
        fullWidth: true
      criteriaPickerSel = picker(criteriaPickerOptions)
        .classed('hx-btn-outline hx-data-table-advanced-search-criteria hx-section hx-fixed', true)
      criteriaPickerSel.api('picker')
        .on 'change', (data) ->
          if data.cause is 'user'
            prevFilters = dataTable.advancedSearch()
            [leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
            [leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
            newFilter = merge(currFilter, {
              criteria: data.value.value
            })
            dataTable.advancedSearch([leftFilterGroups..., [leftFilters..., newFilter, rightFilters...], rightFilterGroups...])
      # static placeholder shown when only 'contains' applies to the column
      criteriaAnyPlaceholder = div('hx-data-table-advanced-search-criteria-placeholder hx-text-disabled hx-background-disabled')
        .text(userFacingText('dataTable', 'contains'))
      # debounce so the filter state is only updated after typing pauses
      debouncedInput = debounce 200, (e) ->
        prevFilters = dataTable.advancedSearch()
        [leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
        [leftFilters, currFilter, rightFilters] = splitArray(filterGroup, trueIndex)
        newFilter = merge(currFilter, {
          term: e.target.value
        })
        dataTable.advancedSearch([leftFilterGroups..., [leftFilters..., newFilter, rightFilters...], rightFilterGroups...])
      termInput = detached('input').attr('placeholder', options.advancedSearchPlaceholder)
        .class('hx-data-table-advanced-search-input hx-section')
        .attr('required', 'required')
        .on 'input', debouncedInput
      removeBtn = button('hx-btn hx-negative hx-btn-outline hx-data-table-advanced-search-remove')
        .add(i('hx-icon hx-icon-close'))
        .on 'click', ->
          prevFilters = dataTable.advancedSearch()
          [leftFilterGroups, filterGroup, rightFilterGroups] = splitArray(prevFilters, filterGroupIndex)
          [leftFilters, _, rightFilters] = splitArray(filterGroup, trueIndex)
          # removing the first row of the first group drops that row;
          # removing the first row of a later group merges the remainder
          # into the previous group; otherwise just drop the row
          newFilters = if trueIndex is 0 and filterGroupIndex is 0
            [rightFilters, rightFilterGroups...]
          else if trueIndex is 0
            [leftFilterGroup..., leftFilterGroupLast] = leftFilterGroups
            [_, filters...] = filterGroup
            [leftFilterGroup..., [leftFilterGroupLast..., filters...], rightFilterGroups...]
          else
            [leftFilterGroups..., [leftFilters..., rightFilters...], rightFilterGroups...]
          # drop empty groups; an empty filter set becomes undefined
          filterToUse = newFilters.filter((group) => group.length)
          dataTable.advancedSearch(if filterToUse.length then filterToUse else undefined)
      @append('div').class('hx-data-table-advanced-search-filter hx-section hx-input-group hx-input-group-full-width')
        .add(typePickerSel)
        .add(columnPickerSel)
        .add(criteriaAnyPlaceholder)
        .add(criteriaPickerSel)
        .add(div('hx-data-table-advanced-search-filter-input-container hx-input-group hx-no-pad hx-no-border')
          .add(termInput)
          .add(removeBtn))
        .node()
  # Sync an existing row's pickers/input with its current filter state
  advancedSearchRowUpdate = ({term, column, criteria}, element, index) ->
    filterRowSel = select(element)
    # NOTE(review): validContext is computed but never used — confirm intent
    validContext = if not term then 'negative' else undefined
    filterRowSel.select('.hx-data-table-advanced-search-type').api('picker')
      .value(if index is 0 then 'or' else 'and')
    trueColumn = column or 'any'
    filterRowSel.select('.hx-data-table-advanced-search-column').api('picker')
      .value(trueColumn)
    columnCriteria = columnOptionLookup(options, 'advancedSearchCriteria', column) || []
    criteriaItems = if trueColumn is 'any' then ['contains'] else ['contains', advancedSearchCriteriaValidate(columnCriteria)...]
    # hide the criteria picker (and show the placeholder) when only
    # 'contains' is available
    filterRowSel.select('.hx-data-table-advanced-search-criteria')
      .style('display', if criteriaItems.length is 1 then 'none' else 'block')
      .api('picker')
      .items(toCriteriaItems(criteriaItems))
      .value(criteria || 'contains')
    filterRowSel.select('.hx-data-table-advanced-search-criteria-placeholder')
      .style('display', if criteriaItems.length is 1 then 'block' else 'none')
    filterRowSel.select('.hx-data-table-advanced-search-input')
      .value(term or '')
  # Render grouped filters
  advancedSearchGroupEnter = (filterGroup, index, trueIndex) ->
    filterGroupSel = div('hx-data-table-advanced-search-filter-group')
    filterGroupView = filterGroupSel.view('.hx-data-table-advanced-search-filter')
      .enter(advancedSearchRowEnter(filterGroup, trueIndex))
      .update(advancedSearchRowUpdate)
    # stash the inner view on the element so the group update can reach it
    filterGroupSel.api('data-table.group', {
      filterGroupView
    })
    @append(filterGroupSel).node()
  advancedSearchGroupUpdate = (filterGroup, element, index) ->
    select(element).api('data-table.group').filterGroupView.apply(filterGroup)
  selection.view('.hx-data-table-advanced-search-filter-group')
    .enter(advancedSearchGroupEnter)
    .update(advancedSearchGroupUpdate)
# Interactive data table with pagination, sorting, filtering, advanced search,
# row selection and collapsible rows. All mutable state lives under @_ and
# every state change emits a corresponding '<name>change' event.
class DataTable extends EventEmitter
  # selector: node or CSS selector for the element to turn into a data table
  # options: overrides for the defaults merged below
  constructor: (selector, options) ->
    super()
    resolvedOptions = merge({
      allowHeaderWrap: false
      compact: 'auto' # 'auto', true, false
      displayMode: 'paginate' # 'paginate', 'all'
      feed: undefined
      showSearchAboveTable: false
      filter: undefined
      filterEnabled: true
      showAdvancedSearch: false
      advancedSearchEnabled: false
      advancedSearchCriteria: undefined
      advancedSearch: undefined
      pageSize: 15
      pageSizeOptions: undefined # supply an array of numbers to show the user
      retainHorizontalScrollOnRender: true
      retainVerticalScrollOnRender: false
      selectEnabled: false
      singleSelection: false
      sort: undefined
      sortEnabled: true
      highlightOnHover: true
      useStickyHeaders: true
      selectedRows: []
      expandedRows: []
      # functions used for getting row state
      rowIDLookup: (row) -> row.id
      rowEnabledLookup: (row) -> not row.disabled
      rowSelectableLookup: (row) -> true
      rowCollapsibleLookup: (row) -> false
      # functions for rendering
      collapsibleRenderer: undefined
      # XXX Breaking: Renderer
      # cellRenderer: (cell, row) -> span().text(cell)
      # headerCellRenderer: (cell, headers) -> span().text(cell.name)
      cellRenderer: (element, cell, row) -> select(element).text(cell)
      headerCellRenderer: (element, cell, headers) -> select(element).text(cell.name)
      # per column options (headerCellRenderer, cellRenderer, sortEnabled)
      columns: {}
      clearSelectionText: userFacingText('dataTable','clearSelection')
      loadingText: userFacingText('dataTable','loading')
      noDataMessage: userFacingText('dataTable','noData')
      noSortText: userFacingText('dataTable', 'noSort')
      rowsPerPageText: userFacingText('dataTable','rowsPerPage')
      searchPlaceholder: userFacingText('dataTable','search')
      selectedRowsText: userFacingText('dataTable', 'selectedRows', true)
      sortByText: userFacingText('dataTable','sortBy')
      addFilterText: userFacingText('dataTable', 'addFilter')
      clearFiltersText: userFacingText('dataTable', 'clearFilters')
      anyColumnText: userFacingText('dataTable', 'anyColumn')
      advancedSearchText: userFacingText('dataTable','advancedSearch')
      advancedSearchPlaceholder: userFacingText('dataTable', 'search')
    }, options)
    # cap the page size to keep renders manageable
    resolvedOptions.pageSize = Math.min resolvedOptions.pageSize, 1000
    # supplying an advanced search implies the feature is enabled and visible
    resolvedOptions.advancedSearchEnabled = true if resolvedOptions.advancedSearch
    resolvedOptions.showAdvancedSearch = true if resolvedOptions.advancedSearchEnabled
    selection = select(selector)
      .classed('hx-data-table', true)
      .api('data-table', this)
      .api(this)
    content = div('hx-data-table-content')
    # loading div
    loadingDiv = div('hx-data-table-loading')
      .add(div('hx-data-table-loading-inner')
        .add(div('hx-spinner'))
        .add(span().text(' ' + resolvedOptions.loadingText)))
    statusBar = div('hx-data-table-status-bar')
    statusBarText = span('hx-data-table-status-bar-text')
    statusBarClear = span('hx-data-table-status-bar-clear')
      .text(" (#{resolvedOptions.clearSelectionText})")
      .on 'click', 'hx.data-table', =>
        @_.selectedRows.clear()
        selection.select('.hx-data-table-content').selectAll('.hx-data-table-row-selected').classed('hx-data-table-row-selected', false)
        @updateSelected()
        @emit 'selectedrowsclear'
    controlPanelCompact = div('hx-data-table-control-panel-compact')
    controlPanelCompactToggle = button('hx-data-table-control-panel-compact-toggle hx-btn hx-btn-invisible')
      .add(i('hx-icon hx-icon-bars'))
      .on 'click', ->
        toggleElem = controlPanel
        if toggleElem.classed('hx-data-table-compact-hide')
          toggleElem.classed('hx-data-table-compact-hide', false)
            .style('height', '0px')
            .morph().with('expandv', 150)
            .then ->
              controlPanelCompact.classed('hx-data-table-control-panel-compact-open', true)
            .go()
        else
          toggleElem.morph().with('collapsev', 50)
            .then ->
              toggleElem.classed('hx-data-table-compact-hide', true)
              controlPanelCompact.classed('hx-data-table-control-panel-compact-open', false)
            .thenStyle('display', '')
            .go()
    controlPanel = div('hx-data-table-control-panel hx-data-table-compact-hide')
    controlPanelInner = div('hx-data-table-control-panel-inner')
    # compact sort - always on the page, only visible in compact mode (so we can just change the class and everything will work)
    compactSort = div('hx-data-table-sort')
      .classed('hx-data-table-sort-visible', resolvedOptions.sortEnabled)
      .add(span().text(resolvedOptions.sortByText + ': '))
    sortColPicker = new Picker(compactSort.append('button').class('hx-btn hx-btn-invisible').node())
    sortColPicker.on 'change', 'hx.data-table', (d) =>
      if d.cause is 'user' then @sort({column: sortColPicker.value().column, direction: sortColPicker.value().direction})
    filterContainer = div('hx-data-table-filter-container')
    # debounce keystrokes so we don't re-render on every character
    onInput = debounce 200, => @filter(filterInput.value(), undefined, 'user')
    filterInput = detached('input').class('hx-data-table-filter')
      .attr('placeholder', resolvedOptions.searchPlaceholder)
      .classed('hx-data-table-filter-visible', resolvedOptions.filterEnabled)
      .on 'input', 'hx.data-table', onInput
    advancedSearchContainer = div('hx-data-table-advanced-search-container')
    advancedSearchToggle = button('hx-data-table-advanced-search-toggle hx-btn hx-btn-invisible')
      .text(resolvedOptions.advancedSearchText)
    advancedSearchToggleButton = new Toggle(advancedSearchToggle.node())
    advancedSearchToggleButton.on 'change', (data) => @advancedSearchEnabled(data)
    advancedSearch = div('hx-data-table-advanced-search')
    advancedSearchView = createAdvancedSearchView(advancedSearch, this, resolvedOptions)
    advancedSearchButtons = div('hx-data-table-advanced-search-buttons')
    # appends an empty filter to the last filter group
    addFilter = =>
      currentFilters = @advancedSearch() or [[]]
      [previousFilterGroups..., lastFilterGroup] = currentFilters
      newLastFilterGroup = [lastFilterGroup..., {
        column: 'any',
        term: ''
      }]
      @advancedSearch([previousFilterGroups..., newLastFilterGroup])
    clearFilters = => @advancedSearch(undefined)
    advancedSearchAddFilterButton = button('hx-btn hx-positive hx-data-table-advanced-search-add-filter hx-data-table-advanced-search-button hx-btn-outline')
      .add(i('hx-data-table-advanced-search-icon hx-icon hx-icon-plus hx-text-positive'))
      .add(span().text(resolvedOptions.addFilterText))
      .on('click', addFilter)
    advancedSearchClearFilterButton = button('hx-btn hx-negative hx-data-table-advanced-search-clear-filters hx-data-table-advanced-search-button hx-btn-outline')
      .add(i('hx-data-table-advanced-search-icon hx-icon hx-icon-close hx-text-negative'))
      .add(span().text(resolvedOptions.clearFiltersText))
      .on('click', clearFilters)
    # We create multiple copies of these to show in different places
    # This makes it easier to change the UI as we can show/hide instead of moving them
    [pageSize, pageSizePicker] = createPageSizeBlock(this, resolvedOptions)
    [pageSizeBottom, pageSizePickerBottom] = createPageSizeBlock(this, resolvedOptions)
    [pagination, pagePicker] = createPaginationBlock(this)
    [paginationBottom, pagePickerBottom] = createPaginationBlock(this)
    [paginationCompact, pagePickerCompact] = createPaginationBlock(this)
    # The main pagination is hidden as the compact control panel contains a version of it
    pagination.classed('hx-data-table-compact-hide', true)
    controlPanelBottom = div('hx-data-table-control-panel-bottom')
    # Create the structure in one place
    # Some entities still make sense to be built individually (e.g. the loading div)
    selection
      .add(content)
      .add(statusBar
        .add(statusBarText)
        .add(statusBarClear))
      # Control panel displayed at the top for compact mode
      .add(controlPanelCompact
        .add(paginationCompact)
        .add(spacer())
        .add(controlPanelCompactToggle))
      # Main control panel - contains all the components
      .add(controlPanel
        .add(controlPanelInner
          .add(compactSort)
          .add(pagination)
          .add(pageSize)
          .add(spacer())
          .add(filterContainer
            .add(advancedSearchToggle)
            .add(filterInput)))
        # The advanced search container isn't in the main control panel as it is easier to style outside
        .add(advancedSearchContainer
          .add(advancedSearch)
          .add(advancedSearchButtons
            .add(advancedSearchAddFilterButton)
            .add(advancedSearchClearFilterButton))))
      # Bottom control panel - shown in compact mode and when the search is at the top
      .add(controlPanelBottom
        .add(spacer())
        .add(pageSizeBottom)
        .add(paginationBottom))
      # Add the loading div last - helps keep it on top of everything
      .add(loadingDiv)
    # 'private' variables
    @_ = {
      selection: selection
      options: resolvedOptions
      page: 1
      pagePickers: [pagePicker, pagePickerCompact, pagePickerBottom]
      pageSizePickers: [pageSizePicker, pageSizePickerBottom]
      statusBar: statusBar
      sortColPicker: sortColPicker
      selectedRows: new HSet(resolvedOptions.selectedRows) # holds the ids of the selected rows
      expandedRows: new HSet(resolvedOptions.expandedRows)
      renderedCollapsibles: {}
      compactState: (resolvedOptions.compact is 'auto' and selection.width() < collapseBreakPoint) or resolvedOptions.compact is true
      advancedSearchView: advancedSearchView
      advancedSearchToggleButton: advancedSearchToggleButton
    }
    # responsive page resize when compact is 'auto'
    selection.on 'resize', 'hx.data-table', =>
      selection.selectAll('.hx-data-table-collapsible-content-container').map (e) =>
        e.style('max-width', (parseInt(selection.style('width')) - @_.collapsibleSizeDiff) + 'px')
      state = (@compact() is 'auto' and selection.width() < collapseBreakPoint) or @compact() is true
      selection.classed 'hx-data-table-compact', state
      if @_.compactState isnt state
        @_.compactState = state
        @emit('compactchange', {value: @compact(), state: state, cause: 'user'})
    dtRandomId = randomId()
    # deal with shift being down - prevents the text in the table being selected when shift
    # selecting multiple rows (as it looks bad) but also means that data can be selected if required
    # XXX: make this work better / come up with a better solution
    select('body').on 'keydown', 'hx.data-table.shift.' + dtRandomId, (e) =>
      if e.shiftKey and @selectEnabled()
        selection.classed('hx-data-table-disable-text-selection', true)
    select('body').on 'keyup', 'hx.data-table.shift.' + dtRandomId, (e) =>
      if not e.shiftKey and @selectEnabled()
        selection.classed('hx-data-table-disable-text-selection', false)
  # Methods for changing the options
  #---------------------------------
  # general purpose function for setting / getting an option
  # Returns an accessor: call with a value to set (emits '<name>change' and
  # re-renders, returning `this` for chaining); call with no args to get.
  option = (name) ->
    (value, cb, cause) ->
      options = @_.options
      if arguments.length > 0
        options[name] = value
        @emit(name.toLowerCase() + 'change', {value: value, cause: (cause or 'api')})
        @render(cb)
        this
      else options[name]
  collapsibleRenderer: option('collapsibleRenderer')
  compact: option('compact')
  displayMode: option('displayMode')
  feed: option('feed')
  filter: option('filter')
  advancedSearch: option('advancedSearch')
  showAdvancedSearch: option('showAdvancedSearch')
  advancedSearchEnabled: option('advancedSearchEnabled')
  showSearchAboveTable: option('showSearchAboveTable')
  filterEnabled: option('filterEnabled')
  noDataMessage: option('noDataMessage')
  pageSize: option('pageSize')
  pageSizeOptions: option('pageSizeOptions')
  retainHorizontalScrollOnRender: option('retainHorizontalScrollOnRender')
  retainVerticalScrollOnRender: option('retainVerticalScrollOnRender')
  rowCollapsibleLookup: option('rowCollapsibleLookup')
  rowEnabledLookup: option('rowEnabledLookup')
  rowIDLookup: option('rowIDLookup')
  rowSelectableLookup: option('rowSelectableLookup')
  selectEnabled: option('selectEnabled')
  highlightOnHover: option('highlightOnHover')
  singleSelection: option('singleSelection')
  useStickyHeaders: option('useStickyHeaders')
  sort: option('sort')
# general purpose function for setting / getting a column option (or the default option of the column id is not specified)
columnOption = (name) ->
(columnId, value, cb) ->
options = @_.options
if arguments.length > 1 and isString(columnId)
options.columns[columnId] ?= {}
options.columns[columnId][name] = value
@emit(name.toLowerCase() + 'change', {column: columnId, value: value, cause: 'api'})
@render(cb)
this
else if arguments.length > 0
if isString(columnId) and options.columns[columnId]
options.columns[columnId][name]
else
options[name] = arguments[0]
@emit(name.toLowerCase() + 'change', {value: value, cause: 'api'})
@render(arguments[1])
this
else options[name]
advancedSearchCriteria: columnOption('advancedSearchCriteria')
allowHeaderWrap: columnOption('allowHeaderWrap')
cellRenderer: columnOption('cellRenderer')
headerCellRenderer: columnOption('headerCellRenderer')
sortEnabled: columnOption('sortEnabled')
  # function for setting / getting options that are only column specific and cannot be set for the whole table
  # Returns undefined when called without a string column id, or when the
  # column has no stored options.
  columnOnlyOption = (name) ->
    (columnId, value, cb) ->
      options = @_.options
      if isString(columnId)
        if arguments.length > 1
          # setter: store on the per-column options, notify, re-render
          options.columns[columnId] ?= {}
          options.columns[columnId][name] = value
          @emit(name.toLowerCase() + 'change', {column: columnId, value: value, cause: 'api'})
          @render(cb)
          this
        else if options.columns[columnId]
          options.columns[columnId][name]
  maxWidth: columnOnlyOption('maxWidth')
  # Methods for changing the state of the table
  # -------------------------------------------
  # Get/set the current page (1-based). Setting clamps to [1, numPages] when
  # the page count is known, emits 'pagechange' and re-renders.
  page: (value, cb, cause) ->
    if arguments.length > 0
      @_.page = Math.max(1, value)
      if @_.numPages?
        @_.page = Math.min @_.page, @_.numPages
      @emit('pagechange', {value: @_.page, cause: cause or 'api'})
      @render(cb)
      this
    else @_.page
  # Get/set the selected row ids. In single-selection mode only the first id
  # of the supplied array is kept.
  selectedRows: (value, cb) ->
    if arguments.length > 0 and not isFunction(value)
      # Deal with single select mode when setting the selected rows
      if @singleSelection() and isArray(value) and value.length
        value = [value[0]]
      @_.selectedRows = new HSet(value)
      newSelectedRows = @_.selectedRows.values()
      @emit('selectedrowschange', {value: newSelectedRows, cause: 'api'})
      # reset shift-selection anchoring since the selection was set via the API
      @_.userLastSelectedIndex = undefined
      @render(cb)
      this
    else
      @_.selectedRows.values()
  # Get/set the ids of rows whose collapsibles are expanded.
  expandedRows: (value, cb) ->
    if arguments.length > 0 and not isFunction(value)
      @_.expandedRows = new HSet(value)
      @render(cb)
      @emit('expandedrowschange', {value: @_.expandedRows.values(), cause: 'api'})
      this
    else
      @_.expandedRows.values()
  # Resolve full row objects for a set of row ids via the feed (async, cb only).
  rowsForIds: (ids, cb) ->
    if cb? then @feed().rowsForIds(ids, @rowIDLookup(), cb)
    this
# Methods that perform an action on the table
# -------------------------------------------
renderSuppressed: (value) ->
if arguments.length > 0
@_.renderSuppressed = value
this
else @_.renderSuppressed
# redraws the table
render: (cb) ->
if @_.renderSuppressed then return
feed = @feed()
# check that the feed has been defined - if it hasn't then there is no point in continuing
if feed is undefined or (feed.headers is undefined or feed.totalCount is undefined or feed.rows is undefined)
logger.warn('No feed specified when rendering data table')
return
selection = @_.selection
options = @_.options
# some utility functions
getColumnOption = (name, id) -> columnOptionLookup(options, name, id)
rowToArray = (headers, obj) -> headers.map (header) -> obj.cells[header.id]
# build the main structure of the table in a detached container
container = div('hx-data-table-content')
table = container.append('table').class('hx-data-table-table hx-table')
.classed('hx-table-no-hover', not options.highlightOnHover)
thead = table.append('thead').class('hx-data-table-head')
tbody = table.append('tbody').class('hx-data-table-body')
headerRow = thead.append('tr').class('hx-data-table-row')
# make the loading div visible
selection.select('.hx-data-table-loading').style('display', '')
advancedSearchVisibleAndEnabled = (not options.filterEnabled or options.showAdvancedSearch) and options.advancedSearchEnabled
filterSel = selection.select('.hx-data-table-filter')
.classed('hx-data-table-filter-visible', options.filterEnabled and not advancedSearchVisibleAndEnabled)
nextFilterValue = @filter()
prevFilterValue = filterSel.value()
if nextFilterValue isnt prevFilterValue
filterSel.value(nextFilterValue)
@_.advancedSearchToggleButton.value(options.advancedSearchEnabled)
selection.select('.hx-data-table-advanced-search-toggle')
.classed('hx-data-table-advanced-search-visible', options.filterEnabled and options.showAdvancedSearch)
selection.select('.hx-data-table-advanced-search-container')
.classed('hx-data-table-advanced-search-visible', advancedSearchVisibleAndEnabled)
selection.select('.hx-data-table-control-panel')
.classed('hx-data-table-filter-enabled', options.filterEnabled)
showCompactControlPanelToggle = options.filterEnabled or options.sortEnabled or options.advancedSearchEnabled or options.pageSizeOptions?.length
selection.select('.hx-data-table-control-panel-compact-toggle')
.classed('hx-data-table-control-panel-compact-toggle-visible', showCompactControlPanelToggle)
# load in the data needed
# XXX: how much of this could be split out so it's not re-defined every time render is called?
feed.headers (headers) =>
if advancedSearchVisibleAndEnabled
currentFilters = @advancedSearch() or []
@_.advancedSearchView.apply currentFilters.filter((x) -> x.length).map (filterGroup) ->
filterGroup.map (filterRow) ->
merge(filterRow, {
headers,
getColumnOption
})
selection.select('.hx-data-table-sort')
.classed('hx-data-table-sort-visible', options.sortEnabled or headers.some((header) -> getColumnOption('sortEnabled', header.id)))
feed.totalCount (totalCount) =>
if options.displayMode is 'paginate'
start = (@page() - 1) * options.pageSize
end = @page() * options.pageSize - 1
else
start = undefined
end = undefined
range = {
start: start,
end: end,
sort: @sort(),
filter: @filter(),
advancedSearch: @advancedSearch(),
useAdvancedSearch: options.showAdvancedSearch and options.advancedSearchEnabled
}
feed.rows range, ({rows, filteredCount}) =>
if options.displayMode is 'paginate'
multiPage = false
selection.classed('hx-data-table-infinite', filteredCount is undefined)
if filteredCount is undefined
@_.numPages = undefined
numText = (start+1) + ' - ' + (end+1)
multiPage = true
else
@_.numPages = Math.max(1, Math.ceil(filteredCount / options.pageSize))
if @page() > @_.numPages then @page(@_.numPages)
multiPage = @_.numPages > 1
if filteredCount > 0 and @_.numPages > 1
numText = 'of ' + filteredCount
items = for idx in [1..@_.numPages] by 1
num = idx * options.pageSize
text: (num + 1 - options.pageSize) + ' - ' + Math.min(num, filteredCount) # e.g. 1 - 15
value: idx
@_.pagePickers.forEach (picker) =>
picker
.items(items)
.value(@page())
selection.selectAll('.hx-data-table-paginator').classed('hx-data-table-paginator-visible', multiPage)
selection.selectAll('.hx-data-table-paginator-total-rows').text(numText or '')
selection.selectAll('.hx-data-table-paginator-back').classed('hx-data-table-btn-disabled', @page() is 1)
selection.selectAll('.hx-data-table-paginator-forward').classed('hx-data-table-btn-disabled', @page() is @_.numPages)
selection.select('.hx-data-table-control-panel-compact')
.classed('hx-data-table-control-panel-compact-visible', multiPage or showCompactControlPanelToggle)
selection.select('.hx-data-table-control-panel-bottom')
.classed('hx-data-table-control-panel-bottom-visible', multiPage or options.pageSizeOptions?.length)
selection.select('.hx-data-table-control-panel')
.classed('hx-data-table-control-panel-visible', multiPage or showCompactControlPanelToggle)
if headers.some((header) -> getColumnOption('sortEnabled', header.id))
currentSort = (@sort() or {})
# filter out columns that are not sortable so they don't show in the list for compact mode
sortColumns = flatten(headers
.map((header) -> if getColumnOption('sortEnabled', header.id)
[
{text: header.name, value: header.id + 'asc', column: header.id, direction: 'asc', cell: header}
{text: header.name, value: header.id + 'desc', column: header.id, direction: 'desc', cell: header}
])
.filter(defined))
# set the values for the compact sort control
@_.sortColPicker
# XXX Breaking: Renderer
# .renderer((option) ->
# if option.value
# getColumnOption('headerCellRenderer', option.cell.id)(option.cell, headers)
# .add(i('hx-data-table-compact-sort-arrow hx-icon hx-icon-chevron-' + (if option.direction is 'asc' then 'up' else 'down')))
# else
# span().text(option.text)
# )
.renderer((element, option) ->
if option.value
getColumnOption('headerCellRenderer', option.cell.id)(element, option.cell, headers)
select(element).append('i')
.class('hx-data-table-compact-sort-arrow hx-icon hx-icon-chevron-' + (if option.direction is 'asc' then 'up' else 'down'))
else
select(element).text(option.text)
)
.items([{text: options.noSortText, value: undefined}].concat sortColumns)
if currentSort.column and @_.sortColPicker.value().value isnt (currentSort.column + currentSort.direction)
@_.sortColPicker.value({value: currentSort.column + currentSort.direction})
# populate the page size picker if there are options set
selectPageSize = options.pageSizeOptions? and options.pageSizeOptions.length > 0
selection.selectAll('.hx-data-table-page-size').classed('hx-data-table-page-size-visible', selectPageSize)
if selectPageSize
if options.pageSizeOptions.indexOf(options.pageSize) is -1
options.pageSizeOptions.push options.pageSize
pageSizeOptions = options.pageSizeOptions
.sort(compare)
.map((item) -> {text: item, value: item})
@_.pageSizePickers.forEach (picker) ->
picker
.items(pageSizeOptions)
.value(options.pageSize)
# build the grouped header
if headers.some((header) -> header.groups?)
relevantHeaders = headers.filter((e) -> e.groups?).map((e) -> e.groups.length)
maxHeaderDepth = Math.max.apply(null, relevantHeaders)
# Map over to populate columns with groups of '' where not included
headerGroups = headers.map (e) ->
groups = e.groups or []
groups.push '' while groups.length < maxHeaderDepth
groups
for row in [maxHeaderDepth-1..0] by -1
groupedRow = headerRow.insertBefore 'tr'
groupedRow.append('th').class('hx-data-table-control') if options.selectEnabled or options.collapsibleRenderer?
count = 1
for column in [1..headerGroups.length] by 1
col = headerGroups[column]
prevCol = headerGroups[column-1]
if col? and prevCol?
parent = col.slice(row, maxHeaderDepth).toString()
prevParent = prevCol.slice(row, maxHeaderDepth).toString()
if column is headerGroups.length or col[row] isnt prevCol[row] or parent isnt prevParent
groupedRow.append('th')
.attr('colspan', count)
.class('hx-data-table-cell-grouped')
.text(prevCol[row])
count = 0
count++
# add the 'select all' checkbox to the header
if options.selectEnabled or options.collapsibleRenderer?
headerControlBox = headerRow.append('th').class('hx-data-table-control hx-table-head-no-border')
if options.selectEnabled and not options.singleSelection
headerCheckBox = headerControlBox.append('div').class('hx-data-table-checkbox')
.on 'click', 'hx.data-table', =>
if rows.length > 0
enabledRows = rows.filter (row) -> options.rowEnabledLookup(row)
selectMulti(0, rows.length - 1, not enabledRows.every((row) => @_.selectedRows.has(options.rowIDLookup(row))))
headerCheckBox.append('i').class('hx-icon hx-icon-check')
# build the header
headers.forEach (header, i) =>
cellDiv = headerRow.append('th').class('hx-data-table-cell')
.classed('hx-table-header-allow-wrap', getColumnOption('allowHeaderWrap', header.id))
cellDivContent = cellDiv.append('div').class('hx-data-table-cell-inner')
# XXX Breaking: Renderer
# cellDivContent
# .add(div('hx-data-table-title')
# .add(getColumnOption('headerCellRenderer', header.id)(header, headers)))
getColumnOption('headerCellRenderer', header.id)(
cellDivContent.append('span').class('hx-data-table-title').node(),
header,
headers,
)
if getColumnOption('sortEnabled', header.id)
cellDiv.classed('hx-data-table-cell-sort-enabled', true)
currentSort = @sort()
dirClass = if currentSort and currentSort.column is header.id
'hx-icon-sort-' + currentSort.direction + ' hx-data-table-sort-on'
else 'hx-icon-sort'
cellDivContent.append('i').class('hx-icon ' + dirClass + ' hx-data-table-sort-icon')
cellDiv.on 'click', 'hx.data-table', =>
currentSort = @sort() or {}
direction = if currentSort.column is header.id
if currentSort.direction is 'asc' then 'desc'
else 'asc'
column = if direction isnt undefined then header.id
@sort({column: column, direction: direction}, undefined, 'user')
@updateSelected = =>
parentFilter = (parent) ->
(sel) -> sel.node().parentNode is parent.node()
getSelectableRows = (parent) ->
parent
.selectAll('.hx-data-table-row')
.filter(parentFilter(parent))
.classed('hx-data-table-row-selected', false)
rowDivs = getSelectableRows(tbody)
leftHeaderBody = container.select('.hx-sticky-table-header-left').select('tbody')
checkBoxDivs = getSelectableRows(leftHeaderBody)
if @_.selectedRows.size > 0
for row, rowIndex in rows
if @_.selectedRows.has(options.rowIDLookup(row))
select(rowDivs.nodes[rowIndex]).classed('hx-data-table-row-selected', true)
if checkBoxDivs.nodes[rowIndex]?
select(checkBoxDivs.nodes[rowIndex]).classed('hx-data-table-row-selected', true)
pageHasSelection = tbody.selectAll('.hx-data-table-row-selected').size() > 0
selection.classed('hx-data-table-has-page-selection', pageHasSelection and not options.singleSelection)
selection.classed('hx-data-table-has-selection', @_.selectedRows.size > 0 and not options.singleSelection)
if totalCount isnt undefined
@_.statusBar
.select('.hx-data-table-status-bar-text')
.text(userFacingText.format(options.selectedRowsText, { selected: @_.selectedRows.size, total: totalCount }))
# handles multi row selection ('select all' and shift selection)
selectMulti = (start, end, force) =>
newRows = []
newRows.push rows[idx] for idx in [start..end] by 1
for row in newRows
if options.rowEnabledLookup(row) and options.rowSelectableLookup(row)
id = options.rowIDLookup(row)
@_.selectedRows[if force then 'add' else 'delete'](id)
@emit 'selectedrowschange', {row: row, rowValue: @_.selectedRows.has(id), value: @selectedRows(), cause: 'user'}
@updateSelected()
# handles row selection.
selectRow = (row, index, shiftDown) =>
if @_.userLastSelectedIndex?
if options.singleSelection and index isnt @_.userLastSelectedIndex
@_.selectedRows.clear()
else
# does the check for whether we're shift selecting and calls into selectMulti if we are
if shiftDown and index isnt @_.userLastSelectedIndex
force = @_.selectedRows.has(options.rowIDLookup(rows[@_.userLastSelectedIndex]))
if index > @_.userLastSelectedIndex then selectMulti(@_.userLastSelectedIndex + 1, index, force)
else selectMulti(index, @_.userLastSelectedIndex, force)
return
@_.userLastSelectedIndex = index
if options.rowSelectableLookup(row)
id = options.rowIDLookup(row)
deleteOrAdd = if @_.selectedRows.has(id) then 'delete' else 'add'
@_.selectedRows[deleteOrAdd](id)
@emit 'selectedrowschange', {row: row, rowValue: @_.selectedRows.has(id), value: @selectedRows(), cause: 'user'}
@updateSelected()
# Deal with collapsible rows
buildCollapsible = ->
contentRow = detached('tr').class('hx-data-table-collapsible-content-row')
hiddenRow = detached('tr').class('hx-data-table-collapsible-row-spacer')
# Add an empty cell so the sticky headers display correctly
contentRow.append('td').class('hx-data-table-collapsible-cell hx-data-table-collapsible-cell-empty')
# The div that the user will populate with the collapsibleRender function
contentDiv = contentRow.append('td').class('hx-data-table-collapsible-cell')
.attr('colspan',fullWidthColSpan)
.append('div').class('hx-data-table-collapsible-content-container')
.append('div').class('hx-data-table-collapsible-content')
{contentRow: contentRow, hiddenRow: hiddenRow, contentDiv: contentDiv}
toggleCollapsible = (node, row, force) =>
# once rows have been clicked once, the nodes are stored in the _.renderedCollapsibles object for re-use
rowId = options.rowIDLookup(row)
cc = @_.renderedCollapsibles[rowId] or buildCollapsible(row)
@_.renderedCollapsibles[rowId] = cc
# We always insert after here to make sure the nodes are added when setting the collapsible rows with the API
node.insertAfter(cc.hiddenRow).insertAfter(cc.contentRow)
currentVis = if force? then force else !cc.contentRow.classed('hx-data-table-collapsible-row-visible')
cc.contentRow.classed('hx-data-table-collapsible-row-visible', currentVis)
node.classed('hx-data-table-collapsible-row-visible', currentVis)
node.select('.hx-data-table-collapsible-toggle').select('i').class(if currentVis then 'hx-icon hx-icon-minus' else 'hx-icon hx-icon-plus')
# XXX Breaking: Renderer
# if currentVis then cc.contentDiv.append(options.collapsibleRenderer(row))
if currentVis
options.collapsibleRenderer(cc.contentDiv.node(), row)
else
@_.renderedCollapsibles[rowId].contentRow.remove()
@_.renderedCollapsibles[rowId].hiddenRow.remove()
delete @_.renderedCollapsibles[rowId]
@_.expandedRows[if currentVis then 'add' else 'delete'](rowId)
@_.stickyHeaders?.render()
@_.collapsibleSizeDiff = parseInt(selection.style('width')) - parseInt(select(cc.contentDiv.node().parentNode).style('width'))
currentVis
# build the rows
if filteredCount is undefined or filteredCount > 0
rows.forEach (row, rowIndex) =>
tr = tbody.append('tr').class('hx-data-table-row')
.classed('hx-data-table-row-selected', @_.selectedRows.has(options.rowIDLookup(row)))
.classed('hx-data-table-row-disabled', not options.rowEnabledLookup(row))
tr.on 'click', 'hx.data-table', (e) => @emit 'rowclick', {data: row, node: tr.node()}
rowIsCollapsible = options.rowCollapsibleLookup(row) # stored as we use it more than once
# used in compact mode to display the tick correctly without letting text flow behind it.
tr.classed('hx-data-table-row-select-enabled', options.selectEnabled)
if options.selectEnabled or options.collapsibleRenderer?
controlDiv = tr.append('th').class('hx-data-table-control')
if options.selectEnabled
checkbox = controlDiv.append('div').class('hx-data-table-checkbox')
checkbox.append('i').class('hx-icon hx-icon-check')
if options.rowEnabledLookup(row)
checkbox.on 'click', 'hx.data-table', (e) ->
e.stopPropagation() # prevent collapsibles being toggled by tick selection in compact mode
selectRow(row, rowIndex, e.shiftKey)
if options.collapsibleRenderer?
collapsibleControl = controlDiv.append('div')
.class('hx-data-table-collapsible-toggle')
.classed('hx-data-table-collapsible-disabled', not rowIsCollapsible)
collapsibleControl.append('i').class('hx-icon hx-icon-plus')
if rowIsCollapsible
# restore open collapsibles on render
if @_.expandedRows.has(options.rowIDLookup(row)) then toggleCollapsible(tr, row, true)
collapsibleControl.on 'click', 'hx.data-table.collapse-row', (e) =>
currentVis = toggleCollapsible(tr, row)
@emit('expandedrowschange', {value: @_.expandedRows.values(), row: row, rowValue: currentVis, cause: 'user'})
# populate the row
for cell, columnIndex in rowToArray(headers, row)
# Render the 'key' value using the headerCellRenderer
# XXX Breaking: Renderer
# keyDiv = div('hx-data-table-cell-key')
# .add(getColumnOption('headerCellRenderer', headers[columnIndex].id)(headers[columnIndex], headers))
keyDiv = div('hxPI:KEY:<KEY>END_PI-dataPI:KEY:<KEY>END_PI-tablePI:KEY:<KEY>END_PI-cellPI:KEY:<KEY>END_PI-key')
getColumnOption('headerCellRenderer', headers[columnIndex].id)(keyDiv.node(), headers[columnIndex], headers)
cellElem = tr.append('td').class('hx-data-table-cell')
columnMaxWidth = getColumnOption('maxWidth', headers[columnIndex].id)
if columnMaxWidth?
columnMaxWidth = parseInt(columnMaxWidth) + 'px'
cellElem
.style('max-width', columnMaxWidth)
.style('width', columnMaxWidth)
.style('min-width', columnMaxWidth)
# XXX Breaking: Renderer
# cellDiv = cellElem.add(keyDiv)
# .append('div').class('hx-data-table-cell-value')
# .add(getColumnOption('cellRenderer', headers[columnIndex].id)(cell, row)).node()
cellDiv = cellElem.add(keyDiv)
.append('div').class('hx-data-table-cell-value').node()
getColumnOption('cellRenderer', headers[columnIndex].id)(cellDiv, cell, row)
else # append the 'No Data' row.
tbody.append('tr').class('hx-data-table-row-no-data').append('td').attr('colspan', fullWidthColSpan).text(options.noDataMessage)
@updateSelected()
# retain the horizontal scroll unless the page has been changed.
# We only retain the horizontal scroll as when sorting/filtering on
# the first page it retains the vertical scroll which looks weird.
if options.useStickyHeaders and @page() is @_.oldPage
wrapperNode = selection.select('.hx-data-table-content > .hx-sticky-table-wrapper').node()
scrollLeft = wrapperNode.scrollLeft if options.retainHorizontalScrollOnRender
scrollTop = wrapperNode.scrollTop if options.retainVerticalScrollOnRender
# store the old page - only used for retaining the scroll positions
@_.oldPage = @page()
# remove the old content div, and slot in the new one
selection.select('.hx-data-table-content').insertAfter(container)
selection.select('.hx-data-table-content').remove()
selection.classed('hx-data-table-compact', ((options.compact is 'auto') and (selection.width() < collapseBreakPoint)) or (options.compact is true))
.classed('hx-data-table-show-search-above-content', options.showSearchAboveTable)
# set up the sticky headers
if options.useStickyHeaders
stickFirstColumn = options.selectEnabled or options.collapsibleRenderer?
stickyOpts = {stickFirstColumn: stickFirstColumn and (filteredCount is undefined or filteredCount > 0), fullWidth: true}
@_.stickyHeaders = new StickyTableHeaders(container.node(), stickyOpts)
# restore horizontal scroll position
selection.select('.hx-data-table-content > .hx-sticky-table-wrapper').node().scrollLeft = scrollLeft if scrollLeft?
selection.select('.hx-data-table-content > .hx-sticky-table-wrapper').node().scrollTop = scrollTop if scrollTop?
# hide the loading spinner as we're done rendering
selection.shallowSelect('.hx-data-table-loading').style('display', 'none')
@emit 'render'
cb?()
this
###
Feeds
A feed should be an object with the following functions:
{
headers: (cb) -> # returns a list of header objects ({name, id})
totalCount: (cb) -> # returns the total number of rows in the data set
rows: (range, cb) -> # returns the row data for the range object specified (range = { start, end, filter, sort }) along with the filtered count
rowsForIds: (ids, lookupRow, cb) -> # returns the rows for the ids supplied
}
There are predefined feeds for objects and urls.
###
whitespaceSplitRegex = /\s+/
stripLeadingAndTrailingWhitespaceRegex = /^\s+|\s+$/g
getRowSearchTerm = (cellValueLookup, row) ->
(v for k, v of row.cells).map(cellValueLookup).join(' ').toLowerCase()
capitalize = (str) ->
str.charAt(0).toUpperCase() + str.slice(1)
defaultTermLookup = (term, rowSearchTerm, criteria = 'contains') ->
lookupArr = if isString(rowSearchTerm) then [rowSearchTerm] else rowSearchTerm
arr = term.replace(stripLeadingAndTrailingWhitespaceRegex,'')
.split whitespaceSplitRegex
validPart = find arr, (part) -> filter["filter" + capitalize(criteria)](lookupArr, part.toLowerCase()).length
defined validPart
getAdvancedSearchFilter = (cellValueLookup = identity, termLookup = defaultTermLookup) ->
(filters, row) ->
rowSearchTerm = (v for k, v of row.cells).map(cellValueLookup).join(' ').toLowerCase()
# If term is empty this will return false
validFilters = find filters, (groupedFilters) ->
invalidFilter = find groupedFilters, (currFilter) ->
searchTerm = if currFilter.column is 'any' then rowSearchTerm else (cellValueLookup(row.cells[currFilter.column]) + '').toLowerCase()
currFilter.term and not termLookup(currFilter.term.toLowerCase(), searchTerm, currFilter.criteria)
not defined invalidFilter
defined validFilters
getFiltered = (rows, term, filterCache, filterCacheTerm, fn) ->
# term can be a string (regular filter) or an array (advanced search)
if (term?.length and (filterCache is undefined or filterCacheTerm isnt term))
rows.filter fn
else if filterCache is undefined or not term?.length
rows.slice()
else
filterCache
objectFeed = (data, options) ->
options = merge({
cellValueLookup: identity
termLookup: defaultTermLookup
#XXX: should this provide more information - like the column id being sorted on?
compare: compare
}, options)
options.filter ?= (term, row) -> options.termLookup(term.toLowerCase(), getRowSearchTerm(options.cellValueLookup, row))
options.advancedSearch ?= getAdvancedSearchFilter(options.cellValueLookup, options.termLookup)
# cached values
filterCache = undefined
filterCacheTerm = undefined
sorted = undefined
sortCacheTerm = {}
rowsByIdMap = undefined
{
data: data # for debugging
headers: (cb) -> cb(data.headers)
totalCount: (cb) -> cb(data.rows.length)
rows: (range, cb) ->
if range.sort?.column isnt sortCacheTerm.column
filterCache = undefined
if range.useAdvancedSearch
advancedSearchFilterFn = (row) -> options.advancedSearch(range.advancedSearch, row)
filterCache = getFiltered(data.rows, range.advancedSearch, filterCache, filterCacheTerm, advancedSearchFilterFn)
filterCacheTerm = range.advancedSearch
sorted = undefined
else
filterFn = (row) -> options.filter(range.filter, row)
filterCache = getFiltered(data.rows, range.filter, filterCache, filterCacheTerm, filterFn)
filterCacheTerm = range.filter
sorted = undefined
if sorted is undefined or sortCacheTerm.column isnt range.sort?.column or sortCacheTerm.direction isnt range.sort?.direction
sorted = if range.sort and range.sort.column
direction = if range.sort.direction is 'asc' then 1 else -1
column = range.sort.column
filterCache.sort (r1, r2) -> direction * options.compare(r1.cells[column], r2.cells[column])
filterCache
else filterCache
sortCacheTerm.column = range.sort?.column
sortCacheTerm.direction = range.sort?.direction
cb({rows: sorted[range.start..range.end], filteredCount: sorted.length})
rowsForIds: (ids, lookupRow, cb) ->
if rowsByIdMap is undefined
rowsByIdMap = {}
for row in data.rows
rowsByIdMap[lookupRow(row)] = row
cb(rowsByIdMap[id] for id in ids)
}
# XXX Deprecated: alongside request
urlFeed = (url, options) ->
#XXX: when new calls come in, ignore the ongoing request if there is one / cancel the request if possible
options = merge({
extra: undefined,
cache: false
}, options)
# creates a function that might perform caching, depending on the options.cache value
maybeCached = (fetcher) ->
if options.cache
value = undefined
(cb) ->
if value
cb(value)
else
fetcher (res) ->
value = res
cb(value)
else
(cb) -> fetcher(cb)
jsonCallback = (cb) ->
(err, value) ->
logger.warn(err) if err
cb(value)
{
url: url # for debugging
headers: maybeCached (cb) ->
json url, { type: 'headers', extra: options.extra }, jsonCallback(cb)
totalCount: maybeCached (cb) ->
json url, { type: 'totalCount', extra: options.extra }, (err, res) ->
jsonCallback(cb)(err, res.count)
rows: (range, cb) ->
json url, { type: 'rows', range: range, extra: options.extra }, jsonCallback(cb)
rowsForIds: (ids, lookupRow, cb) ->
json url, { type: 'rowsForIds', ids: ids, extra: options.extra }, jsonCallback(cb)
}
dataTable = (options) ->
selection = div()
dt = new DataTable(selection, options)
if options and options.feed then dt.render()
selection
dataTable.objectFeed = objectFeed
dataTable.urlFeed = urlFeed
export {
dataTable,
DataTable,
objectFeed,
urlFeed,
getAdvancedSearchFilter,
}
|
[
{
"context": "st (empty for new DCO)\n\t\tdelegateDraft = {name: 1, firstName: '', lastName: '', email: ''}\n\t\tscope.delegates =",
"end": 716,
"score": 0.8055120706558228,
"start": 707,
"tag": "NAME",
"value": "firstName"
},
{
"context": "ew DCO)\n\t\tdelegateDraft = {name: 1, firs... | js/directives/sidebar/dco/da-sidebar-dco-manage.coffee | SwarmCorp/razzledazzle | 0 | window.app.directive 'daSidebarDcoManage', ($timeout, $location, Project, User) ->
restrict: 'A'
templateUrl: 'partials/app/blocks/dco/sidebar-dco-manage.html'
replace: true
link: (scope) ->
scope.activeSection = 'basic'
# Draft object for delegates list (empty for new DCO)
if !scope.editMode
Project.getList()
.then (data)->
scope.loaderTrigger(false)
scope.projects = data
scope.projectData = if scope.projectData then scope.projectData else {}
scope.projectData['project_contract'] = 'https://docs.google.com/a/swarmcorp.com/document/d/1JoLaDf7jRAxYNwhG6avmKvD5euTWAesSyb8g8xuzHLI/edit'
# Draft object for delegates list (empty for new DCO)
delegateDraft = {name: 1, firstName: '', lastName: '', email: ''}
scope.delegates = if scope.projectData.delegates then scope.projectData.delegates else [angular.copy delegateDraft]
scope.addDelegate = ->
delegatesLength = scope.delegates.length
newDelegate = angular.copy delegateDraft
newDelegate.name += delegatesLength
scope.delegates.push newDelegate
scope.removeDelegate = (delegateIndex)->
scope.delegates.splice delegateIndex, 1
scope.createProject = ()->
basicData = scope.form.dco.basic
publicData = scope.form.dco.public
basicData.$setSubmitted()
publicData.$setSubmitted()
if basicData.$valid
scope.loaderTrigger(true)
scope.projectData['project_id'] = scope.projectData.project_name
scope.projectData['project_owner'] = User.info.id
scope.projectData['project_delegates'] = []
scope.projectData['project_contract'] = addURLProtocol scope.projectData.project_contract
scope.projectData['project_budget'] = addURLProtocol scope.projectData.project_budget
for delegate in scope.delegates
newObj = {}
newObj.first_name = delegate.firstName
newObj.last_name = delegate.lastName
newObj.email = delegate.email
scope.projectData['project_delegates'].push newObj
Project.createDCO scope.projectData.project_id, scope.projectData
.then ()->
scope.getDCO(true)
.then ->
scope.loaderTrigger(false)
scope.cancelProjectCreation()
$location.path('projects/'+scope.projectData.project_id)
scope.saveProject = ()->
basicData = scope.form.dco.basic
basicData.$setSubmitted()
if basicData.$valid
scope.loaderTrigger(true)
scope.projectData['project_contract'] = addURLProtocol scope.projectData.project_contract
scope.projectData['project_budget'] = addURLProtocol scope.projectData.project_budget
scope.projectData['project_delegates'] = []
for delegate in scope.delegates
newObj = {}
newObj.first_name = delegate.firstName
newObj.last_name = delegate.lastName
newObj.email = delegate.email
scope.projectData['project_delegates'].push newObj
Project.updateDCO scope.projectData.$id, scope.projectData
.then ()->
scope.loaderTrigger(false)
scope.cancelProjectEdit()
$location.path('projects/'+scope.projectData.project_id)
scope.updateLogo = (file)->
scope.projectData['project_logo'] = file[0].url
scope.updateCover = (file)->
scope.projectData['project_cover'] = file[0].url
scope.cancelProjectEdit = ()->
scope.editModeTrigger(false)
scope.projectCleanup()
scope.cancelProjectCreation = ->
scope.createModeTrigger(false)
scope.projectCleanup()
scope.switchSection = (section)->
scope.activeSection = section
scope.switchPublishedState = ->
if !scope.publicInfoFormValid
return scope.projectData['project_published'] = false
if !angular.isDefined(scope.projectData['project_published']) then scope.projectData['project_published'] = false
scope.projectData['project_published'] = !scope.projectData['project_published']
scope.$watch (-> scope.form.dco.basic.$valid), (formState)->
scope.basicInfoFormValid = formState
scope.$watch (-> scope.form.dco.public.$valid), (formState)->
scope.publicInfoFormValid = formState
addURLProtocol = (url)->
if !url then return null
if url.search(/^http[s]?\:\/\//) == -1
url = 'http://'+url
return url | 205467 | window.app.directive 'daSidebarDcoManage', ($timeout, $location, Project, User) ->
restrict: 'A'
templateUrl: 'partials/app/blocks/dco/sidebar-dco-manage.html'
replace: true
link: (scope) ->
scope.activeSection = 'basic'
# Draft object for delegates list (empty for new DCO)
if !scope.editMode
Project.getList()
.then (data)->
scope.loaderTrigger(false)
scope.projects = data
scope.projectData = if scope.projectData then scope.projectData else {}
scope.projectData['project_contract'] = 'https://docs.google.com/a/swarmcorp.com/document/d/1JoLaDf7jRAxYNwhG6avmKvD5euTWAesSyb8g8xuzHLI/edit'
# Draft object for delegates list (empty for new DCO)
delegateDraft = {name: 1, <NAME>: '', <NAME>: '', email: ''}
scope.delegates = if scope.projectData.delegates then scope.projectData.delegates else [angular.copy delegateDraft]
scope.addDelegate = ->
delegatesLength = scope.delegates.length
newDelegate = angular.copy delegateDraft
newDelegate.name += delegatesLength
scope.delegates.push newDelegate
scope.removeDelegate = (delegateIndex)->
scope.delegates.splice delegateIndex, 1
scope.createProject = ()->
basicData = scope.form.dco.basic
publicData = scope.form.dco.public
basicData.$setSubmitted()
publicData.$setSubmitted()
if basicData.$valid
scope.loaderTrigger(true)
scope.projectData['project_id'] = scope.projectData.project_name
scope.projectData['project_owner'] = User.info.id
scope.projectData['project_delegates'] = []
scope.projectData['project_contract'] = addURLProtocol scope.projectData.project_contract
scope.projectData['project_budget'] = addURLProtocol scope.projectData.project_budget
for delegate in scope.delegates
newObj = {}
newObj.first_name = delegate.firstName
newObj.last_name = delegate.lastName
newObj.email = delegate.email
scope.projectData['project_delegates'].push newObj
Project.createDCO scope.projectData.project_id, scope.projectData
.then ()->
scope.getDCO(true)
.then ->
scope.loaderTrigger(false)
scope.cancelProjectCreation()
$location.path('projects/'+scope.projectData.project_id)
scope.saveProject = ()->
basicData = scope.form.dco.basic
basicData.$setSubmitted()
if basicData.$valid
scope.loaderTrigger(true)
scope.projectData['project_contract'] = addURLProtocol scope.projectData.project_contract
scope.projectData['project_budget'] = addURLProtocol scope.projectData.project_budget
scope.projectData['project_delegates'] = []
for delegate in scope.delegates
newObj = {}
newObj.first_name = delegate.firstName
newObj.last_name = delegate.lastName
newObj.email = delegate.email
scope.projectData['project_delegates'].push newObj
Project.updateDCO scope.projectData.$id, scope.projectData
.then ()->
scope.loaderTrigger(false)
scope.cancelProjectEdit()
$location.path('projects/'+scope.projectData.project_id)
scope.updateLogo = (file)->
scope.projectData['project_logo'] = file[0].url
scope.updateCover = (file)->
scope.projectData['project_cover'] = file[0].url
scope.cancelProjectEdit = ()->
scope.editModeTrigger(false)
scope.projectCleanup()
scope.cancelProjectCreation = ->
scope.createModeTrigger(false)
scope.projectCleanup()
scope.switchSection = (section)->
scope.activeSection = section
scope.switchPublishedState = ->
if !scope.publicInfoFormValid
return scope.projectData['project_published'] = false
if !angular.isDefined(scope.projectData['project_published']) then scope.projectData['project_published'] = false
scope.projectData['project_published'] = !scope.projectData['project_published']
scope.$watch (-> scope.form.dco.basic.$valid), (formState)->
scope.basicInfoFormValid = formState
scope.$watch (-> scope.form.dco.public.$valid), (formState)->
scope.publicInfoFormValid = formState
addURLProtocol = (url)->
if !url then return null
if url.search(/^http[s]?\:\/\//) == -1
url = 'http://'+url
return url | true | window.app.directive 'daSidebarDcoManage', ($timeout, $location, Project, User) ->
restrict: 'A'
templateUrl: 'partials/app/blocks/dco/sidebar-dco-manage.html'
replace: true
link: (scope) ->
scope.activeSection = 'basic'
# Draft object for delegates list (empty for new DCO)
if !scope.editMode
Project.getList()
.then (data)->
scope.loaderTrigger(false)
scope.projects = data
scope.projectData = if scope.projectData then scope.projectData else {}
scope.projectData['project_contract'] = 'https://docs.google.com/a/swarmcorp.com/document/d/1JoLaDf7jRAxYNwhG6avmKvD5euTWAesSyb8g8xuzHLI/edit'
# Draft object for delegates list (empty for new DCO)
delegateDraft = {name: 1, PI:NAME:<NAME>END_PI: '', PI:NAME:<NAME>END_PI: '', email: ''}
scope.delegates = if scope.projectData.delegates then scope.projectData.delegates else [angular.copy delegateDraft]
scope.addDelegate = ->
delegatesLength = scope.delegates.length
newDelegate = angular.copy delegateDraft
newDelegate.name += delegatesLength
scope.delegates.push newDelegate
scope.removeDelegate = (delegateIndex)->
scope.delegates.splice delegateIndex, 1
scope.createProject = ()->
basicData = scope.form.dco.basic
publicData = scope.form.dco.public
basicData.$setSubmitted()
publicData.$setSubmitted()
if basicData.$valid
scope.loaderTrigger(true)
scope.projectData['project_id'] = scope.projectData.project_name
scope.projectData['project_owner'] = User.info.id
scope.projectData['project_delegates'] = []
scope.projectData['project_contract'] = addURLProtocol scope.projectData.project_contract
scope.projectData['project_budget'] = addURLProtocol scope.projectData.project_budget
for delegate in scope.delegates
newObj = {}
newObj.first_name = delegate.firstName
newObj.last_name = delegate.lastName
newObj.email = delegate.email
scope.projectData['project_delegates'].push newObj
Project.createDCO scope.projectData.project_id, scope.projectData
.then ()->
scope.getDCO(true)
.then ->
scope.loaderTrigger(false)
scope.cancelProjectCreation()
$location.path('projects/'+scope.projectData.project_id)
scope.saveProject = ()->
basicData = scope.form.dco.basic
basicData.$setSubmitted()
if basicData.$valid
scope.loaderTrigger(true)
scope.projectData['project_contract'] = addURLProtocol scope.projectData.project_contract
scope.projectData['project_budget'] = addURLProtocol scope.projectData.project_budget
scope.projectData['project_delegates'] = []
for delegate in scope.delegates
newObj = {}
newObj.first_name = delegate.firstName
newObj.last_name = delegate.lastName
newObj.email = delegate.email
scope.projectData['project_delegates'].push newObj
Project.updateDCO scope.projectData.$id, scope.projectData
.then ()->
scope.loaderTrigger(false)
scope.cancelProjectEdit()
$location.path('projects/'+scope.projectData.project_id)
scope.updateLogo = (file)->
scope.projectData['project_logo'] = file[0].url
scope.updateCover = (file)->
scope.projectData['project_cover'] = file[0].url
scope.cancelProjectEdit = ()->
scope.editModeTrigger(false)
scope.projectCleanup()
scope.cancelProjectCreation = ->
scope.createModeTrigger(false)
scope.projectCleanup()
scope.switchSection = (section)->
scope.activeSection = section
scope.switchPublishedState = ->
if !scope.publicInfoFormValid
return scope.projectData['project_published'] = false
if !angular.isDefined(scope.projectData['project_published']) then scope.projectData['project_published'] = false
scope.projectData['project_published'] = !scope.projectData['project_published']
scope.$watch (-> scope.form.dco.basic.$valid), (formState)->
scope.basicInfoFormValid = formState
scope.$watch (-> scope.form.dco.public.$valid), (formState)->
scope.publicInfoFormValid = formState
addURLProtocol = (url)->
if !url then return null
if url.search(/^http[s]?\:\/\//) == -1
url = 'http://'+url
return url |
[
{
"context": " doc.value.password = '***************'\n documents.push doc.value",
"end": 2504,
"score": 0.9984261393547058,
"start": 2504,
"tag": "PASSWORD",
"value": ""
}
] | server/lib/searchEngine.coffee | frankrousseau/cozy-databrowser | 0 | CoreClass = require './../helpers/CoreClass'
#********************************************************
#******************** CLASS SearchEngine ****************
#********************************************************
#@description : used to perform search with the cozy databrowser
class SearchEngine extends CoreClass
#------------------ CONSTRUCTOR CONSTANTS ----------------
@CLASS_NAME : "SearchEngine"
#------------------ PROTOTYPE CONSTANTS ----------------
#required dependencies
ASYNC = require 'async'
#----------------- OBJECT PARAMETERS ---------------
constructor : (@dataSystem) ->
@path = @dataSystem.PATH
#-------------- OBJECT METHODS ----------------------
search : (res, doctypes, pageParams) ->
requests = []
path =
requests.push (callback) => #0 -> metadoctypes
@dataSystem.getView @path.metadoctype.getallbyrelated, callback
#one request per doctype
#reqCount = 0
#for dt in req.query.doctype
requests.push (callback) => #1 to n -> requests
doctypeName = doctypes[0].toLowerCase()
if pageParams['query']?
searchPath = @path.search + doctypeName
@dataSystem.getView searchPath, callback, pageParams
else
requestPath = @path.request + doctypeName + @path.all
@dataSystem.getView requestPath, callback, pageParams
#reqCount++
ASYNC.parallel requests, (error, results) =>
documents = []
if error
res.send {'no_result' : @dataSystem.ERR_MSG.retrieveData}
@_logErrorInConsole error
else
#for dt in req.query.doctype
doctypeName = doctypes[0].toLowerCase()
newFields = @prepareMetadoctypeInfo results[0], doctypeName
#for result, index in results
#if index > 0
for doc in results[1]
if doc.key? and doc.value?
doctype = doc.value.docType.toLowerCase()
doc.value.idField = newFields.idField[doctype]
doc.value.descField = newFields.descField[doctype]
displayedName = newFields.displayName[doctype]
doc.value.displayName = displayedName
if doc.value.password?
doc.value.password = '***************'
documents.push doc.value
res.send(documents)
prepareMetadoctypeInfo : (metadoctypes, currentDoctype) ->
newFields =
idField : []
descField : []
displayName : []
for metadoctype in metadoctypes
if metadoctype.key?
identifier = metadoctype.value.identificationField || null
displayName = metadoctype.value.displayName|| null
key = metadoctype.key.toLowerCase() || null
if displayName? and key is currentDoctype
newFields.displayName[currentDoctype] = displayName
if identifier? and key is currentDoctype
newFields.idField[currentDoctype] = identifier
if metadoctype.value.fields?
descripter = metadoctype.value.fields
newFields.descField[currentDoctype] = descripter
return newFields
#********************************************************
module.exports = (param) -> return new SearchEngine param
| 53671 | CoreClass = require './../helpers/CoreClass'
#********************************************************
#******************** CLASS SearchEngine ****************
#********************************************************
#@description : used to perform search with the cozy databrowser
class SearchEngine extends CoreClass
#------------------ CONSTRUCTOR CONSTANTS ----------------
@CLASS_NAME : "SearchEngine"
#------------------ PROTOTYPE CONSTANTS ----------------
#required dependencies
ASYNC = require 'async'
#----------------- OBJECT PARAMETERS ---------------
constructor : (@dataSystem) ->
@path = @dataSystem.PATH
#-------------- OBJECT METHODS ----------------------
search : (res, doctypes, pageParams) ->
requests = []
path =
requests.push (callback) => #0 -> metadoctypes
@dataSystem.getView @path.metadoctype.getallbyrelated, callback
#one request per doctype
#reqCount = 0
#for dt in req.query.doctype
requests.push (callback) => #1 to n -> requests
doctypeName = doctypes[0].toLowerCase()
if pageParams['query']?
searchPath = @path.search + doctypeName
@dataSystem.getView searchPath, callback, pageParams
else
requestPath = @path.request + doctypeName + @path.all
@dataSystem.getView requestPath, callback, pageParams
#reqCount++
ASYNC.parallel requests, (error, results) =>
documents = []
if error
res.send {'no_result' : @dataSystem.ERR_MSG.retrieveData}
@_logErrorInConsole error
else
#for dt in req.query.doctype
doctypeName = doctypes[0].toLowerCase()
newFields = @prepareMetadoctypeInfo results[0], doctypeName
#for result, index in results
#if index > 0
for doc in results[1]
if doc.key? and doc.value?
doctype = doc.value.docType.toLowerCase()
doc.value.idField = newFields.idField[doctype]
doc.value.descField = newFields.descField[doctype]
displayedName = newFields.displayName[doctype]
doc.value.displayName = displayedName
if doc.value.password?
doc.value.password = '<PASSWORD>***************'
documents.push doc.value
res.send(documents)
prepareMetadoctypeInfo : (metadoctypes, currentDoctype) ->
newFields =
idField : []
descField : []
displayName : []
for metadoctype in metadoctypes
if metadoctype.key?
identifier = metadoctype.value.identificationField || null
displayName = metadoctype.value.displayName|| null
key = metadoctype.key.toLowerCase() || null
if displayName? and key is currentDoctype
newFields.displayName[currentDoctype] = displayName
if identifier? and key is currentDoctype
newFields.idField[currentDoctype] = identifier
if metadoctype.value.fields?
descripter = metadoctype.value.fields
newFields.descField[currentDoctype] = descripter
return newFields
#********************************************************
module.exports = (param) -> return new SearchEngine param
| true | CoreClass = require './../helpers/CoreClass'
#********************************************************
#******************** CLASS SearchEngine ****************
#********************************************************
#@description : used to perform search with the cozy databrowser
class SearchEngine extends CoreClass
    #------------------ CONSTRUCTOR CONSTANTS ----------------
    @CLASS_NAME : "SearchEngine"
    #------------------ PROTOTYPE CONSTANTS ----------------
    #required dependencies
    ASYNC = require 'async'
    #----------------- OBJECT PARAMETERS ---------------
    # @dataSystem : data-system client; exposes PATH, getView and ERR_MSG.
    constructor : (@dataSystem) ->
        @path = @dataSystem.PATH
    #-------------- OBJECT METHODS ----------------------
    # Run the search and answer on `res`.
    # res        : response object; receives either the decorated documents
    #              or {'no_result': ...} when the data system fails
    # doctypes   : array of doctype names; only doctypes[0] is used here
    # pageParams : paging/query options forwarded to the data system; the
    #              presence of a 'query' key selects the search view instead
    #              of the plain "all" request view
    search : (res, doctypes, pageParams) ->
        requests = []
        # NOTE(review): dangling assignment — `path =` has no right-hand side,
        # so it captures the value of the following requests.push expression.
        # Looks like a truncated or leftover line; confirm against history.
        path =
        requests.push (callback) => #0 -> metadoctypes
            @dataSystem.getView @path.metadoctype.getallbyrelated, callback
        #one request per doctype
        #reqCount = 0
        #for dt in req.query.doctype
        requests.push (callback) => #1 to n -> requests
            doctypeName = doctypes[0].toLowerCase()
            if pageParams['query']?
                searchPath = @path.search + doctypeName
                @dataSystem.getView searchPath, callback, pageParams
            else
                requestPath = @path.request + doctypeName + @path.all
                @dataSystem.getView requestPath, callback, pageParams
        #reqCount++
        # Both requests run concurrently; results[0] holds the metadoctype
        # rows, results[1] the documents of the requested doctype.
        ASYNC.parallel requests, (error, results) =>
            documents = []
            if error
                res.send {'no_result' : @dataSystem.ERR_MSG.retrieveData}
                @_logErrorInConsole error
            else
                #for dt in req.query.doctype
                doctypeName = doctypes[0].toLowerCase()
                newFields = @prepareMetadoctypeInfo results[0], doctypeName
                #for result, index in results
                    #if index > 0
                for doc in results[1]
                    if doc.key? and doc.value?
                        doctype = doc.value.docType.toLowerCase()
                        doc.value.idField = newFields.idField[doctype]
                        doc.value.descField = newFields.descField[doctype]
                        displayedName = newFields.displayName[doctype]
                        doc.value.displayName = displayedName
                        # Mask password fields before they leave the server.
                        # NOTE(review): this literal looks corrupted by text
                        # extraction; presumably it was a plain asterisk mask
                        # — confirm against the original file.
                        if doc.value.password?
                            doc.value.password = 'PI:PASSWORD:<PASSWORD>END_PI***************'
                        documents.push doc.value
                res.send(documents)
    # Collect, per doctype, the identification field, description fields and
    # display name declared by the metadoctypes.
    # metadoctypes   : rows from the metadoctype view ({key, value} objects)
    # currentDoctype : lower-cased doctype name used as the lookup key
    # Returns {idField, descField, displayName}, each indexed by doctype name.
    # NOTE(review): the three containers are array literals but are indexed
    # with string keys — plain objects ({}) would express the intent better.
    prepareMetadoctypeInfo : (metadoctypes, currentDoctype) ->
        newFields =
            idField : []
            descField : []
            displayName : []
        for metadoctype in metadoctypes
            if metadoctype.key?
                identifier = metadoctype.value.identificationField || null
                displayName = metadoctype.value.displayName|| null
                key = metadoctype.key.toLowerCase() || null
                if displayName? and key is currentDoctype
                    newFields.displayName[currentDoctype] = displayName
                if identifier? and key is currentDoctype
                    newFields.idField[currentDoctype] = identifier
                # NOTE(review): descField is filled for ANY metadoctype that
                # declares fields, regardless of key match — unlike the two
                # checks above; confirm whether that is intentional.
                if metadoctype.value.fields?
                    descripter = metadoctype.value.fields
                    newFields.descField[currentDoctype] = descripter
        return newFields
#********************************************************
# Module factory: construct a SearchEngine with the given data-system client.
module.exports = (param) -> new SearchEngine param
|
[
{
"context": " EMP_NODE_COOKIE : 'ewpcool'\n\n bash_path_key:'emp-debugger.path'\n\n EMP_MAKE_CMD_KEY: 'emp-debugger.emp-make'\n E",
"end": 2146,
"score": 0.8480503559112549,
"start": 2133,
"tag": "KEY",
"value": "debugger.path"
},
{
"context": "-debugger.emp-tmp-app-name'\n EMP_... | lib/exports/emp.coffee | jcrom/emp-debugger | 2 | # macro defined
fs = require 'fs'
path = require 'path'
os = require 'os'
remote = require 'remote'
dialog = remote.Dialog
module.exports =
parser_beam_file_mod : 'atom_pl_parse_json'
# ----------------------------- Atom Config --------------------------------
EMP_APP_EXPORT_UI_PATH :'emp-template-management.Store-UI-Snippet-Export-Path'
EMP_APP_IMPORT_UI_PATH :'emp-template-management.Store-UI-Snippet-Import-Path'
EMP_APP_STORE_UI_PATH :'emp-template-management.Store-UI-Snippet-Path'
EMP_APP_WIZARD_APP_P :'emp-debugger.Default-App-Wizard-App-Path'
EMP_APP_WIZARD_EWP_P :'emp-debugger.Default-App-Wizard-Ewp-Path'
EMP_TEMP_WIZARD_NAME :'emp-debugger.Default-Template-App-Name'
EMP_TEMP_WIZARD_PATH :'emp-debugger.Default-Template-App-Path'
EMP_TEMP_WIZARD_PORT :'emp-debugger.Default-Template-App-Port'
EMP_TEMP_WIZARD_APORT :'emp-debugger.Default-Template-App-APort'
EMP_LINK_UNIGNORE_CONF :'emp-debugger.EMP-Link-Unignore'
EMP_LOG_LINE_LIMIT :'emp-debugger.defLimitOfLogLine'
EMP_LOG_LINE_LIMIT_SELECTED :'emp-debugger.defLimitOfLogLineSelected'
EMP_LOG_LEVEL_SELECTED :'emp-debugger.defLogLevelSelected'
EMP_ERL_SOURCE_PATH: 'emp-debugger.erl_source_path'
EMP_EWP_SOURE_PATH: 'emp-debugger.ewp_source_path'
EMP_YAWS_SOURCE_PATH: 'emp-debugger.yaws_source_path'
EMP_ERL_INDENT_TAB_LEN: 'emp-debugger.defErlangIndentTabLength'
EMP_ERL_INDENT_USE_TAB: 'emp-debugger.defErlangIndentUseTab'
EMP_FILTER_FLAG:"emp-debugger.defLogFilterFlag"
EMP_DEF_API_DATA:"emp-debugger.defAPIData"
EMP_LOG_SHOW_FIND_RESULT:'emp-debugger.defOnlyShowFindedResult'
EMP_LOG_SCROLL_TO_BOTTOM:'emp-debugger.defScrollToBottom'
EMP_LOG_TIMER:'emp-debugger.defLogTimer'
DEFAULT_LESS_NAME:'untitled.less'
DEFAULT_OUT_LESS_PATH:'../css/untitled.css'
EMP_DEF_LINE_LIMIT_SELECTED : 1000
EMP_DEF_LOG_LINE_LIMIT:[500, 1000, 2000, 5000, 10000]
EMP_DEF_LOG_TYPE:"lua"
EMP_DEF_CLIENT:"All"
EMP_NODE_NAME :'emp-debugger.Default-EMP-NODE-NAME'
EMP_NODE_COOKIE :'emp-debugger.Default-EMP-NODE-COOKIE'
EMP_NODE_NAME : 'ebank@localhost'
EMP_NODE_COOKIE : 'ewpcool'
bash_path_key:'emp-debugger.path'
EMP_MAKE_CMD_KEY: 'emp-debugger.emp-make'
EMP_STAET_SCRIPT_KEY: 'emp-debugger.emp-start-script'
EMP_STAET_FRONT_SCRIPT_KEY: 'emp-debugger.emp-start-front-script'
EMP_CONFIG_KEY: 'emp-debugger.emp-config'
EMP_CONFIG_ARG_KEY: 'emp-debugger.emp-config-arg'
EMP_IMPORT_MENU_KEY : 'emp-debugger.emp-import-menu'
EMP_TMPORARY_APP_NAME:'emp-debugger.emp-tmp-app-name'
EMP_CMAKE_KEY : 'emp-debugger.emp-c_app'
EMP_LESS_IMPORT_FILES:'emp-debugger.emp-less-import-files'
EMP_LOG_GLOBAL_COLOR:'emp-debugger.emp-log-global-color'
EMP_OFFLINE_DIR: 'emp-debugger.emp-offline-dev-path'
EMP_OFFLINE_RELATE_DIR: 'emp-debugger.emp-offline-relate-path'
EMP_OFFLINE_RELATE_PATH_V: "public/www/resource_dev"
EMP_VIEW_FILTER_IGNORE:["*.json", "*.lua", "*.png", "*.jpg", "*.css", "*.js"]
EMP_SCRIPT_FILTER_IGNORE:["*.json", "*.xhtml", "*.png", "*.jpg", "*.css"]
EMP_CHANNEL_ADAPTER_PLAT:'emp-debugger.emp-channel-adapter-platform'
EMP_CHANNEL_ADAPTER_RES:'emp-debugger.emp-channel-adapter-resolution'
OS_DARWIN:'darwin'
OS_LINUX:'linux'
OS_PATH:'PATH'
COL_KEY:"collections"
CHA_KEY:"channels"
# front template macro
DEF_APP_FILE:'.app'
DEF_PORT_FILE:'.port'
DEF_APORT_FILE:'.aport'
COL_ROOT_TYPE:1
COL_CH_TYPE:0
ITEM_CHA_TYPE:1
ITEM_COL_TYPE:0
# channel 回调类型
CHANNEL_ADAPTER:'channel_adapter'
CHANNEL_NEW_CALLBACK: 'new_callback'
CHANNEL_CALLBACK: 'channel_callback'
CHANNEL_PROXY: 'channel_proxy'
EMP_CHANNEL_URI : 'emp://wizard'
EMP_APP_URI : 'emp://app_wizard'
EMP_TEMP_URI : 'emp://template_wizard'
EMP_FRONT_PAGE_URI : 'emp://front_page_wizard'
EMP_CONFIG_URI : 'emp://emp_config'
EMP_API_DEBUG_URI : 'emp://emp_api_debug'
CHA_WIZARD_VIEW: 'EmpView'
APP_WIZARD_VIEW: 'EmpAppView'
TEMP_WIZARD_VIEW: 'EmpTemplateView'
FRONT_PAGE_WIZARD_VIEW: 'EmpFrontPageView'
EMP_CONFIG_VIEW : 'EmpConfigView'
EMP_API_VIEW: 'EMPAPIView'
CHA_CODE_DIR:'src'
CHA_PUBLIC_DIR:'public'
CHA_FRONT_MENU_DIR:'public/menu'
CHA_FRONT_VITUAL_COL:'virtual_collection'
# channel adapter 代码生成过程中的取值类型,
# 分别为 params, arg, session
ADAPTER_ARG_M_P:'param'
ADAPTER_ARG_M_A:'arg'
ADAPTER_ARG_M_S:'session'
# 离线资源平台
ADAPTER_PLT_D: 'common'
ADAPTER_PLT_I: 'iphone'
ADAPTER_PLT_A: 'android'
ADAPTER_PLT_W: 'wphone'
# 离线资源分辨率
ADAPTER_PLT_R: 'default'
ADAPTER_PLT_R1: '320-480'
ADAPTER_PLT_R2: '640-960'
ADAPTER_PLT_R3: '640-1136'
ADAPTER_PLT_R4: '750-1334'
ADAPTER_PLT_R5: '768-1024'
ADAPTER_PLT_R6: '1080-1920'
ADAPTER_PLT_R7: '1536-2048'
# 普通离线资源包,头名称
ADAPTER_PACKAGE_HEAD: 'package'
ADAPTER_UNION_PACKAGE_CHEAD:"batch-normal__"
ADAPTER_UNION_PACKAGE_NAME: "batch-normal-package.zip"
DEFAULT_ZIP_FULE_NAME:"default.zip"
# channel 插件包相关定义
PACKAGE_EXTENSION_BEAM:".beam"
PACKAGE_EXTENSION_BEAM_TYPE:"beam"
PACKAGE_CHANNEL_EBIN_DIR: "ebin"
PACKAGE_CHANNEL_CS_DIR: "public/cs/channels"
PACKAGE_NORMAL_CHANNEL:"normal_channel"
PACKAGE_SPEC:"SPEC"
PACKAGE_CHECKSUM:"CHECKSUM"
# channel 管理页面分页名称
GEN_VIEW:'gen_view'
ADD_CHA_VIEW:'add_cha_view'
ADD_COL_VIEW:'add_col_view'
ATOM_CONF_CHANNEL_DIR_KEY:'emp-debugger.Channel-config-file'
ATOM_CONF_CHANNEL_DIR_DEFAULT:'/config/channel.conf'
ATOM_EMP_APGE_ENTRANCE:'/public/www/entrance.xhtml'
EMP_ENTRANCE_FIRST_ID: '${first_cha_id}'
EMP_ENTRANCE_FIRST_TRANCODE: '${first_tran_code}'
# EMP_ENTRANCE_NEXT_ID: '${next_cha_id}'
EMP_ENTRANCE_NEXT_TRANCODE: '${next_tran_code}'
# adapter template
STATIC_TEMPLATE_DIR:"/templates/"
STATIC_API_DIR:"/templates/api/api_desc.json"
STATIC_APP_TEMPLATE:"/templates/app/"
STATIC_DEF_APP_TEMPLATE:"/templates/app/5.3"
STATIC_CHANNEL_TEMPLATE:"/templates/channel/"
CHANNEL_ADAPTER_DIR:'adapter'
CHANNEL_NEW_CALLBACK_DIR: 'new_callback'
CHANNEL_CALLBACK_DIR: 'callback'
# STATIC_UI_CSS_TEMPLATE_PATH:"/templates/css/"
STATIC_UI_CSS_TEMPLATE:"/templates/css/eui.css"
STATIC_UI_LESS_TEMPLATE: "/templates/less/ui-variables.less"
STATIC_UI_CSS_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/css/eui.css"
STATIC_UI_CSS_TEMPLATE_DEST_DIR:"public/www/resource_dev/common/css/"
STATIC_UI_LESS_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/less/ui-variables.less"
STATIC_UI_LESS_TEMPLATE_DEST_DIR:"public/www/resource_dev/common/less/"
STATIC_UI_CSS_DEF_FILE:"eui.css"
STATIC_UI_LUA_TEMPLATE:"/templates/lua/ert.lua"
STATIC_UI_LUA_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/lua/ert.lua"
STATIC_UI_LUA_PATH:"/templates/lua/"
STATIC_UI_LUA_DEST_PATH:"public/www/resource_dev/common/lua/"
STATIC_UI_JS_TEMPLATE:"/templates/js/"
STATIC_UI_JS_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/js/"
STATIC_MOB_HTML_PATH:"/templates/mobile/"
STATIC_MOB_HTML_TEMPLATE:"/templates/mobile/html/default.html"
NATIVE_CHANNEL_DEFAULT_STYLE:"/templates/css/atom_default_style.css"
DESTINATION_CHANNEL_DEFAULT_STYLE:"public/www/resource_dev/common/css/atom_default_style.css"
STATIC_ERL_TEMPLATE : "/channel_adapter_erl_template.tmp"
STATIC_ERL_FUN_TEMPLATE : "/channel_adapter_erl_function.tmp"
STATIC_CS_TEMPLATE : "/channel_adapter_cs_template.tmp"
STATIC_OFF_TEMPLATE : "/channel_adapter_xHtml_template.tmp"
STATIC_CSS_TEMPLATE : "/channel_adapter_css_template.tmp"
STATIC_LUA_TEMPLATE : "/channel_adapter_lua_template.tmp"
STATIC_LESS_TEMPLATE : "/channel_less_template.tmp"
STATIC_WEBVIEW_TEMPLATE : "/channel_webview_template.tmp"
STATIC_APP_FRONT_TEMP:"/templates/temp_app/"
COMMON_DIR_LIST :["images", "css", "lua", "xhtml","channels"]
OFF_CHA_DIR_LIST : ["xhtml", "css", "lua", "images", "json", "less"]
OFF_CHA_PLT_LIST:["wp", "iphone", "android", "common"]
OFF_BASE_DIR_LIST:["default"]
OFF_DEFAULT_BASE:"channels"
OFF_COMMON_BASE:"default"
OFF_COMMON_HTML: "webview"
OFF_WEBVIEW_DEF_APP: "app"
OFF_STORE_HTML_PATH: "module"
OFF_HTML_LIST:["js", "css", "images", "html", "module", "fonts", "jsMobile"]
OFF_HTML_CHI_DIR_LIST : ["html", "css", "js", "json"]
OFF_EXTENSION_ERL: "erl"
OFF_EXTENSION_CS: "cs"
OFF_EXTENSION_JSON: "json"
OFF_EXTENSION_XHTML:"xhtml"
OFF_EXTENSION_LUA:"lua"
OFF_EXTENSION_CSS: "css"
OFF_EXTENSION_JS: "js"
OFF_EXTENSION_HTML:"html"
OFF_EXTENSION_LESS: "less"
# the type of emp step
EMP_ADD_CHA_VIEW_TYPE_EMP: 'emp'
EMP_ADD_CHA_VIEW_TYPE_HTML: 'html'
DEFAULT_COL_ITEM:'[{item_id,"$cha_id"},{item_type,$itype},{menu_order,$order}]'
DEFAULT_CHA_TMP:'\n{channels,[[{id,\"${channel}\"},\r\n'+
' {app,\"${app}\"},\r\n'+
' {name,"${name}"},\r\n'+
' {entry,channel_adapter},\r\n'+
' {views, ${views}},\r\n'+
' {props,${props}},\r\n'+
' {state,${state}}]'
ADAPTER_REQUEST_PARAMS_FORMAT:" {'$key', $value}"
ADAPTER_VARIABLE : " $var = $getter(\"$key\", []),\r\n"
REPLACE_GETTER : "\\$getter"
ADAPTER_REQUEST_PARAMS: "{'$key', $value}"
DEFAULT_EXT_LUA : '.lua'
DEFAULT_EXT_CSS : '.css'
DEFAULT_EXT_XHTML :'.xhtml'
DEFAULT_EXT_JS :'.js'
DEFAULT_EXT_JSON :'.json'
DEFAULT_EXT_ERL: '.erl'
# emp debugger 实体文件路径
DEFAULT_TEMP_HEADER:'<!--<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>-->\n'
DEFAULT_LUATEMP_HEADER:'--<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>--\n'
DEFAULT_CSSTEMP_HEADER:'/*<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>*/\n'
DEFAULT_HEADER:'<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>\n'
DEFAULT_HEADER_CON:'<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>'
# Less import 文件格式
EMP_LESS_IMPORT_HEADER:'@import \"${file_path}\";\n'
EMP_CSS_IMPORT_HEADER:'@import (inline) \"${file_path}\";\n'
# 提示信息
EMP_PACKAGE_UNION_PKG_SUCCESS:"普通资源整合包,打包成功~"
EMP_PACKAGE_PKG_SUCCESS:"普通资源包,打包成功~"
EMP_PACKAGE_UNION_PKG_DIR_ENOENT:"打包失败:没有离线资源相关文件~"
EMP_DEFAULT_FRONT_MSG:"{\r\n \"return\": {\r\n \"error_code\": \"000000\",\r\n \"error_msg\": \"\",\r\n
\"message\": \"This is a msg~~~~~\" \r\n }\r\n}"
TEMP_PACKAGE_NAME:"emp-template-management"
PACKAGE_NAME:"emp-debugger"
OFF_LINE_LINK_DIR:"public/www/resource_dev/"
LINK_PUBLICK_DIR:["public/www", "public/www/resource_dev/common/channels","public/www/resource_dev/common/css","public/www/resource_dev/common/lua", "public/www/resource_dev/common/images"]
get_pack_path: () ->
atom.packages.resolvePackagePath(this.PACKAGE_NAME)
get_temp_path: () ->
atom.packages.resolvePackagePath(this.TEMP_PACKAGE_NAME)
get_temp_emp_path: ->
pack_path = atom.packages.resolvePackagePath(this.TEMP_PACKAGE_NAME)
if pack_path
path.join(pack_path, "lib/exports/emp")
else
null
    # Open tmp_file_path in a new editor pane, optionally seed it with
    # `content`, apply the first grammar returned by getGrammars(), then hand
    # the editor to `callback`.
    # NOTE(review): the tmp_grammar parameter is accepted but never used —
    # getGrammars() ignores it too; confirm whether it can be dropped.
    create_editor:(tmp_file_path, tmp_grammar, callback, content) ->
        changeFocus = true
        tmp_editor = atom.workspace.open(tmp_file_path, { changeFocus }).then (tmp_editor) =>
            gramers = @getGrammars()
            # console.log content
            unless content is undefined
                tmp_editor.setText(content) #unless !content
            tmp_editor.setGrammar(gramers[0]) unless gramers[0] is undefined
            callback(tmp_editor)
# set the opened editor grammar, default is HTML
getGrammars: (grammar_name)->
grammars = atom.grammars.getGrammars().filter (grammar) ->
(grammar isnt atom.grammars.nullGrammar) and
grammar.name is 'CoffeeScript'
grammars
    # Root path of the project the active editor's file belongs to; falls
    # back to the first open project root when there is no active editor.
    get_project_path: ->
        project_path_list = atom.project.getPaths()
        project_path = project_path_list[0]
        editor = atom.workspace.getActiveTextEditor()
        if editor
            # handle the case where several project roots are open
            efile_path = editor.getPath?()
            if project_path_list.length > 1
                for tmp_path in project_path_list
                    relate_path = path.relative tmp_path, efile_path
                    # NOTE(review): a relative path starting with ".." means
                    # the file is OUTSIDE tmp_path, yet that root is selected
                    # here — the condition looks inverted; confirm intent.
                    if relate_path.match(/^\.\..*/ig) isnt null
                        project_path = tmp_path
                        break
        project_path
color_arr: ["#000033", "#000066", "#000099", "#0000CC", "#0000FF",
"#003300", "#003333", "#003366", "#003399", "#0033CC", "#0033FF",
"#006600", "#006633", "#006666", "#006699", "#0066CC", "#0066FF",
"#009900", "#009933", "#009966", "#009999", "#0099CC", "#0099FF",
"#00CC00", "#00CC33", "#00CC66", "#00CC99", "#00CCCC", "#00CCFF",
"#00FF00", "#00FF33", "#00FF66", "#00FF99", "#00FFCC", "#00FFFF",
"#330000", "#330033", "#330066", "#330099", "#3300CC", "#3300FF",
"#333300", "#333333", "#333366", "#333399", "#3333CC", "#3333FF",
"#336600", "#336633", "#336666", "#336699", "#3366CC", "#3366FF",
"#339900", "#339933", "#339966", "#339999", "#3399CC", "#3399FF",
"#33CC00", "#33CC33", "#33CC66", "#33CC99", "#33CCCC", "#33CCFF",
"#33FF00", "#33FF33", "#33FF66", "#33FF99", "#33FFCC", "#33FFFF",
"#660000", "#660033", "#660066", "#660099", "#6600CC", "#6600FF",
"#663300", "#663333", "#663366", "#663399", "#6633CC", "#6633FF",
"#666600", "#666633", "#666666", "#666699", "#6666CC", "#6666FF",
"#669900", "#669933", "#669966", "#669999", "#6699CC", "#6699FF",
"#66CC00", "#66CC33", "#66CC66", "#66CC99", "#66CCCC", "#66CCFF",
"#66FF00", "#66FF33", "#66FF66", "#66FF99", "#66FFCC", "#66FFFF",
"#990000", "#990033", "#990066", "#990099", "#9900CC", "#9900FF",
"#993300", "#993333", "#993366", "#993399", "#9933CC", "#9933FF",
"#996600", "#996633", "#996666", "#996699", "#9966CC", "#9966FF",
"#999900", "#999933", "#999966", "#999999", "#9999CC", "#9999FF",
"#99CC00", "#99CC33", "#99CC66", "#99CC99", "#99CCCC", "#99CCFF",
"#99FF00", "#99FF33", "#99FF66", "#99FF99", "#99FFCC", "#99FFFF",
"#CC0000", "#CC0033", "#CC0066", "#CC0099", "#CC00CC", "#CC00FF",
"#CC3300", "#CC3333", "#CC3366", "#CC3399", "#CC33CC", "#CC33FF",
"#CC6600", "#CC6633", "#CC6666", "#CC6699", "#CC66CC", "#CC66FF",
"#CC9900", "#CC9933", "#CC9966", "#CC9999", "#CC99CC", "#CC99FF",
"#CCCC00", "#CCCC33", "#CCCC66", "#CCCC99", "#CCCCCC", "#CCCCFF",
"#CCFF00", "#CCFF33", "#CCFF66", "#CCFF99", "#CCFFCC", "#CCFFFF",
"#FF0000", "#FF0033", "#FF0066", "#FF0099", "#FF00CC", "#FF00FF",
"#FF3300", "#FF3333", "#FF3366", "#FF3399", "#FF33CC", "#FF33FF",
"#FF6600", "#FF6633", "#FF6666", "#FF6699", "#FF66CC", "#FF66FF",
"#FF9900", "#FF9933", "#FF9966", "#FF9999", "#FF99CC", "#FF99FF",
"#FFCC00", "#FFCC33", "#FFCC66", "#FFCC99", "#FFCCCC", "#FFCCFF",
"#FFFF00", "#FFFF33", "#FFFF66", "#FFFF99", "#FFFFCC"]
get_color: ->
@color_arr[Math.floor(Math.random()* @color_arr.length)]
# Build node-name launch arguments (" -sname <n>" / " -name <n>@<host>").
# node_name : existing node name, possibly of the form "name@host"; when the
#             host part is a valid IPv4 address the long-name form is used
#             with this machine's first external IPv4 address appended.
# Returns {name, node_name} — the bare generated name and the CLI fragment.
module.exports.mk_node_name = (node_name="") ->
    default_name = " -sname "
    tmp_re = node_name.split("@")
    # Random suffix keeps concurrently launched helper nodes distinct.
    def_node_name = "atom_js" + Math.round(Math.random()*100)
    def_host = " "
    if tmp_re.length >1
        # console.log "node name has HOST~"
        if valid_ip(tmp_re[1])
            default_name = " -name "
            def_host = get_def_host()
            def_node_name = def_node_name + "@" +def_host
    # console.log def_host
    re_name = default_name + def_node_name
    {name:def_node_name, node_name: re_name}
# Random integer scaled up (via fix_rand) into the iLen-digit range;
# returns undefined for iLen <= 0.
module.exports.mk_rand = (iLen=6)->
    return if iLen <= 0
    upper = Math.pow 10, iLen
    candidate = Math.round Math.random() * upper
    if candidate > upper / 10 then candidate else fix_rand candidate, upper
# Multiply iRand by ten until it exceeds iAtomP / 10 (recursive helper for
# mk_rand; assumes iRand > 0 — a zero input would never terminate).
fix_rand = (iRand, iAtomP) ->
    return iRand if iRand > iAtomP / 10
    fix_rand iRand * 10, iAtomP
# A non-internal IPv4 address from the host's network interfaces, or ''
# when none is found.
# NOTE(review): `break` only exits the inner per-interface loop; the outer
# loop keeps scanning, so a later interface's first external IPv4 address
# overwrites an earlier match — confirm whether "first match wins" was meant.
get_def_host = ->
    add_list = os.networkInterfaces()
    tmp_address = ''
    for key,val of add_list
        # console.log val
        for tmp_obj in val
            if !tmp_obj.internal and tmp_obj.family is 'IPv4'
                tmp_address = tmp_obj.address
                break
    tmp_address
# Modal error dialog with a single OK button.
module.exports.show_error = (err_msg) ->
    atom.confirm {message: "Error", detailedMessage: err_msg, buttons: ["Ok"]}
# Modal warning dialog with a single OK button.
# Fix: dialog title typo "Warnning" -> "Warning" (the function name keeps
# the historical spelling for backward compatibility with callers).
module.exports.show_warnning = (warn_msg) ->
    atom.confirm
        message:"Warning"
        detailedMessage:warn_msg
        buttons:["Ok"]
# Modal informational dialog with a single OK button.
module.exports.show_info = (info_msg) ->
    atom.confirm {message: "Info", detailedMessage: info_msg, buttons: ["Ok"]}
# Modal dialog with a caller-supplied title and body, and a single OK button.
module.exports.self_info = (title_msg, detail_msg) ->
    atom.confirm {message: title_msg, detailedMessage: detail_msg, buttons: ["Ok"]}
# True when obj has no enumerable properties, false otherwise.
# Fix: the original loop body was the bare expression `false` (a no-op), so
# the function unconditionally returned true for every input.
module.exports.isEmpty = (obj) ->
    for key of obj
        return false
    true
# Lower-cased os.platform(), cached on atom.project (when a project exists)
# so the value is computed once per session.
module.exports.get_emp_os = () ->
    platform = os.platform().toLowerCase()
    return platform unless atom.project
    atom.project.emp_os = platform unless atom.project.emp_os
    atom.project.emp_os
# Create a single directory level if it does not already exist.
module.exports.mkdir_sync = (tmp_dir) ->
    fs.mkdirSync tmp_dir unless fs.existsSync tmp_dir
# Create each root_dir + dir entry (plain string concatenation — root_dir
# is expected to carry its own trailing separator) when missing.
module.exports.mkdirs_sync = (root_dir, dir_list) ->
    for dir in dir_list
        target = root_dir + dir
        fs.mkdirSync target unless fs.existsSync target
# Recursively create tmp_dir, building any missing parent directories first.
module.exports.mkdir_sync_safe = (tmp_dir) ->
    return if fs.existsSync tmp_dir
    @mkdir_sync_safe path.dirname(tmp_dir)
    fs.mkdirSync tmp_dir
# Base64-encode data (string or Buffer).
# Fix: Buffer.from replaces the deprecated and security-flagged
# `new Buffer(...)` constructor (Node DEP0005).
module.exports.base64_encode = (data) ->
    Buffer.from(data).toString('base64')
# Decode a base64 string to UTF-8 text.
# Fix: Buffer.from replaces the deprecated and security-flagged
# `new Buffer(...)` constructor (Node DEP0005).
module.exports.base64_decode = (data) ->
    Buffer.from(data, 'base64').toString()
# Recursive "mkdir -p" (sync) — a CoffeeScript port of mkdirp's sync
# algorithm. Creates p and any missing parents.
# p    : target path (resolved to absolute before use)
# made : recursion accumulator; external callers pass nothing.
# Returns the top-most directory actually created, or null when nothing
# needed creating. Throws the original mkdir error when p exists but is
# not a directory, or when creation fails for any reason other than ENOENT.
mk_dirs_sync = (p, made) ->
    # default mode is 0777
    # mask = ~process.umask()
    #
    # mode = 0777 & (~process.umask()) unless mode
    made = null unless made
    # mode = parseInt(mode, 8) unless typeof mode isnt 'string'
    p = path.resolve(p)
    try
        fs.mkdirSync(p)
        made = made || p
    catch err0
        switch err0.code
            when 'ENOENT'
                # Parent missing: create it first, then retry p itself.
                made = mk_dirs_sync(path.dirname(p), made)
                mk_dirs_sync(p, made)
            # // In the case of any other error, just see if there's a dir
            # // there already. If so, then hooray! If not, then something
            # // is borked.
            else
                stat = null
                try
                    stat = fs.statSync(p)
                catch err1
                    # stat failed too: surface the original mkdir error.
                    throw err0
                unless stat.isDirectory()
                    throw err0
    made
# Path pickers built on Atom's native open dialog.
# chose_path_f : pick a file, starting from def_path.
module.exports.chose_path_f = (def_path='', callback)->
    @chose_path(['openFile'], def_path, callback)
# chose_path_d : pick a file or a directory.
module.exports.chose_path_d = (callback)->
    @chose_path(['openFile', 'openDirectory'], '', callback)
# Show the open dialog; when the user selects something, pass the first
# chosen path to callback. (Fix: stray `console.log dialog` debug output
# removed.)
module.exports.chose_path = (opts=['openFile', "openDirectory"], def_path, callback)->
    dialog.showOpenDialog title: 'Select', defaultPath:def_path, properties: opts, (cho_path) =>
        if cho_path
            if callback
                callback(cho_path[0])
# Dotted-quad IPv4 check: returns the match array (truthy) or null.
# Callers (mk_node_name) rely only on the truthiness of the result.
valid_ip = (ip_add)->
    # console.log ip_add
    ip_add.match(///^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$///ig)
# Expose the file-local helpers alongside the exported object.
module.exports.mk_dirs_sync = mk_dirs_sync
module.exports.valid_ip = valid_ip
# macro defined
fs = require 'fs'
path = require 'path'
os = require 'os'
remote = require 'remote'
dialog = remote.Dialog
module.exports =
parser_beam_file_mod : 'atom_pl_parse_json'
# ----------------------------- Atom Config --------------------------------
EMP_APP_EXPORT_UI_PATH :'emp-template-management.Store-UI-Snippet-Export-Path'
EMP_APP_IMPORT_UI_PATH :'emp-template-management.Store-UI-Snippet-Import-Path'
EMP_APP_STORE_UI_PATH :'emp-template-management.Store-UI-Snippet-Path'
EMP_APP_WIZARD_APP_P :'emp-debugger.Default-App-Wizard-App-Path'
EMP_APP_WIZARD_EWP_P :'emp-debugger.Default-App-Wizard-Ewp-Path'
EMP_TEMP_WIZARD_NAME :'emp-debugger.Default-Template-App-Name'
EMP_TEMP_WIZARD_PATH :'emp-debugger.Default-Template-App-Path'
EMP_TEMP_WIZARD_PORT :'emp-debugger.Default-Template-App-Port'
EMP_TEMP_WIZARD_APORT :'emp-debugger.Default-Template-App-APort'
EMP_LINK_UNIGNORE_CONF :'emp-debugger.EMP-Link-Unignore'
EMP_LOG_LINE_LIMIT :'emp-debugger.defLimitOfLogLine'
EMP_LOG_LINE_LIMIT_SELECTED :'emp-debugger.defLimitOfLogLineSelected'
EMP_LOG_LEVEL_SELECTED :'emp-debugger.defLogLevelSelected'
EMP_ERL_SOURCE_PATH: 'emp-debugger.erl_source_path'
EMP_EWP_SOURE_PATH: 'emp-debugger.ewp_source_path'
EMP_YAWS_SOURCE_PATH: 'emp-debugger.yaws_source_path'
EMP_ERL_INDENT_TAB_LEN: 'emp-debugger.defErlangIndentTabLength'
EMP_ERL_INDENT_USE_TAB: 'emp-debugger.defErlangIndentUseTab'
EMP_FILTER_FLAG:"emp-debugger.defLogFilterFlag"
EMP_DEF_API_DATA:"emp-debugger.defAPIData"
EMP_LOG_SHOW_FIND_RESULT:'emp-debugger.defOnlyShowFindedResult'
EMP_LOG_SCROLL_TO_BOTTOM:'emp-debugger.defScrollToBottom'
EMP_LOG_TIMER:'emp-debugger.defLogTimer'
DEFAULT_LESS_NAME:'untitled.less'
DEFAULT_OUT_LESS_PATH:'../css/untitled.css'
EMP_DEF_LINE_LIMIT_SELECTED : 1000
EMP_DEF_LOG_LINE_LIMIT:[500, 1000, 2000, 5000, 10000]
EMP_DEF_LOG_TYPE:"lua"
EMP_DEF_CLIENT:"All"
EMP_NODE_NAME :'emp-debugger.Default-EMP-NODE-NAME'
EMP_NODE_COOKIE :'emp-debugger.Default-EMP-NODE-COOKIE'
EMP_NODE_NAME : 'ebank@localhost'
EMP_NODE_COOKIE : 'ewpcool'
    bash_path_key:'emp-debugger.path'
EMP_MAKE_CMD_KEY: 'emp-debugger.emp-make'
EMP_STAET_SCRIPT_KEY: 'emp-debugger.emp-start-script'
EMP_STAET_FRONT_SCRIPT_KEY: 'emp-debugger.emp-start-front-script'
EMP_CONFIG_KEY: 'emp-debugger.emp-config'
EMP_CONFIG_ARG_KEY: 'emp-debugger.emp-config-arg'
EMP_IMPORT_MENU_KEY : 'emp-debugger.emp-import-menu'
EMP_TMPORARY_APP_NAME:'emp-debugger.emp-tmp-app-name'
    EMP_CMAKE_KEY : 'emp-debugger.emp-c_app'
EMP_LESS_IMPORT_FILES:'emp-debugger.emp-less-import-files'
EMP_LOG_GLOBAL_COLOR:'emp-debugger.emp-log-global-color'
EMP_OFFLINE_DIR: 'emp-debugger.emp-offline-dev-path'
EMP_OFFLINE_RELATE_DIR: 'emp-debugger.emp-offline-relate-path'
EMP_OFFLINE_RELATE_PATH_V: "public/www/resource_dev"
EMP_VIEW_FILTER_IGNORE:["*.json", "*.lua", "*.png", "*.jpg", "*.css", "*.js"]
EMP_SCRIPT_FILTER_IGNORE:["*.json", "*.xhtml", "*.png", "*.jpg", "*.css"]
EMP_CHANNEL_ADAPTER_PLAT:'emp-debugger.emp-channel-adapter-platform'
EMP_CHANNEL_ADAPTER_RES:'emp-debugger.emp-channel-adapter-resolution'
OS_DARWIN:'darwin'
OS_LINUX:'linux'
OS_PATH:'PATH'
COL_KEY:"collections"
CHA_KEY:"channels"
# front template macro
DEF_APP_FILE:'.app'
DEF_PORT_FILE:'.port'
DEF_APORT_FILE:'.aport'
COL_ROOT_TYPE:1
COL_CH_TYPE:0
ITEM_CHA_TYPE:1
ITEM_COL_TYPE:0
# channel 回调类型
CHANNEL_ADAPTER:'channel_adapter'
CHANNEL_NEW_CALLBACK: 'new_callback'
CHANNEL_CALLBACK: 'channel_callback'
CHANNEL_PROXY: 'channel_proxy'
EMP_CHANNEL_URI : 'emp://wizard'
EMP_APP_URI : 'emp://app_wizard'
EMP_TEMP_URI : 'emp://template_wizard'
EMP_FRONT_PAGE_URI : 'emp://front_page_wizard'
EMP_CONFIG_URI : 'emp://emp_config'
EMP_API_DEBUG_URI : 'emp://emp_api_debug'
CHA_WIZARD_VIEW: 'EmpView'
APP_WIZARD_VIEW: 'EmpAppView'
TEMP_WIZARD_VIEW: 'EmpTemplateView'
FRONT_PAGE_WIZARD_VIEW: 'EmpFrontPageView'
EMP_CONFIG_VIEW : 'EmpConfigView'
EMP_API_VIEW: 'EMPAPIView'
CHA_CODE_DIR:'src'
CHA_PUBLIC_DIR:'public'
CHA_FRONT_MENU_DIR:'public/menu'
CHA_FRONT_VITUAL_COL:'virtual_collection'
# channel adapter 代码生成过程中的取值类型,
# 分别为 params, arg, session
ADAPTER_ARG_M_P:'param'
ADAPTER_ARG_M_A:'arg'
ADAPTER_ARG_M_S:'session'
# 离线资源平台
ADAPTER_PLT_D: 'common'
ADAPTER_PLT_I: 'iphone'
ADAPTER_PLT_A: 'android'
ADAPTER_PLT_W: 'wphone'
# 离线资源分辨率
ADAPTER_PLT_R: 'default'
ADAPTER_PLT_R1: '320-480'
ADAPTER_PLT_R2: '640-960'
ADAPTER_PLT_R3: '640-1136'
ADAPTER_PLT_R4: '750-1334'
ADAPTER_PLT_R5: '768-1024'
ADAPTER_PLT_R6: '1080-1920'
ADAPTER_PLT_R7: '1536-2048'
# 普通离线资源包,头名称
ADAPTER_PACKAGE_HEAD: 'package'
ADAPTER_UNION_PACKAGE_CHEAD:"batch-normal__"
ADAPTER_UNION_PACKAGE_NAME: "batch-normal-package.zip"
DEFAULT_ZIP_FULE_NAME:"default.zip"
# channel 插件包相关定义
PACKAGE_EXTENSION_BEAM:".beam"
PACKAGE_EXTENSION_BEAM_TYPE:"beam"
PACKAGE_CHANNEL_EBIN_DIR: "ebin"
PACKAGE_CHANNEL_CS_DIR: "public/cs/channels"
PACKAGE_NORMAL_CHANNEL:"normal_channel"
PACKAGE_SPEC:"SPEC"
PACKAGE_CHECKSUM:"CHECKSUM"
# channel 管理页面分页名称
GEN_VIEW:'gen_view'
ADD_CHA_VIEW:'add_cha_view'
ADD_COL_VIEW:'add_col_view'
ATOM_CONF_CHANNEL_DIR_KEY:'emp-debugger.Channel-config-file'
ATOM_CONF_CHANNEL_DIR_DEFAULT:'/config/channel.conf'
ATOM_EMP_APGE_ENTRANCE:'/public/www/entrance.xhtml'
EMP_ENTRANCE_FIRST_ID: '${first_cha_id}'
EMP_ENTRANCE_FIRST_TRANCODE: '${first_tran_code}'
# EMP_ENTRANCE_NEXT_ID: '${next_cha_id}'
EMP_ENTRANCE_NEXT_TRANCODE: '${next_tran_code}'
# adapter template
STATIC_TEMPLATE_DIR:"/templates/"
STATIC_API_DIR:"/templates/api/api_desc.json"
STATIC_APP_TEMPLATE:"/templates/app/"
STATIC_DEF_APP_TEMPLATE:"/templates/app/5.3"
STATIC_CHANNEL_TEMPLATE:"/templates/channel/"
CHANNEL_ADAPTER_DIR:'adapter'
CHANNEL_NEW_CALLBACK_DIR: 'new_callback'
CHANNEL_CALLBACK_DIR: 'callback'
# STATIC_UI_CSS_TEMPLATE_PATH:"/templates/css/"
STATIC_UI_CSS_TEMPLATE:"/templates/css/eui.css"
STATIC_UI_LESS_TEMPLATE: "/templates/less/ui-variables.less"
STATIC_UI_CSS_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/css/eui.css"
STATIC_UI_CSS_TEMPLATE_DEST_DIR:"public/www/resource_dev/common/css/"
STATIC_UI_LESS_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/less/ui-variables.less"
STATIC_UI_LESS_TEMPLATE_DEST_DIR:"public/www/resource_dev/common/less/"
STATIC_UI_CSS_DEF_FILE:"eui.css"
STATIC_UI_LUA_TEMPLATE:"/templates/lua/ert.lua"
STATIC_UI_LUA_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/lua/ert.lua"
STATIC_UI_LUA_PATH:"/templates/lua/"
STATIC_UI_LUA_DEST_PATH:"public/www/resource_dev/common/lua/"
STATIC_UI_JS_TEMPLATE:"/templates/js/"
STATIC_UI_JS_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/js/"
STATIC_MOB_HTML_PATH:"/templates/mobile/"
STATIC_MOB_HTML_TEMPLATE:"/templates/mobile/html/default.html"
NATIVE_CHANNEL_DEFAULT_STYLE:"/templates/css/atom_default_style.css"
DESTINATION_CHANNEL_DEFAULT_STYLE:"public/www/resource_dev/common/css/atom_default_style.css"
STATIC_ERL_TEMPLATE : "/channel_adapter_erl_template.tmp"
STATIC_ERL_FUN_TEMPLATE : "/channel_adapter_erl_function.tmp"
STATIC_CS_TEMPLATE : "/channel_adapter_cs_template.tmp"
STATIC_OFF_TEMPLATE : "/channel_adapter_xHtml_template.tmp"
STATIC_CSS_TEMPLATE : "/channel_adapter_css_template.tmp"
STATIC_LUA_TEMPLATE : "/channel_adapter_lua_template.tmp"
STATIC_LESS_TEMPLATE : "/channel_less_template.tmp"
STATIC_WEBVIEW_TEMPLATE : "/channel_webview_template.tmp"
STATIC_APP_FRONT_TEMP:"/templates/temp_app/"
COMMON_DIR_LIST :["images", "css", "lua", "xhtml","channels"]
OFF_CHA_DIR_LIST : ["xhtml", "css", "lua", "images", "json", "less"]
OFF_CHA_PLT_LIST:["wp", "iphone", "android", "common"]
OFF_BASE_DIR_LIST:["default"]
OFF_DEFAULT_BASE:"channels"
OFF_COMMON_BASE:"default"
OFF_COMMON_HTML: "webview"
OFF_WEBVIEW_DEF_APP: "app"
OFF_STORE_HTML_PATH: "module"
OFF_HTML_LIST:["js", "css", "images", "html", "module", "fonts", "jsMobile"]
OFF_HTML_CHI_DIR_LIST : ["html", "css", "js", "json"]
OFF_EXTENSION_ERL: "erl"
OFF_EXTENSION_CS: "cs"
OFF_EXTENSION_JSON: "json"
OFF_EXTENSION_XHTML:"xhtml"
OFF_EXTENSION_LUA:"lua"
OFF_EXTENSION_CSS: "css"
OFF_EXTENSION_JS: "js"
OFF_EXTENSION_HTML:"html"
OFF_EXTENSION_LESS: "less"
# the type of emp step
EMP_ADD_CHA_VIEW_TYPE_EMP: 'emp'
EMP_ADD_CHA_VIEW_TYPE_HTML: 'html'
DEFAULT_COL_ITEM:'[{item_id,"$cha_id"},{item_type,$itype},{menu_order,$order}]'
DEFAULT_CHA_TMP:'\n{channels,[[{id,\"${channel}\"},\r\n'+
' {app,\"${app}\"},\r\n'+
' {name,"${name}"},\r\n'+
' {entry,channel_adapter},\r\n'+
' {views, ${views}},\r\n'+
' {props,${props}},\r\n'+
' {state,${state}}]'
ADAPTER_REQUEST_PARAMS_FORMAT:" {'$key', $value}"
ADAPTER_VARIABLE : " $var = $getter(\"$key\", []),\r\n"
REPLACE_GETTER : "\\$getter"
ADAPTER_REQUEST_PARAMS: "{'$key', $value}"
DEFAULT_EXT_LUA : '.lua'
DEFAULT_EXT_CSS : '.css'
DEFAULT_EXT_XHTML :'.xhtml'
DEFAULT_EXT_JS :'.js'
DEFAULT_EXT_JSON :'.json'
DEFAULT_EXT_ERL: '.erl'
# emp debugger 实体文件路径
DEFAULT_TEMP_HEADER:'<!--<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>-->\n'
DEFAULT_LUATEMP_HEADER:'--<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>--\n'
DEFAULT_CSSTEMP_HEADER:'/*<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>*/\n'
DEFAULT_HEADER:'<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>\n'
DEFAULT_HEADER_CON:'<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>'
# Less import 文件格式
EMP_LESS_IMPORT_HEADER:'@import \"${file_path}\";\n'
EMP_CSS_IMPORT_HEADER:'@import (inline) \"${file_path}\";\n'
# 提示信息
EMP_PACKAGE_UNION_PKG_SUCCESS:"普通资源整合包,打包成功~"
EMP_PACKAGE_PKG_SUCCESS:"普通资源包,打包成功~"
EMP_PACKAGE_UNION_PKG_DIR_ENOENT:"打包失败:没有离线资源相关文件~"
EMP_DEFAULT_FRONT_MSG:"{\r\n \"return\": {\r\n \"error_code\": \"000000\",\r\n \"error_msg\": \"\",\r\n
\"message\": \"This is a msg~~~~~\" \r\n }\r\n}"
TEMP_PACKAGE_NAME:"emp-template-management"
PACKAGE_NAME:"emp-debugger"
OFF_LINE_LINK_DIR:"public/www/resource_dev/"
LINK_PUBLICK_DIR:["public/www", "public/www/resource_dev/common/channels","public/www/resource_dev/common/css","public/www/resource_dev/common/lua", "public/www/resource_dev/common/images"]
get_pack_path: () ->
atom.packages.resolvePackagePath(this.PACKAGE_NAME)
get_temp_path: () ->
atom.packages.resolvePackagePath(this.TEMP_PACKAGE_NAME)
get_temp_emp_path: ->
pack_path = atom.packages.resolvePackagePath(this.TEMP_PACKAGE_NAME)
if pack_path
path.join(pack_path, "lib/exports/emp")
else
null
create_editor:(tmp_file_path, tmp_grammar, callback, content) ->
changeFocus = true
tmp_editor = atom.workspace.open(tmp_file_path, { changeFocus }).then (tmp_editor) =>
gramers = @getGrammars()
# console.log content
unless content is undefined
tmp_editor.setText(content) #unless !content
tmp_editor.setGrammar(gramers[0]) unless gramers[0] is undefined
callback(tmp_editor)
# set the opened editor grammar, default is HTML
getGrammars: (grammar_name)->
grammars = atom.grammars.getGrammars().filter (grammar) ->
(grammar isnt atom.grammars.nullGrammar) and
grammar.name is 'CoffeeScript'
grammars
get_project_path: ->
project_path_list = atom.project.getPaths()
project_path = project_path_list[0]
editor = atom.workspace.getActiveTextEditor()
if editor
# 判断 project 有多个的情况
efile_path = editor.getPath?()
if project_path_list.length > 1
for tmp_path in project_path_list
relate_path = path.relative tmp_path, efile_path
if relate_path.match(/^\.\..*/ig) isnt null
project_path = tmp_path
break
project_path
color_arr: ["#000033", "#000066", "#000099", "#0000CC", "#0000FF",
"#003300", "#003333", "#003366", "#003399", "#0033CC", "#0033FF",
"#006600", "#006633", "#006666", "#006699", "#0066CC", "#0066FF",
"#009900", "#009933", "#009966", "#009999", "#0099CC", "#0099FF",
"#00CC00", "#00CC33", "#00CC66", "#00CC99", "#00CCCC", "#00CCFF",
"#00FF00", "#00FF33", "#00FF66", "#00FF99", "#00FFCC", "#00FFFF",
"#330000", "#330033", "#330066", "#330099", "#3300CC", "#3300FF",
"#333300", "#333333", "#333366", "#333399", "#3333CC", "#3333FF",
"#336600", "#336633", "#336666", "#336699", "#3366CC", "#3366FF",
"#339900", "#339933", "#339966", "#339999", "#3399CC", "#3399FF",
"#33CC00", "#33CC33", "#33CC66", "#33CC99", "#33CCCC", "#33CCFF",
"#33FF00", "#33FF33", "#33FF66", "#33FF99", "#33FFCC", "#33FFFF",
"#660000", "#660033", "#660066", "#660099", "#6600CC", "#6600FF",
"#663300", "#663333", "#663366", "#663399", "#6633CC", "#6633FF",
"#666600", "#666633", "#666666", "#666699", "#6666CC", "#6666FF",
"#669900", "#669933", "#669966", "#669999", "#6699CC", "#6699FF",
"#66CC00", "#66CC33", "#66CC66", "#66CC99", "#66CCCC", "#66CCFF",
"#66FF00", "#66FF33", "#66FF66", "#66FF99", "#66FFCC", "#66FFFF",
"#990000", "#990033", "#990066", "#990099", "#9900CC", "#9900FF",
"#993300", "#993333", "#993366", "#993399", "#9933CC", "#9933FF",
"#996600", "#996633", "#996666", "#996699", "#9966CC", "#9966FF",
"#999900", "#999933", "#999966", "#999999", "#9999CC", "#9999FF",
"#99CC00", "#99CC33", "#99CC66", "#99CC99", "#99CCCC", "#99CCFF",
"#99FF00", "#99FF33", "#99FF66", "#99FF99", "#99FFCC", "#99FFFF",
"#CC0000", "#CC0033", "#CC0066", "#CC0099", "#CC00CC", "#CC00FF",
"#CC3300", "#CC3333", "#CC3366", "#CC3399", "#CC33CC", "#CC33FF",
"#CC6600", "#CC6633", "#CC6666", "#CC6699", "#CC66CC", "#CC66FF",
"#CC9900", "#CC9933", "#CC9966", "#CC9999", "#CC99CC", "#CC99FF",
"#CCCC00", "#CCCC33", "#CCCC66", "#CCCC99", "#CCCCCC", "#CCCCFF",
"#CCFF00", "#CCFF33", "#CCFF66", "#CCFF99", "#CCFFCC", "#CCFFFF",
"#FF0000", "#FF0033", "#FF0066", "#FF0099", "#FF00CC", "#FF00FF",
"#FF3300", "#FF3333", "#FF3366", "#FF3399", "#FF33CC", "#FF33FF",
"#FF6600", "#FF6633", "#FF6666", "#FF6699", "#FF66CC", "#FF66FF",
"#FF9900", "#FF9933", "#FF9966", "#FF9999", "#FF99CC", "#FF99FF",
"#FFCC00", "#FFCC33", "#FFCC66", "#FFCC99", "#FFCCCC", "#FFCCFF",
"#FFFF00", "#FFFF33", "#FFFF66", "#FFFF99", "#FFFFCC"]
get_color: ->
@color_arr[Math.floor(Math.random()* @color_arr.length)]
module.exports.mk_node_name = (node_name="") ->
default_name = " -sname "
tmp_re = node_name.split("@")
def_node_name = "atom_js" + Math.round(Math.random()*100)
def_host = " "
if tmp_re.length >1
# console.log "node name has HOST~"
if valid_ip(tmp_re[1])
default_name = " -name "
def_host = get_def_host()
def_node_name = def_node_name + "@" +def_host
# console.log def_host
re_name = default_name + def_node_name
{name:def_node_name, node_name: re_name}
module.exports.mk_rand = (iLen=6)->
unless iLen <= 0
iAtomP = Math.pow 10, iLen
iRand = Math.round(Math.random()*iAtomP)
if iRand > (iAtomP/10)
return iRand
else
fix_rand(iRand, iAtomP)
fix_rand = (iRand, iAtomP) ->
if iRand > (iAtomP/10)
return iRand
else
fix_rand(iRand*10, iAtomP)
get_def_host = ->
add_list = os.networkInterfaces()
tmp_address = ''
for key,val of add_list
# console.log val
for tmp_obj in val
if !tmp_obj.internal and tmp_obj.family is 'IPv4'
tmp_address = tmp_obj.address
break
tmp_address
module.exports.show_error = (err_msg) ->
atom.confirm
message:"Error"
detailedMessage:err_msg
buttons:["Ok"]
module.exports.show_warnning = (warn_msg) ->
atom.confirm
message:"Warnning"
detailedMessage:warn_msg
buttons:["Ok"]
module.exports.show_info = (info_msg) ->
atom.confirm
message:"Info"
detailedMessage:info_msg
buttons:["Ok"]
module.exports.self_info = (title_msg, detail_msg) ->
atom.confirm
message:title_msg
detailedMessage:detail_msg
buttons:["Ok"]
module.exports.isEmpty = (obj) ->
for key,name of obj
false;
true;
module.exports.get_emp_os = () ->
tmp_os = os.platform().toLowerCase()
if atom.project
if !atom.project.emp_os
atom.project.emp_os = tmp_os
atom.project.emp_os
else
tmp_os
module.exports.mkdir_sync = (tmp_dir) ->
if !fs.existsSync(tmp_dir)
fs.mkdirSync(tmp_dir);
module.exports.mkdirs_sync = (root_dir, dir_list) ->
for dir in dir_list
tmp_dir = root_dir+dir
if !fs.existsSync(tmp_dir)
fs.mkdirSync(tmp_dir);
module.exports.mkdir_sync_safe = (tmp_dir) ->
if !fs.existsSync(tmp_dir)
this.mkdir_sync_safe(path.dirname tmp_dir)
fs.mkdirSync(tmp_dir);
module.exports.base64_encode = (data) ->
new Buffer(data).toString('base64')
module.exports.base64_decode = (data) ->
new Buffer(data, 'base64').toString()
mk_dirs_sync = (p, made) ->
# default mode is 0777
# mask = ~process.umask()
#
# mode = 0777 & (~process.umask()) unless mode
made = null unless made
# mode = parseInt(mode, 8) unless typeof mode isnt 'string'
p = path.resolve(p)
try
fs.mkdirSync(p)
made = made || p
catch err0
switch err0.code
when 'ENOENT'
made = mk_dirs_sync(path.dirname(p), made)
mk_dirs_sync(p, made)
# // In the case of any other error, just see if there's a dir
# // there already. If so, then hooray! If not, then something
# // is borked.
else
stat = null
try
stat = fs.statSync(p)
catch err1
throw err0
unless stat.isDirectory()
throw err0
made
# 选择路径
module.exports.chose_path_f = (def_path='', callback)->
@chose_path(['openFile'], def_path, callback)
module.exports.chose_path_d = (callback)->
@chose_path(['openFile', 'openDirectory'], '', callback)
module.exports.chose_path = (opts=['openFile', "openDirectory"], def_path, callback)->
console.log dialog
dialog.showOpenDialog title: 'Select', defaultPath:def_path, properties: opts, (cho_path) =>
if cho_path
if callback
callback(cho_path[0])
valid_ip = (ip_add)->
# console.log ip_add
ip_add.match(///^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$///ig)
module.exports.mk_dirs_sync = mk_dirs_sync
module.exports.valid_ip = valid_ip
| true | # macro defined
fs = require 'fs'
path = require 'path'
os = require 'os'
remote = require 'remote'
dialog = remote.Dialog
module.exports =
parser_beam_file_mod : 'atom_pl_parse_json'
# ----------------------------- Atom Config --------------------------------
EMP_APP_EXPORT_UI_PATH :'emp-template-management.Store-UI-Snippet-Export-Path'
EMP_APP_IMPORT_UI_PATH :'emp-template-management.Store-UI-Snippet-Import-Path'
EMP_APP_STORE_UI_PATH :'emp-template-management.Store-UI-Snippet-Path'
EMP_APP_WIZARD_APP_P :'emp-debugger.Default-App-Wizard-App-Path'
EMP_APP_WIZARD_EWP_P :'emp-debugger.Default-App-Wizard-Ewp-Path'
EMP_TEMP_WIZARD_NAME :'emp-debugger.Default-Template-App-Name'
EMP_TEMP_WIZARD_PATH :'emp-debugger.Default-Template-App-Path'
EMP_TEMP_WIZARD_PORT :'emp-debugger.Default-Template-App-Port'
EMP_TEMP_WIZARD_APORT :'emp-debugger.Default-Template-App-APort'
EMP_LINK_UNIGNORE_CONF :'emp-debugger.EMP-Link-Unignore'
EMP_LOG_LINE_LIMIT :'emp-debugger.defLimitOfLogLine'
EMP_LOG_LINE_LIMIT_SELECTED :'emp-debugger.defLimitOfLogLineSelected'
EMP_LOG_LEVEL_SELECTED :'emp-debugger.defLogLevelSelected'
EMP_ERL_SOURCE_PATH: 'emp-debugger.erl_source_path'
EMP_EWP_SOURE_PATH: 'emp-debugger.ewp_source_path'
EMP_YAWS_SOURCE_PATH: 'emp-debugger.yaws_source_path'
EMP_ERL_INDENT_TAB_LEN: 'emp-debugger.defErlangIndentTabLength'
EMP_ERL_INDENT_USE_TAB: 'emp-debugger.defErlangIndentUseTab'
EMP_FILTER_FLAG:"emp-debugger.defLogFilterFlag"
EMP_DEF_API_DATA:"emp-debugger.defAPIData"
EMP_LOG_SHOW_FIND_RESULT:'emp-debugger.defOnlyShowFindedResult'
EMP_LOG_SCROLL_TO_BOTTOM:'emp-debugger.defScrollToBottom'
EMP_LOG_TIMER:'emp-debugger.defLogTimer'
DEFAULT_LESS_NAME:'untitled.less'
DEFAULT_OUT_LESS_PATH:'../css/untitled.css'
EMP_DEF_LINE_LIMIT_SELECTED : 1000
EMP_DEF_LOG_LINE_LIMIT:[500, 1000, 2000, 5000, 10000]
EMP_DEF_LOG_TYPE:"lua"
EMP_DEF_CLIENT:"All"
EMP_NODE_NAME :'emp-debugger.Default-EMP-NODE-NAME'
EMP_NODE_COOKIE :'emp-debugger.Default-EMP-NODE-COOKIE'
EMP_NODE_NAME : 'ebank@localhost'
EMP_NODE_COOKIE : 'ewpcool'
bash_path_key:'emp-PI:KEY:<KEY>END_PI'
EMP_MAKE_CMD_KEY: 'emp-debugger.emp-make'
EMP_STAET_SCRIPT_KEY: 'emp-debugger.emp-start-script'
EMP_STAET_FRONT_SCRIPT_KEY: 'emp-debugger.emp-start-front-script'
EMP_CONFIG_KEY: 'emp-debugger.emp-config'
EMP_CONFIG_ARG_KEY: 'emp-debugger.emp-config-arg'
EMP_IMPORT_MENU_KEY : 'emp-debugger.emp-import-menu'
EMP_TMPORARY_APP_NAME:'emp-debugger.emp-tmp-app-name'
EMP_CMAKE_KEY : 'empPI:KEY:<KEY>END_PI-debugger.empPI:KEY:<KEY>END_PI-c_app'
EMP_LESS_IMPORT_FILES:'emp-debugger.emp-less-import-files'
EMP_LOG_GLOBAL_COLOR:'emp-debugger.emp-log-global-color'
EMP_OFFLINE_DIR: 'emp-debugger.emp-offline-dev-path'
EMP_OFFLINE_RELATE_DIR: 'emp-debugger.emp-offline-relate-path'
EMP_OFFLINE_RELATE_PATH_V: "public/www/resource_dev"
EMP_VIEW_FILTER_IGNORE:["*.json", "*.lua", "*.png", "*.jpg", "*.css", "*.js"]
EMP_SCRIPT_FILTER_IGNORE:["*.json", "*.xhtml", "*.png", "*.jpg", "*.css"]
EMP_CHANNEL_ADAPTER_PLAT:'emp-debugger.emp-channel-adapter-platform'
EMP_CHANNEL_ADAPTER_RES:'emp-debugger.emp-channel-adapter-resolution'
OS_DARWIN:'darwin'
OS_LINUX:'linux'
OS_PATH:'PATH'
COL_KEY:"collections"
CHA_KEY:"channels"
# front template macro
DEF_APP_FILE:'.app'
DEF_PORT_FILE:'.port'
DEF_APORT_FILE:'.aport'
COL_ROOT_TYPE:1
COL_CH_TYPE:0
ITEM_CHA_TYPE:1
ITEM_COL_TYPE:0
# channel 回调类型
CHANNEL_ADAPTER:'channel_adapter'
CHANNEL_NEW_CALLBACK: 'new_callback'
CHANNEL_CALLBACK: 'channel_callback'
CHANNEL_PROXY: 'channel_proxy'
EMP_CHANNEL_URI : 'emp://wizard'
EMP_APP_URI : 'emp://app_wizard'
EMP_TEMP_URI : 'emp://template_wizard'
EMP_FRONT_PAGE_URI : 'emp://front_page_wizard'
EMP_CONFIG_URI : 'emp://emp_config'
EMP_API_DEBUG_URI : 'emp://emp_api_debug'
CHA_WIZARD_VIEW: 'EmpView'
APP_WIZARD_VIEW: 'EmpAppView'
TEMP_WIZARD_VIEW: 'EmpTemplateView'
FRONT_PAGE_WIZARD_VIEW: 'EmpFrontPageView'
EMP_CONFIG_VIEW : 'EmpConfigView'
EMP_API_VIEW: 'EMPAPIView'
CHA_CODE_DIR:'src'
CHA_PUBLIC_DIR:'public'
CHA_FRONT_MENU_DIR:'public/menu'
CHA_FRONT_VITUAL_COL:'virtual_collection'
# channel adapter 代码生成过程中的取值类型,
# 分别为 params, arg, session
ADAPTER_ARG_M_P:'param'
ADAPTER_ARG_M_A:'arg'
ADAPTER_ARG_M_S:'session'
# 离线资源平台
ADAPTER_PLT_D: 'common'
ADAPTER_PLT_I: 'iphone'
ADAPTER_PLT_A: 'android'
ADAPTER_PLT_W: 'wphone'
# 离线资源分辨率
ADAPTER_PLT_R: 'default'
ADAPTER_PLT_R1: '320-480'
ADAPTER_PLT_R2: '640-960'
ADAPTER_PLT_R3: '640-1136'
ADAPTER_PLT_R4: '750-1334'
ADAPTER_PLT_R5: '768-1024'
ADAPTER_PLT_R6: '1080-1920'
ADAPTER_PLT_R7: '1536-2048'
# 普通离线资源包,头名称
ADAPTER_PACKAGE_HEAD: 'package'
ADAPTER_UNION_PACKAGE_CHEAD:"batch-normal__"
ADAPTER_UNION_PACKAGE_NAME: "batch-normal-package.zip"
DEFAULT_ZIP_FULE_NAME:"default.zip"
# channel 插件包相关定义
PACKAGE_EXTENSION_BEAM:".beam"
PACKAGE_EXTENSION_BEAM_TYPE:"beam"
PACKAGE_CHANNEL_EBIN_DIR: "ebin"
PACKAGE_CHANNEL_CS_DIR: "public/cs/channels"
PACKAGE_NORMAL_CHANNEL:"normal_channel"
PACKAGE_SPEC:"SPEC"
PACKAGE_CHECKSUM:"CHECKSUM"
# channel 管理页面分页名称
GEN_VIEW:'gen_view'
ADD_CHA_VIEW:'add_cha_view'
ADD_COL_VIEW:'add_col_view'
ATOM_CONF_CHANNEL_DIR_KEY:'emp-debugger.Channel-config-file'
ATOM_CONF_CHANNEL_DIR_DEFAULT:'/config/channel.conf'
ATOM_EMP_APGE_ENTRANCE:'/public/www/entrance.xhtml'
EMP_ENTRANCE_FIRST_ID: '${first_cha_id}'
EMP_ENTRANCE_FIRST_TRANCODE: '${first_tran_code}'
# EMP_ENTRANCE_NEXT_ID: '${next_cha_id}'
EMP_ENTRANCE_NEXT_TRANCODE: '${next_tran_code}'
# adapter template
STATIC_TEMPLATE_DIR:"/templates/"
STATIC_API_DIR:"/templates/api/api_desc.json"
STATIC_APP_TEMPLATE:"/templates/app/"
STATIC_DEF_APP_TEMPLATE:"/templates/app/5.3"
STATIC_CHANNEL_TEMPLATE:"/templates/channel/"
CHANNEL_ADAPTER_DIR:'adapter'
CHANNEL_NEW_CALLBACK_DIR: 'new_callback'
CHANNEL_CALLBACK_DIR: 'callback'
# STATIC_UI_CSS_TEMPLATE_PATH:"/templates/css/"
STATIC_UI_CSS_TEMPLATE:"/templates/css/eui.css"
STATIC_UI_LESS_TEMPLATE: "/templates/less/ui-variables.less"
STATIC_UI_CSS_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/css/eui.css"
STATIC_UI_CSS_TEMPLATE_DEST_DIR:"public/www/resource_dev/common/css/"
STATIC_UI_LESS_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/less/ui-variables.less"
STATIC_UI_LESS_TEMPLATE_DEST_DIR:"public/www/resource_dev/common/less/"
STATIC_UI_CSS_DEF_FILE:"eui.css"
STATIC_UI_LUA_TEMPLATE:"/templates/lua/ert.lua"
STATIC_UI_LUA_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/lua/ert.lua"
STATIC_UI_LUA_PATH:"/templates/lua/"
STATIC_UI_LUA_DEST_PATH:"public/www/resource_dev/common/lua/"
STATIC_UI_JS_TEMPLATE:"/templates/js/"
STATIC_UI_JS_TEMPLATE_DEST_PATH:"public/www/resource_dev/common/js/"
STATIC_MOB_HTML_PATH:"/templates/mobile/"
STATIC_MOB_HTML_TEMPLATE:"/templates/mobile/html/default.html"
NATIVE_CHANNEL_DEFAULT_STYLE:"/templates/css/atom_default_style.css"
DESTINATION_CHANNEL_DEFAULT_STYLE:"public/www/resource_dev/common/css/atom_default_style.css"
STATIC_ERL_TEMPLATE : "/channel_adapter_erl_template.tmp"
STATIC_ERL_FUN_TEMPLATE : "/channel_adapter_erl_function.tmp"
STATIC_CS_TEMPLATE : "/channel_adapter_cs_template.tmp"
STATIC_OFF_TEMPLATE : "/channel_adapter_xHtml_template.tmp"
STATIC_CSS_TEMPLATE : "/channel_adapter_css_template.tmp"
STATIC_LUA_TEMPLATE : "/channel_adapter_lua_template.tmp"
STATIC_LESS_TEMPLATE : "/channel_less_template.tmp"
STATIC_WEBVIEW_TEMPLATE : "/channel_webview_template.tmp"
STATIC_APP_FRONT_TEMP:"/templates/temp_app/"
COMMON_DIR_LIST :["images", "css", "lua", "xhtml","channels"]
OFF_CHA_DIR_LIST : ["xhtml", "css", "lua", "images", "json", "less"]
OFF_CHA_PLT_LIST:["wp", "iphone", "android", "common"]
OFF_BASE_DIR_LIST:["default"]
OFF_DEFAULT_BASE:"channels"
OFF_COMMON_BASE:"default"
OFF_COMMON_HTML: "webview"
OFF_WEBVIEW_DEF_APP: "app"
OFF_STORE_HTML_PATH: "module"
OFF_HTML_LIST:["js", "css", "images", "html", "module", "fonts", "jsMobile"]
OFF_HTML_CHI_DIR_LIST : ["html", "css", "js", "json"]
OFF_EXTENSION_ERL: "erl"
OFF_EXTENSION_CS: "cs"
OFF_EXTENSION_JSON: "json"
OFF_EXTENSION_XHTML:"xhtml"
OFF_EXTENSION_LUA:"lua"
OFF_EXTENSION_CSS: "css"
OFF_EXTENSION_JS: "js"
OFF_EXTENSION_HTML:"html"
OFF_EXTENSION_LESS: "less"
# the type of emp step
EMP_ADD_CHA_VIEW_TYPE_EMP: 'emp'
EMP_ADD_CHA_VIEW_TYPE_HTML: 'html'
DEFAULT_COL_ITEM:'[{item_id,"$cha_id"},{item_type,$itype},{menu_order,$order}]'
DEFAULT_CHA_TMP:'\n{channels,[[{id,\"${channel}\"},\r\n'+
' {app,\"${app}\"},\r\n'+
' {name,"${name}"},\r\n'+
' {entry,channel_adapter},\r\n'+
' {views, ${views}},\r\n'+
' {props,${props}},\r\n'+
' {state,${state}}]'
ADAPTER_REQUEST_PARAMS_FORMAT:" {'$key', $value}"
ADAPTER_VARIABLE : " $var = $getter(\"$key\", []),\r\n"
REPLACE_GETTER : "\\$getter"
ADAPTER_REQUEST_PARAMS: "{'$key', $value}"
DEFAULT_EXT_LUA : '.lua'
DEFAULT_EXT_CSS : '.css'
DEFAULT_EXT_XHTML :'.xhtml'
DEFAULT_EXT_JS :'.js'
DEFAULT_EXT_JSON :'.json'
DEFAULT_EXT_ERL: '.erl'
# emp debugger 实体文件路径
DEFAULT_TEMP_HEADER:'<!--<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>-->\n'
DEFAULT_LUATEMP_HEADER:'--<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>--\n'
DEFAULT_CSSTEMP_HEADER:'/*<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>*/\n'
DEFAULT_HEADER:'<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>\n'
DEFAULT_HEADER_CON:'<atom_emp_related_file_info>${atom_related_info}</atom_emp_related_file_info>'
# Less import 文件格式
EMP_LESS_IMPORT_HEADER:'@import \"${file_path}\";\n'
EMP_CSS_IMPORT_HEADER:'@import (inline) \"${file_path}\";\n'
# 提示信息
EMP_PACKAGE_UNION_PKG_SUCCESS:"普通资源整合包,打包成功~"
EMP_PACKAGE_PKG_SUCCESS:"普通资源包,打包成功~"
EMP_PACKAGE_UNION_PKG_DIR_ENOENT:"打包失败:没有离线资源相关文件~"
EMP_DEFAULT_FRONT_MSG:"{\r\n \"return\": {\r\n \"error_code\": \"000000\",\r\n \"error_msg\": \"\",\r\n
\"message\": \"This is a msg~~~~~\" \r\n }\r\n}"
TEMP_PACKAGE_NAME:"emp-template-management"
PACKAGE_NAME:"emp-debugger"
OFF_LINE_LINK_DIR:"public/www/resource_dev/"
LINK_PUBLICK_DIR:["public/www", "public/www/resource_dev/common/channels","public/www/resource_dev/common/css","public/www/resource_dev/common/lua", "public/www/resource_dev/common/images"]
get_pack_path: () ->
atom.packages.resolvePackagePath(this.PACKAGE_NAME)
get_temp_path: () ->
atom.packages.resolvePackagePath(this.TEMP_PACKAGE_NAME)
get_temp_emp_path: ->
pack_path = atom.packages.resolvePackagePath(this.TEMP_PACKAGE_NAME)
if pack_path
path.join(pack_path, "lib/exports/emp")
else
null
create_editor:(tmp_file_path, tmp_grammar, callback, content) ->
changeFocus = true
tmp_editor = atom.workspace.open(tmp_file_path, { changeFocus }).then (tmp_editor) =>
gramers = @getGrammars()
# console.log content
unless content is undefined
tmp_editor.setText(content) #unless !content
tmp_editor.setGrammar(gramers[0]) unless gramers[0] is undefined
callback(tmp_editor)
# set the opened editor grammar, default is HTML
getGrammars: (grammar_name)->
grammars = atom.grammars.getGrammars().filter (grammar) ->
(grammar isnt atom.grammars.nullGrammar) and
grammar.name is 'CoffeeScript'
grammars
get_project_path: ->
project_path_list = atom.project.getPaths()
project_path = project_path_list[0]
editor = atom.workspace.getActiveTextEditor()
if editor
# 判断 project 有多个的情况
efile_path = editor.getPath?()
if project_path_list.length > 1
for tmp_path in project_path_list
relate_path = path.relative tmp_path, efile_path
if relate_path.match(/^\.\..*/ig) isnt null
project_path = tmp_path
break
project_path
color_arr: ["#000033", "#000066", "#000099", "#0000CC", "#0000FF",
"#003300", "#003333", "#003366", "#003399", "#0033CC", "#0033FF",
"#006600", "#006633", "#006666", "#006699", "#0066CC", "#0066FF",
"#009900", "#009933", "#009966", "#009999", "#0099CC", "#0099FF",
"#00CC00", "#00CC33", "#00CC66", "#00CC99", "#00CCCC", "#00CCFF",
"#00FF00", "#00FF33", "#00FF66", "#00FF99", "#00FFCC", "#00FFFF",
"#330000", "#330033", "#330066", "#330099", "#3300CC", "#3300FF",
"#333300", "#333333", "#333366", "#333399", "#3333CC", "#3333FF",
"#336600", "#336633", "#336666", "#336699", "#3366CC", "#3366FF",
"#339900", "#339933", "#339966", "#339999", "#3399CC", "#3399FF",
"#33CC00", "#33CC33", "#33CC66", "#33CC99", "#33CCCC", "#33CCFF",
"#33FF00", "#33FF33", "#33FF66", "#33FF99", "#33FFCC", "#33FFFF",
"#660000", "#660033", "#660066", "#660099", "#6600CC", "#6600FF",
"#663300", "#663333", "#663366", "#663399", "#6633CC", "#6633FF",
"#666600", "#666633", "#666666", "#666699", "#6666CC", "#6666FF",
"#669900", "#669933", "#669966", "#669999", "#6699CC", "#6699FF",
"#66CC00", "#66CC33", "#66CC66", "#66CC99", "#66CCCC", "#66CCFF",
"#66FF00", "#66FF33", "#66FF66", "#66FF99", "#66FFCC", "#66FFFF",
"#990000", "#990033", "#990066", "#990099", "#9900CC", "#9900FF",
"#993300", "#993333", "#993366", "#993399", "#9933CC", "#9933FF",
"#996600", "#996633", "#996666", "#996699", "#9966CC", "#9966FF",
"#999900", "#999933", "#999966", "#999999", "#9999CC", "#9999FF",
"#99CC00", "#99CC33", "#99CC66", "#99CC99", "#99CCCC", "#99CCFF",
"#99FF00", "#99FF33", "#99FF66", "#99FF99", "#99FFCC", "#99FFFF",
"#CC0000", "#CC0033", "#CC0066", "#CC0099", "#CC00CC", "#CC00FF",
"#CC3300", "#CC3333", "#CC3366", "#CC3399", "#CC33CC", "#CC33FF",
"#CC6600", "#CC6633", "#CC6666", "#CC6699", "#CC66CC", "#CC66FF",
"#CC9900", "#CC9933", "#CC9966", "#CC9999", "#CC99CC", "#CC99FF",
"#CCCC00", "#CCCC33", "#CCCC66", "#CCCC99", "#CCCCCC", "#CCCCFF",
"#CCFF00", "#CCFF33", "#CCFF66", "#CCFF99", "#CCFFCC", "#CCFFFF",
"#FF0000", "#FF0033", "#FF0066", "#FF0099", "#FF00CC", "#FF00FF",
"#FF3300", "#FF3333", "#FF3366", "#FF3399", "#FF33CC", "#FF33FF",
"#FF6600", "#FF6633", "#FF6666", "#FF6699", "#FF66CC", "#FF66FF",
"#FF9900", "#FF9933", "#FF9966", "#FF9999", "#FF99CC", "#FF99FF",
"#FFCC00", "#FFCC33", "#FFCC66", "#FFCC99", "#FFCCCC", "#FFCCFF",
"#FFFF00", "#FFFF33", "#FFFF66", "#FFFF99", "#FFFFCC"]
get_color: ->
@color_arr[Math.floor(Math.random()* @color_arr.length)]
module.exports.mk_node_name = (node_name="") ->
default_name = " -sname "
tmp_re = node_name.split("@")
def_node_name = "atom_js" + Math.round(Math.random()*100)
def_host = " "
if tmp_re.length >1
# console.log "node name has HOST~"
if valid_ip(tmp_re[1])
default_name = " -name "
def_host = get_def_host()
def_node_name = def_node_name + "@" +def_host
# console.log def_host
re_name = default_name + def_node_name
{name:def_node_name, node_name: re_name}
module.exports.mk_rand = (iLen=6)->
unless iLen <= 0
iAtomP = Math.pow 10, iLen
iRand = Math.round(Math.random()*iAtomP)
if iRand > (iAtomP/10)
return iRand
else
fix_rand(iRand, iAtomP)
fix_rand = (iRand, iAtomP) ->
if iRand > (iAtomP/10)
return iRand
else
fix_rand(iRand*10, iAtomP)
get_def_host = ->
add_list = os.networkInterfaces()
tmp_address = ''
for key,val of add_list
# console.log val
for tmp_obj in val
if !tmp_obj.internal and tmp_obj.family is 'IPv4'
tmp_address = tmp_obj.address
break
tmp_address
module.exports.show_error = (err_msg) ->
atom.confirm
message:"Error"
detailedMessage:err_msg
buttons:["Ok"]
module.exports.show_warnning = (warn_msg) ->
atom.confirm
message:"Warnning"
detailedMessage:warn_msg
buttons:["Ok"]
module.exports.show_info = (info_msg) ->
atom.confirm
message:"Info"
detailedMessage:info_msg
buttons:["Ok"]
module.exports.self_info = (title_msg, detail_msg) ->
atom.confirm
message:title_msg
detailedMessage:detail_msg
buttons:["Ok"]
module.exports.isEmpty = (obj) ->
for key,name of obj
false;
true;
module.exports.get_emp_os = () ->
tmp_os = os.platform().toLowerCase()
if atom.project
if !atom.project.emp_os
atom.project.emp_os = tmp_os
atom.project.emp_os
else
tmp_os
module.exports.mkdir_sync = (tmp_dir) ->
if !fs.existsSync(tmp_dir)
fs.mkdirSync(tmp_dir);
module.exports.mkdirs_sync = (root_dir, dir_list) ->
for dir in dir_list
tmp_dir = root_dir+dir
if !fs.existsSync(tmp_dir)
fs.mkdirSync(tmp_dir);
module.exports.mkdir_sync_safe = (tmp_dir) ->
if !fs.existsSync(tmp_dir)
this.mkdir_sync_safe(path.dirname tmp_dir)
fs.mkdirSync(tmp_dir);
module.exports.base64_encode = (data) ->
new Buffer(data).toString('base64')
module.exports.base64_decode = (data) ->
new Buffer(data, 'base64').toString()
mk_dirs_sync = (p, made) ->
# default mode is 0777
# mask = ~process.umask()
#
# mode = 0777 & (~process.umask()) unless mode
made = null unless made
# mode = parseInt(mode, 8) unless typeof mode isnt 'string'
p = path.resolve(p)
try
fs.mkdirSync(p)
made = made || p
catch err0
switch err0.code
when 'ENOENT'
made = mk_dirs_sync(path.dirname(p), made)
mk_dirs_sync(p, made)
# // In the case of any other error, just see if there's a dir
# // there already. If so, then hooray! If not, then something
# // is borked.
else
stat = null
try
stat = fs.statSync(p)
catch err1
throw err0
unless stat.isDirectory()
throw err0
made
# 选择路径
module.exports.chose_path_f = (def_path='', callback)->
@chose_path(['openFile'], def_path, callback)
module.exports.chose_path_d = (callback)->
@chose_path(['openFile', 'openDirectory'], '', callback)
module.exports.chose_path = (opts=['openFile', "openDirectory"], def_path, callback)->
console.log dialog
dialog.showOpenDialog title: 'Select', defaultPath:def_path, properties: opts, (cho_path) =>
if cho_path
if callback
callback(cho_path[0])
valid_ip = (ip_add)->
# console.log ip_add
ip_add.match(///^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$///ig)
module.exports.mk_dirs_sync = mk_dirs_sync
module.exports.valid_ip = valid_ip
|
[
{
"context": " sess\n hashPassword({password : password}, next)\n (result, next) ->\n ",
"end": 3365,
"score": 0.9125338196754456,
"start": 3357,
"tag": "PASSWORD",
"value": "password"
}
] | src/api/authentication.coffee | bladepan/cloudbrowser | 0 | Crypto = require("crypto")
Async = require("async")
cloudbrowserError = require("../shared/cloudbrowser_error")
{LocalStrategy
, GoogleStrategy} = require("./authentication_strategies")
{hashPassword
, getParentMountPoint} = require("./utils")
class Authentication
constructor : (options) ->
{bserver, cbCtx, app} = options
localStrategy = new LocalStrategy(app, bserver, cbCtx)
googleStrategy = new GoogleStrategy(app, bserver)
###*
Sends a password reset link to the user to the email
registered with the application.
@method sendResetLink
@param {String} user
@param {booleanCallback} callback
@instance
@memberOf Authentication
###
@sendResetLink = (user, callback) ->
if typeof user isnt "string"
return callback?(cloudbrowserError('PARAM_MISSING', '- user'))
appUrl = app.getAppUrl()
token = null
Async.waterfall [
(next) ->
app.findUser(user, next)
(userRec, next) ->
if userRec then Crypto.randomBytes(32, next)
else next(cloudbrowserError('USER_NOT_REGISTERED'))
(token, next) ->
token = token.toString('hex')
app.addResetMarkerToUser
user : user
token : token
callback : next
(next) ->
esc_email = encodeURIComponent(user)
pwdResetLink = app.pwdRestApp.getAppUrl()+"?resettoken=#{token}&resetuser=#{esc_email}"
subject = "Link to reset your CloudBrowser password"
message = "You have requested to change your password." +
" If you want to continue click <a href=" +
"\"#{pwdResetLink}\"" +
">reset</a>. If you have" +
" not requested a change in password then take no" +
" action."
cbCtx.util.sendEmail
to : user
html : message
subject : subject
callback : next
], callback
return
# TODO : Add a configuration in app_config that allows only one user to connect to some
# VB types at a time.
###*
Resets the password.
A boolean is passed as an argument to indicate success/failure.
@method resetPassword
@param {String} password The new plaintext password provided by the user.
@param {booleanCallback} callback
@instance
@memberOf Authentication
###
@resetPassword = (password, callback) ->
sessionManager = bserver.server.sessionManager
session = null
Async.waterfall [
(next) ->
bserver.getFirstSession(next)
(sess, next) ->
session = sess
hashPassword({password : password}, next)
(result, next) ->
# Reset the key and salt for the corresponding user
app.resetUserPassword
email : sessionManager.findPropOnSession(session, 'resetuser')
token : sessionManager.findPropOnSession(session, 'resettoken')
salt : result.salt.toString('hex')
key : result.key.toString('hex')
callback : next
], callback
return
###*
Logs out all connected clients from the current application.
@method logout
@instance
@memberOf Authentication
###
@logout = () ->
bserver.redirect("#{app.mountPoint}/logout")
return
###*
Returns an instance of local strategy for authentication
@method getLocalStrategy
@return {LocalStrategy}
@instance
@memberOf Authentication
###
@getLocalStrategy = () ->
return localStrategy
###*
Returns an instance of google strategy for authentication
@method getGoogleStrategy
@return {GoogleStrategy}
@instance
@memberOf Authentication
###
@getGoogleStrategy = () ->
return googleStrategy
module.exports = Authentication
| 129012 | Crypto = require("crypto")
Async = require("async")
cloudbrowserError = require("../shared/cloudbrowser_error")
{LocalStrategy
, GoogleStrategy} = require("./authentication_strategies")
{hashPassword
, getParentMountPoint} = require("./utils")
class Authentication
constructor : (options) ->
{bserver, cbCtx, app} = options
localStrategy = new LocalStrategy(app, bserver, cbCtx)
googleStrategy = new GoogleStrategy(app, bserver)
###*
Sends a password reset link to the user to the email
registered with the application.
@method sendResetLink
@param {String} user
@param {booleanCallback} callback
@instance
@memberOf Authentication
###
@sendResetLink = (user, callback) ->
if typeof user isnt "string"
return callback?(cloudbrowserError('PARAM_MISSING', '- user'))
appUrl = app.getAppUrl()
token = null
Async.waterfall [
(next) ->
app.findUser(user, next)
(userRec, next) ->
if userRec then Crypto.randomBytes(32, next)
else next(cloudbrowserError('USER_NOT_REGISTERED'))
(token, next) ->
token = token.toString('hex')
app.addResetMarkerToUser
user : user
token : token
callback : next
(next) ->
esc_email = encodeURIComponent(user)
pwdResetLink = app.pwdRestApp.getAppUrl()+"?resettoken=#{token}&resetuser=#{esc_email}"
subject = "Link to reset your CloudBrowser password"
message = "You have requested to change your password." +
" If you want to continue click <a href=" +
"\"#{pwdResetLink}\"" +
">reset</a>. If you have" +
" not requested a change in password then take no" +
" action."
cbCtx.util.sendEmail
to : user
html : message
subject : subject
callback : next
], callback
return
# TODO : Add a configuration in app_config that allows only one user to connect to some
# VB types at a time.
###*
Resets the password.
A boolean is passed as an argument to indicate success/failure.
@method resetPassword
@param {String} password The new plaintext password provided by the user.
@param {booleanCallback} callback
@instance
@memberOf Authentication
###
@resetPassword = (password, callback) ->
sessionManager = bserver.server.sessionManager
session = null
Async.waterfall [
(next) ->
bserver.getFirstSession(next)
(sess, next) ->
session = sess
hashPassword({password : <PASSWORD>}, next)
(result, next) ->
# Reset the key and salt for the corresponding user
app.resetUserPassword
email : sessionManager.findPropOnSession(session, 'resetuser')
token : sessionManager.findPropOnSession(session, 'resettoken')
salt : result.salt.toString('hex')
key : result.key.toString('hex')
callback : next
], callback
return
###*
Logs out all connected clients from the current application.
@method logout
@instance
@memberOf Authentication
###
@logout = () ->
bserver.redirect("#{app.mountPoint}/logout")
return
###*
Returns an instance of local strategy for authentication
@method getLocalStrategy
@return {LocalStrategy}
@instance
@memberOf Authentication
###
@getLocalStrategy = () ->
return localStrategy
###*
Returns an instance of google strategy for authentication
@method getGoogleStrategy
@return {GoogleStrategy}
@instance
@memberOf Authentication
###
@getGoogleStrategy = () ->
return googleStrategy
module.exports = Authentication
| true | Crypto = require("crypto")
Async = require("async")
cloudbrowserError = require("../shared/cloudbrowser_error")
{LocalStrategy
, GoogleStrategy} = require("./authentication_strategies")
{hashPassword
, getParentMountPoint} = require("./utils")
class Authentication
constructor : (options) ->
{bserver, cbCtx, app} = options
localStrategy = new LocalStrategy(app, bserver, cbCtx)
googleStrategy = new GoogleStrategy(app, bserver)
###*
Sends a password reset link to the user to the email
registered with the application.
@method sendResetLink
@param {String} user
@param {booleanCallback} callback
@instance
@memberOf Authentication
###
@sendResetLink = (user, callback) ->
if typeof user isnt "string"
return callback?(cloudbrowserError('PARAM_MISSING', '- user'))
appUrl = app.getAppUrl()
token = null
Async.waterfall [
(next) ->
app.findUser(user, next)
(userRec, next) ->
if userRec then Crypto.randomBytes(32, next)
else next(cloudbrowserError('USER_NOT_REGISTERED'))
(token, next) ->
token = token.toString('hex')
app.addResetMarkerToUser
user : user
token : token
callback : next
(next) ->
esc_email = encodeURIComponent(user)
pwdResetLink = app.pwdRestApp.getAppUrl()+"?resettoken=#{token}&resetuser=#{esc_email}"
subject = "Link to reset your CloudBrowser password"
message = "You have requested to change your password." +
" If you want to continue click <a href=" +
"\"#{pwdResetLink}\"" +
">reset</a>. If you have" +
" not requested a change in password then take no" +
" action."
cbCtx.util.sendEmail
to : user
html : message
subject : subject
callback : next
], callback
return
# TODO : Add a configuration in app_config that allows only one user to connect to some
# VB types at a time.
###*
Resets the password.
A boolean is passed as an argument to indicate success/failure.
@method resetPassword
@param {String} password The new plaintext password provided by the user.
@param {booleanCallback} callback
@instance
@memberOf Authentication
###
@resetPassword = (password, callback) ->
sessionManager = bserver.server.sessionManager
session = null
Async.waterfall [
(next) ->
bserver.getFirstSession(next)
(sess, next) ->
session = sess
hashPassword({password : PI:PASSWORD:<PASSWORD>END_PI}, next)
(result, next) ->
# Reset the key and salt for the corresponding user
app.resetUserPassword
email : sessionManager.findPropOnSession(session, 'resetuser')
token : sessionManager.findPropOnSession(session, 'resettoken')
salt : result.salt.toString('hex')
key : result.key.toString('hex')
callback : next
], callback
return
###*
Logs out all connected clients from the current application.
@method logout
@instance
@memberOf Authentication
###
@logout = () ->
bserver.redirect("#{app.mountPoint}/logout")
return
###*
Returns an instance of local strategy for authentication
@method getLocalStrategy
@return {LocalStrategy}
@instance
@memberOf Authentication
###
@getLocalStrategy = () ->
return localStrategy
###*
Returns an instance of google strategy for authentication
@method getGoogleStrategy
@return {GoogleStrategy}
@instance
@memberOf Authentication
###
@getGoogleStrategy = () ->
return googleStrategy
module.exports = Authentication
|
[
{
"context": "s\",\"Prov\",\"Eccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"",
"end": 505,
"score": 0.7829144597053528,
"start": 502,
"tag": "NAME",
"value": "Dan"
},
{
"context": "ov\",\"Eccl\",\"Song\",\"Isa\"... | lib/bible-tools/lib/Bible-Passage-Reference-Parser/src/fi/spec.coffee | saiba-mais/bible-lessons | 149 | bcv_parser = require("../../js/fi_bcv_parser.js").bcv_parser
describe "Parsing", ->
p = {}
beforeEach ->
p = new bcv_parser
p.options.osis_compaction_strategy = "b"
p.options.sequence_combination_strategy = "combine"
it "should round-trip OSIS references", ->
p.set_options osis_compaction_strategy: "bc"
books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
it "should round-trip OSIS Apocrypha references", ->
p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
p.include_apocrypha true
books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
p.set_options ps151_strategy: "bc"
expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
p.include_apocrypha false
for book in books
bc = book + ".1"
expect(p.parse(bc).osis()).toEqual ""
it "should handle a preceding character", ->
expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
expect(p.parse("1Ps 1").osis()).toEqual ""
expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gen (fi)", ->
`
expect(p.parse("Ensimmainen Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Ensimmäinen Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Ensimmainen Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Ensimmäinen Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Moos 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("ENSIMMÄINEN MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("ENSIMMAINEN MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("ENSIMMÄINEN MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MOOS 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
`
true
describe "Localized book Exod (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Exod (fi)", ->
`
expect(p.parse("Toinen Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Toinen Mooseksen 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. Mooseksen 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. Mooseksen 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II Mooseksen 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Mooseksen 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Moos 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("TOINEN MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MOOS 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
`
true
describe "Localized book Bel (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (fi)", ->
`
expect(p.parse("Bel ja lohikaarme 1:1").osis()).toEqual("Bel.1.1")
expect(p.parse("Bel ja lohikaärme 1:1").osis()).toEqual("Bel.1.1")
expect(p.parse("Bel ja lohikäarme 1:1").osis()).toEqual("Bel.1.1")
expect(p.parse("Bel ja lohikäärme 1:1").osis()).toEqual("Bel.1.1")
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (fi)", ->
`
expect(p.parse("Kolmas Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Kolmas Mooseksen 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. Mooseksen 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III Mooseksen 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. Mooseksen 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Mooseksen 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Moos 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("KOLMAS MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("KOLMAS MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MOOS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (fi)", ->
`
expect(p.parse("Neljas Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Neljäs Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Neljas Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Neljäs Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Moos 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("NELJAS MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NELJÄS MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NELJAS MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NELJÄS MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MOOS 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
`
true
describe "Localized book Sir (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (fi)", ->
`
expect(p.parse("Jeesus Siirakin kirja 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Jesus Siirakin kirja 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Siirakin kirja 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Sirakin kirja 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Siirakin 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Sirakin 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (fi)", ->
`
expect(p.parse("Salomon viisaus 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Viisauden kirja 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Viis 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (fi)", ->
`
expect(p.parse("Valitusvirret 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Valit 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("VALITUSVIRRET 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("VALIT 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (fi)", ->
`
expect(p.parse("Jeremian kirje 1:1").osis()).toEqual("EpJer.1.1")
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (fi)", ->
`
expect(p.parse("Johanneksen ilmestys 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Ilmestyskirja 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Ilmestys 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Ilm 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("JOHANNEKSEN ILMESTYS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("ILMESTYSKIRJA 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("ILMESTYS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("ILM 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (fi)", ->
`
expect(p.parse("Manassen rukouksen 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("Man ru 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (fi)", ->
`
expect(p.parse("Viides Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Viides Mooseksen 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. Mooseksen 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. Mooseksen 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mooseksen 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V Mooseksen 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Moos 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("VIIDES MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("VIIDES MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOOS 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book Josh (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Josh (fi)", ->
`
expect(p.parse("Joosuan kirja 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Joosuan 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Joos 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("JOOSUAN KIRJA 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOOSUAN 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOOS 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Judg (fi)", ->
`
expect(p.parse("Tuomarien kirja 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Tuomarien 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Tuom 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("TUOMARIEN KIRJA 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("TUOMARIEN 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("TUOM 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (fi)", ->
`
expect(p.parse("Ruutin kirja 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Ruutin 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Ruut 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("RUUTIN KIRJA 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUUTIN 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUUT 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (fi)", ->
`
expect(p.parse("Ensimmainen Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("Ensimmäinen Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1. Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I. Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Es 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
describe "Localized book 2Esd (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Esd (fi)", ->
`
expect(p.parse("Toinen Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II. Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2. Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2 Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2 Es 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
`
true
describe "Localized book Isa (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Isa (fi)", ->
`
expect(p.parse("Jesajan kirja 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Jesajan 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Jes 1:1").osis()).toEqual("Isa.1.1")
p.include_apocrypha(false)
expect(p.parse("JESAJAN KIRJA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JESAJAN 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JES 1:1").osis()).toEqual("Isa.1.1")
`
true
describe "Localized book 2Sam (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Sam (fi)", ->
`
expect(p.parse("Toinen Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("Toinen Samuelin 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. Samuelin 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. Samuelin 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II Samuelin 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Samuelin 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN SAMUELIN KIRJA 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. SAMUELIN KIRJA 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SAMUELIN KIRJA 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II SAMUELIN KIRJA 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMUELIN KIRJA 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("TOINEN SAMUELIN 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. SAMUELIN 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SAMUELIN 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II SAMUELIN 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMUELIN 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
`
true
describe "Localized book 1Sam (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Sam (fi)", ->
`
expect(p.parse("Ensimmainen Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("Ensimmäinen Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("Ensimmainen Samuelin 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("Ensimmäinen Samuelin 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. Samuelin 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. Samuelin 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Samuelin 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I Samuelin 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("ENSIMMÄINEN SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("ENSIMMAINEN SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("ENSIMMÄINEN SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
`
true
describe "Localized book 2Kgs (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (fi)", ->
`
expect(p.parse("Toinen Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("Toinen Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kun 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("TOINEN KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KUN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
describe "Localized book 1Kgs (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (fi)", ->
`
expect(p.parse("Ensimmainen Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Ensimmäinen Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Ensimmainen Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Ensimmäinen Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kun 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("ENSIMMÄINEN KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("ENSIMMAINEN KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("ENSIMMÄINEN KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KUN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
`
true
describe "Localized book 2Chr (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Chr (fi)", ->
`
expect(p.parse("Toinen Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II. Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Aikak 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Aik 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II. AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 AIKAK 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 AIK 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
`
true
# Data-driven restyle of the generated per-book specs (Finnish locale).
# Each tuple is [OSIS id, mixed-case inputs, upper-case inputs]: the
# mixed-case inputs are parsed with the Apocrypha enabled; when the
# upper-case list is non-empty, the Apocrypha is disabled first and those
# inputs are parsed as well. Behavior matches the expanded originals.
for [osisBook, mixedCase, upperCase] in [
	["1Chr",
		["Ensimmainen Aikakirja", "Ensimmäinen Aikakirja", "1. Aikakirja", "I. Aikakirja", "1 Aikakirja", "I Aikakirja", "1 Aikak", "1 Aik", "1Chr"],
		["ENSIMMAINEN AIKAKIRJA", "ENSIMMÄINEN AIKAKIRJA", "1. AIKAKIRJA", "I. AIKAKIRJA", "1 AIKAKIRJA", "I AIKAKIRJA", "1 AIKAK", "1 AIK", "1CHR"]]
	["Ezra",
		["Esran kirja", "Esran", "Esra", "Ezra", "Esr"],
		["ESRAN KIRJA", "ESRAN", "ESRA", "EZRA", "ESR"]]
	["Neh",
		["Nehemian kirja", "Nehemian", "Neh"],
		["NEHEMIAN KIRJA", "NEHEMIAN", "NEH"]]
	["GkEsth",
		["Kreikkalainen Esterin kirja", "Kreikkalainen Esterin", "Kr. Est", "GkEsth", "Kr Est"],
		[]]
	["Esth",
		["Esterin kirja", "Esterin", "Esth", "Est"],
		["ESTERIN KIRJA", "ESTERIN", "ESTH", "EST"]]
	["Job",
		["Jobin kirja", "Jobin", "Job"],
		["JOBIN KIRJA", "JOBIN", "JOB"]]
	["Ps",
		["Psalmien kirja", "Psalmien", "Psalmit", "Psalmi", "Ps"],
		["PSALMIEN KIRJA", "PSALMIEN", "PSALMIT", "PSALMI", "PS"]]
	["PrAzar",
		["Asarjan rukous", "Asar ru", "PrAzar"],
		[]]
	["Prov",
		["Sananlaskujen kirja", "Sananlaskujen", "Sananlaskut", "Sananl", "Prov", "Snl"],
		["SANANLASKUJEN KIRJA", "SANANLASKUJEN", "SANANLASKUT", "SANANL", "PROV", "SNL"]]
	["Eccl",
		["Saarnaajan kirja", "Saarnaajan", "Saarnaaja", "Saarn", "Eccl", "Saar"],
		["SAARNAAJAN KIRJA", "SAARNAAJAN", "SAARNAAJA", "SAARN", "ECCL", "SAAR"]]
	["SgThree",
		["Kolmen nuoren miehen ollessa tulisessa patsissa", "Kolmen nuoren miehen ollessa tulisessa patsissä", "Kolmen nuoren miehen ollessa tulisessa pätsissa", "Kolmen nuoren miehen ollessa tulisessa pätsissä", "Kolmen miehen kiitosvirsi tulessa", "Kolmen miehen kiitosvirsi", "Kolmen nuoren miehen", "Kolmen miehen", "SgThree"],
		[]]
]
	# `do` captures the loop variables per iteration; the describe/it
	# callbacks run after the loop has finished.
	do (osisBook, mixedCase, upperCase) ->
		describe "Localized book #{osisBook} (fi)", ->
			p = {}
			beforeEach ->
				p = new bcv_parser
				p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
				p.include_apocrypha true
			it "should handle book: #{osisBook} (fi)", ->
				expect(p.parse("#{name} 1:1").osis()).toEqual "#{osisBook}.1.1" for name in mixedCase
				if upperCase.length > 0
					p.include_apocrypha false
					expect(p.parse("#{name} 1:1").osis()).toEqual "#{osisBook}.1.1" for name in upperCase
				return
			true
# Data-driven restyle of the generated per-book specs (Finnish locale).
# Each tuple is [OSIS id, mixed-case inputs, upper-case inputs]: the
# mixed-case inputs are parsed with the Apocrypha enabled, then the
# Apocrypha is disabled and the upper-case inputs are parsed as well.
# Behavior matches the expanded originals.
for [osisBook, mixedCase, upperCase] in [
	["Song",
		["Laulujen laulu", "Korkea veisu", "Laul. l", "Laul l", "Song"],
		["LAULUJEN LAULU", "KORKEA VEISU", "LAUL. L", "LAUL L", "SONG"]]
	["Jer",
		["Jeremian kirja", "Jeremian", "Jer"],
		["JEREMIAN KIRJA", "JEREMIAN", "JER"]]
	["Ezek",
		["Hesekielin kirja", "Hesekielin", "Ezek", "Hes"],
		["HESEKIELIN KIRJA", "HESEKIELIN", "EZEK", "HES"]]
	["Dan",
		["Danielin kirja", "Danielin", "Dan"],
		["DANIELIN KIRJA", "DANIELIN", "DAN"]]
	["Hos",
		["Hoosean kirja", "Hoosean", "Hoos", "Hos"],
		["HOOSEAN KIRJA", "HOOSEAN", "HOOS", "HOS"]]
	["Joel",
		["Joelin kirja", "Joelin", "Joel"],
		["JOELIN KIRJA", "JOELIN", "JOEL"]]
	["Amos",
		["Aamoksen kirja", "Aamoksen", "Amos", "Aam"],
		["AAMOKSEN KIRJA", "AAMOKSEN", "AMOS", "AAM"]]
	["Obad",
		["Obadjan kirja", "Obadjan", "Obadj", "Obad", "Ob"],
		["OBADJAN KIRJA", "OBADJAN", "OBADJ", "OBAD", "OB"]]
	["Jonah",
		["Joonan kirja", "Joonan", "Jonah", "Joona", "Joon"],
		["JOONAN KIRJA", "JOONAN", "JONAH", "JOONA", "JOON"]]
	["Mic",
		["Miikan kirja", "Miikan", "Miika", "Miik", "Mic"],
		["MIIKAN KIRJA", "MIIKAN", "MIIKA", "MIIK", "MIC"]]
	["Nah",
		["Nahumin kirja", "Nahumin", "Nah"],
		["NAHUMIN KIRJA", "NAHUMIN", "NAH"]]
]
	# `do` captures the loop variables per iteration; the describe/it
	# callbacks run after the loop has finished.
	do (osisBook, mixedCase, upperCase) ->
		describe "Localized book #{osisBook} (fi)", ->
			p = {}
			beforeEach ->
				p = new bcv_parser
				p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
				p.include_apocrypha true
			it "should handle book: #{osisBook} (fi)", ->
				expect(p.parse("#{name} 1:1").osis()).toEqual "#{osisBook}.1.1" for name in mixedCase
				if upperCase.length > 0
					p.include_apocrypha false
					expect(p.parse("#{name} 1:1").osis()).toEqual "#{osisBook}.1.1" for name in upperCase
				return
			true
# Data-driven restyle of the generated per-book specs (Finnish locale).
# Each tuple is [OSIS id, mixed-case inputs, upper-case inputs]: the
# mixed-case inputs are parsed with the Apocrypha enabled, then the
# Apocrypha is disabled and the upper-case inputs are parsed as well.
# Behavior matches the expanded originals.
for [osisBook, mixedCase, upperCase] in [
	["Hab",
		["Habakukin kirja", "Habakukin", "Hab"],
		["HABAKUKIN KIRJA", "HABAKUKIN", "HAB"]]
	["Zeph",
		["Sefanjan kirja", "Sefanjan", "Zeph", "Sef"],
		["SEFANJAN KIRJA", "SEFANJAN", "ZEPH", "SEF"]]
	["Hag",
		["Haggain kirja", "Haggain", "Hagg", "Hag"],
		["HAGGAIN KIRJA", "HAGGAIN", "HAGG", "HAG"]]
	["Zech",
		["Sakarjan kirja", "Sakarjan", "Zech", "Sak"],
		["SAKARJAN KIRJA", "SAKARJAN", "ZECH", "SAK"]]
	["Mal",
		["Malakian kirja", "Malakian", "Mal"],
		["MALAKIAN KIRJA", "MALAKIAN", "MAL"]]
	["Matt",
		["Evankeliumi Matteuksen mukaan", "Matteuksen evankeliumi", "Matteuksen", "Matt"],
		["EVANKELIUMI MATTEUKSEN MUKAAN", "MATTEUKSEN EVANKELIUMI", "MATTEUKSEN", "MATT"]]
	["Mark",
		["Evankeliumi Markuksen mukaan", "Markuksen evankeliumi", "Markuksen", "Mark"],
		["EVANKELIUMI MARKUKSEN MUKAAN", "MARKUKSEN EVANKELIUMI", "MARKUKSEN", "MARK"]]
	["Luke",
		["Evankeliumi Luukkaan mukaan", "Luukkaan evankeliumi", "Luukkaan", "Luke", "Luuk"],
		["EVANKELIUMI LUUKKAAN MUKAAN", "LUUKKAAN EVANKELIUMI", "LUUKKAAN", "LUKE", "LUUK"]]
]
	# `do` captures the loop variables per iteration; the describe/it
	# callbacks run after the loop has finished.
	do (osisBook, mixedCase, upperCase) ->
		describe "Localized book #{osisBook} (fi)", ->
			p = {}
			beforeEach ->
				p = new bcv_parser
				p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
				p.include_apocrypha true
			it "should handle book: #{osisBook} (fi)", ->
				expect(p.parse("#{name} 1:1").osis()).toEqual "#{osisBook}.1.1" for name in mixedCase
				if upperCase.length > 0
					p.include_apocrypha false
					expect(p.parse("#{name} 1:1").osis()).toEqual "#{osisBook}.1.1" for name in upperCase
				return
			true
describe "Localized book 1John (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1John (fi)", ->
`
expect(p.parse("Ensimmainen Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("Ensimmäinen Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("Ensimmainen Johanneksen 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("Ensimmäinen Johanneksen 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. Johanneksen 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. Johanneksen 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Johanneksen 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I Johanneksen 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("ENSIMMÄINEN JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("ENSIMMAINEN JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("ENSIMMÄINEN JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
`
true
describe "Localized book 2John (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2John (fi)", ->
`
expect(p.parse("Toinen Johanneksen kirje 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. Johanneksen kirje 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Johanneksen kirje 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Johanneksen kirje 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Johanneksen kirje 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("Toinen Johanneksen 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. Johanneksen 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Johanneksen 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Johanneksen 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Johanneksen 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("Toinen Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN JOHANNEKSEN KIRJE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOHANNEKSEN KIRJE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOHANNEKSEN KIRJE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("TOINEN JOHANNEKSEN 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. JOHANNEKSEN 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOHANNEKSEN 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOHANNEKSEN 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOHANNEKSEN 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("TOINEN JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
`
true
describe "Localized book 3John (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3John (fi)", ->
`
expect(p.parse("Kolmas Johanneksen kirje 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. Johanneksen kirje 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III Johanneksen kirje 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Johanneksen kirje 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Johanneksen kirje 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("Kolmas Johanneksen 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. Johanneksen 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III Johanneksen 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Johanneksen 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Johanneksen 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
p.include_apocrypha(false)
expect(p.parse("KOLMAS JOHANNEKSEN KIRJE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III JOHANNEKSEN KIRJE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOHANNEKSEN KIRJE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("KOLMAS JOHANNEKSEN 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. JOHANNEKSEN 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III JOHANNEKSEN 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOHANNEKSEN 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOHANNEKSEN 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
`
true
describe "Localized book John (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: John (fi)", ->
`
expect(p.parse("Evankeliumi Johanneksen mukaan 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Johanneksen evankeliumi 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Johanneksen 1:1").osis()).toEqual("John.1.1")
expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Joh 1:1").osis()).toEqual("John.1.1")
p.include_apocrypha(false)
expect(p.parse("EVANKELIUMI JOHANNEKSEN MUKAAN 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOHANNEKSEN EVANKELIUMI 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOHANNEKSEN 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOH 1:1").osis()).toEqual("John.1.1")
`
true
# Generated spec (fi): every localized spelling/abbreviation of Acts must parse to OSIS "Acts".
describe "Localized book Acts (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Apostolien teot 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Ap. t 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Ap t 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Ap.t 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Teot 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Apt 1:1").osis()).toEqual("Acts.1.1")
		p.include_apocrypha(false)
		expect(p.parse("APOSTOLIEN TEOT 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("AP. T 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("AP T 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("AP.T 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("TEOT 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("APT 1:1").osis()).toEqual("Acts.1.1")
		`
		true
# Generated spec (fi): every localized spelling/abbreviation of Romans must parse to OSIS "Rom".
describe "Localized book Rom (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Kirje roomalaisille 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Roomalaiskirje 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Roomalaisille 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Room 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KIRJE ROOMALAISILLE 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROOMALAISKIRJE 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROOMALAISILLE 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROOM 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
		`
		true
# Generated spec (fi): ordinal ("Toinen"/"II"/"2") variants of 2 Corinthians must parse to OSIS "2Cor".
describe "Localized book 2Cor (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Cor (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Toinen Kirje korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II. Kirje korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. Kirje korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II Kirje korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Kirje korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("Toinen Korinttolaiskirje 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("Toinen Korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II. Korinttolaiskirje 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. Korinttolaiskirje 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II Korinttolaiskirje 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II. Korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Korinttolaiskirje 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. Korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II Korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TOINEN KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II. KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("TOINEN KORINTTOLAISKIRJE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("TOINEN KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II. KORINTTOLAISKIRJE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTTOLAISKIRJE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II KORINTTOLAISKIRJE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II. KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTTOLAISKIRJE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2. KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("II KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
		`
		true
# Generated spec (fi): ordinal ("Ensimmainen"/"Ensimmäinen"/"I"/"1") variants of 1 Corinthians must parse to OSIS "1Cor".
describe "Localized book 1Cor (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Cor (fi)", ->
		# Embedded JavaScript (backtick literal); both accented and ASCII-folded spellings are covered.
		`
		expect(p.parse("Ensimmainen Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Ensimmäinen Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Ensimmainen Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Ensimmäinen Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Ensimmainen Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("Ensimmäinen Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ENSIMMAINEN KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("ENSIMMÄINEN KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("ENSIMMAINEN KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("ENSIMMÄINEN KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("ENSIMMAINEN KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("ENSIMMÄINEN KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1. KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I. KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("I KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
		`
		true
# Generated spec (fi): every localized spelling/abbreviation of Galatians must parse to OSIS "Gal".
describe "Localized book Gal (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gal (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Kirje galatalaisille 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("Galatalaisille 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("Galatalaiskirj 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KIRJE GALATALAISILLE 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("GALATALAISILLE 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("GALATALAISKIRJ 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
		`
		true
# Generated spec (fi): every localized spelling/abbreviation of Ephesians must parse to OSIS "Eph".
describe "Localized book Eph (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eph (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Kirje efesolaisille 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Efesolaiskirje 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Efesolaisille 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Ef 1:1").osis()).toEqual("Eph.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KIRJE EFESOLAISILLE 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EFESOLAISKIRJE 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EFESOLAISILLE 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EF 1:1").osis()).toEqual("Eph.1.1")
		`
		true
# Generated spec (fi): every localized spelling/abbreviation of Philippians must parse to OSIS "Phil".
describe "Localized book Phil (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil (fi)", ->
		# Embedded JavaScript (backtick literal); both accented and ASCII-folded spellings are covered.
		`
		expect(p.parse("Kirje filippilaisille 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Kirje filippiläisille 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filippilaiskirje 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filippiläiskirje 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filippilaisille 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filippiläisille 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Fil 1:1").osis()).toEqual("Phil.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KIRJE FILIPPILAISILLE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("KIRJE FILIPPILÄISILLE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIPPILAISKIRJE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIPPILÄISKIRJE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIPPILAISILLE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIPPILÄISILLE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FIL 1:1").osis()).toEqual("Phil.1.1")
		`
		true
# Generated spec (fi): every localized spelling/abbreviation of Colossians must parse to OSIS "Col".
describe "Localized book Col (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Col (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Kirje kolossalaisille 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kolossalaiskirje 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kolossalaisille 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KIRJE KOLOSSALAISILLE 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOLOSSALAISKIRJE 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOLOSSALAISILLE 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
		`
		true
# Generated spec (fi): ordinal ("Toinen"/"II"/"2") variants of 2 Thessalonians must parse to OSIS "2Thess".
describe "Localized book 2Thess (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Toinen Kirje tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. Kirje tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Kirje tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II Kirje tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Kirje tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("Toinen Tessalonikalaiskirje 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("Toinen Tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. Tessalonikalaiskirje 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Tessalonikalaiskirje 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II Tessalonikalaiskirje 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. Tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tessalonikalaiskirje 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. Tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II Tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Tess 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TOINEN KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TOINEN TESSALONIKALAISKIRJE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("TOINEN TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. TESSALONIKALAISKIRJE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TESSALONIKALAISKIRJE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II TESSALONIKALAISKIRJE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II. TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TESSALONIKALAISKIRJE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2. TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("II TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 TESS 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
		`
		true
# Generated spec (fi): ordinal ("Ensimmainen"/"Ensimmäinen"/"I"/"1") variants of 1 Thessalonians must parse to OSIS "1Thess".
describe "Localized book 1Thess (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Thess (fi)", ->
		# Embedded JavaScript (backtick literal); both accented and ASCII-folded spellings are covered.
		`
		expect(p.parse("Ensimmainen Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("Ensimmäinen Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("Ensimmainen Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("Ensimmäinen Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("Ensimmainen Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("Ensimmäinen Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Tess 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ENSIMMAINEN KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("ENSIMMÄINEN KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("ENSIMMAINEN TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("ENSIMMÄINEN TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("ENSIMMAINEN TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("ENSIMMÄINEN TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1. TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I. TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("I TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 TESS 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
		`
		true
# Generated spec (fi): ordinal ("Toinen"/"II"/"2") variants of 2 Timothy must parse to OSIS "2Tim".
describe "Localized book 2Tim (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Tim (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Toinen Kirje Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. Kirje Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. Kirje Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II Kirje Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Kirje Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("Toinen Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("Toinen Timoteuskirje 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. Timoteuskirje 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. Timoteuskirje 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II Timoteuskirje 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Timoteuskirje 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TOINEN KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TOINEN TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("TOINEN TIMOTEUSKIRJE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II. TIMOTEUSKIRJE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2. TIMOTEUSKIRJE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("II TIMOTEUSKIRJE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIMOTEUSKIRJE 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
		`
		true
# Generated spec (fi): ordinal ("Ensimmainen"/"Ensimmäinen"/"I"/"1") variants of 1 Timothy must parse to OSIS "1Tim".
describe "Localized book 1Tim (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Tim (fi)", ->
		# Embedded JavaScript (backtick literal); both accented and ASCII-folded spellings are covered.
		`
		expect(p.parse("Ensimmainen Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("Ensimmäinen Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("Ensimmainen Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("Ensimmainen Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("Ensimmäinen Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("Ensimmäinen Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ENSIMMAINEN KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("ENSIMMÄINEN KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("ENSIMMAINEN TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("ENSIMMAINEN TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("ENSIMMÄINEN TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("ENSIMMÄINEN TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1. TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I. TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("I TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
		`
		true
# Generated spec (fi): every localized spelling/abbreviation of Titus must parse to OSIS "Titus".
describe "Localized book Titus (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Titus (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Kirje Titukselle 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Titukselle 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KIRJE TITUKSELLE 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITUKSELLE 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
		`
		true
# Generated spec (fi): every localized spelling/abbreviation of Philemon must parse to OSIS "Phlm".
describe "Localized book Phlm (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phlm (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Kirje Filemonille 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Filemonille 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Filem 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KIRJE FILEMONILLE 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("FILEMONILLE 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("FILEM 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
		`
		true
# Generated spec (fi): every localized spelling/abbreviation of Hebrews must parse to OSIS "Heb".
describe "Localized book Heb (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Heb (fi)", ->
		# Embedded JavaScript (backtick literal); both "hebrealaisille" and "heprealaisille" spellings are covered.
		`
		expect(p.parse("Kirje hebrealaisille 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Kirje heprealaisille 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Heprealaiskirje 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Heprealaisille 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Hebr 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Hepr 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KIRJE HEBREALAISILLE 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("KIRJE HEPREALAISILLE 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEPREALAISKIRJE 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEPREALAISILLE 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEBR 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEPR 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
		`
		true
# Generated spec (fi): every localized spelling/abbreviation of James must parse to OSIS "Jas".
describe "Localized book Jas (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jas (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Jaakobin kirje 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jaakobin 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jaak 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JAAKOBIN KIRJE 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAAKOBIN 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAAK 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
		`
		true
# Generated spec (fi): ordinal ("Toinen"/"II"/"2") variants of 2 Peter must parse to OSIS "2Pet".
describe "Localized book 2Pet (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Pet (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Toinen Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II. Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("Toinen Pietarin 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II. Pietarin 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. Pietarin 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II Pietarin 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Pietarin 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Piet 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TOINEN PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II. PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("TOINEN PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II. PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2. PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("II PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PIET 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
		`
		true
# Generated spec (fi): ordinal ("Ensimmainen"/"Ensimmäinen"/"I"/"1") variants of 1 Peter must parse to OSIS "1Pet".
describe "Localized book 1Pet (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Pet (fi)", ->
		# Embedded JavaScript (backtick literal); both accented and ASCII-folded spellings are covered.
		`
		expect(p.parse("Ensimmainen Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("Ensimmäinen Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("Ensimmainen Pietarin 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("Ensimmäinen Pietarin 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I. Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. Pietarin 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I. Pietarin 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Pietarin 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I Pietarin 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Piet 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ENSIMMAINEN PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("ENSIMMÄINEN PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("ENSIMMAINEN PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("ENSIMMÄINEN PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I. PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1. PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I. PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("I PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PIET 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
		`
		true
# Generated spec (fi): every localized spelling/abbreviation of Jude must parse to OSIS "Jude".
describe "Localized book Jude (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jude (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Juudaksen kirje 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Juudaksen 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Juud 1:1").osis()).toEqual("Jude.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JUUDAKSEN KIRJE 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUUDAKSEN 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUUD 1:1").osis()).toEqual("Jude.1.1")
		`
		true
# Generated spec (fi): localized spellings of Tobit must parse to OSIS "Tob".
# Apocryphal book — only tested with include_apocrypha(true); no uppercase section.
describe "Localized book Tob (fi)", ->
	p = {}
	beforeEach ->
		# Fresh parser per spec; strict options so only explicit references match.
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Tob (fi)", ->
		# Embedded JavaScript (backtick literal); expectations are generated — do not edit by hand.
		`
		expect(p.parse("Tobiaan kirja 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tobitin kirja 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tobian kirja 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tobiaan 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tobitin 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tobian 1:1").osis()).toEqual("Tob.1.1")
		expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
		`
		true
describe "Localized book Jdt (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jdt (fi)", ->
`
expect(p.parse("Juditin kirja 1:1").osis()).toEqual("Jdt.1.1")
expect(p.parse("Juditin 1:1").osis()).toEqual("Jdt.1.1")
expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
expect(p.parse("Jud 1:1").osis()).toEqual("Jdt.1.1")
`
true
describe "Localized book Bar (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bar (fi)", ->
`
expect(p.parse("Baarukin kirja 1:1").osis()).toEqual("Bar.1.1")
expect(p.parse("Barukin kirja 1:1").osis()).toEqual("Bar.1.1")
expect(p.parse("Baarukin 1:1").osis()).toEqual("Bar.1.1")
expect(p.parse("Barukin 1:1").osis()).toEqual("Bar.1.1")
expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
`
true
describe "Localized book Sus (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sus (fi)", ->
`
expect(p.parse("Susanna ja vanhimmat 1:1").osis()).toEqual("Sus.1.1")
expect(p.parse("Susanna 1:1").osis()).toEqual("Sus.1.1")
expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
`
true
describe "Localized book 2Macc (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Macc (fi)", ->
`
expect(p.parse("Toinen makkabilaiskirja 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("II. makkabilaiskirja 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2. makkabilaiskirja 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("II makkabilaiskirja 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2 makkabilaiskirja 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2 makk 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
`
true
describe "Localized book 3Macc (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3Macc (fi)", ->
`
expect(p.parse("Kolmas makkabilaiskirja 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("III. makkabilaiskirja 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("III makkabilaiskirja 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3. makkabilaiskirja 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3 makkabilaiskirja 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3 makk 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
`
true
describe "Localized book 4Macc (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 4Macc (fi)", ->
`
expect(p.parse("Neljas makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("Neljäs makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("IV. makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4. makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("IV makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4 makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4 makk 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
`
true
describe "Localized book 1Macc (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Macc (fi)", ->
`
expect(p.parse("Ensimmainen makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("Ensimmäinen makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1. makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("I. makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1 makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("I makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1 makk 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
`
true
# Locale-wide behavior checks for Finnish: range/chapter/verse/"and"/title
# keywords, "ff"-style open ranges, translation tags (R1933/R1992), book
# ranges, and boundary punctuation. These are plain CoffeeScript (no
# backtick passthrough is needed here).
describe "Miscellaneous tests", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should return the expected language", ->
		expect(p.languages).toEqual ["fi"]
	it "should handle ranges (fi)", ->
		expect(p.parse("Titus 1:1 – 2").osis()).toEqual "Titus.1.1-Titus.1.2"
		expect(p.parse("Matt 1–2").osis()).toEqual "Matt.1-Matt.2"
		expect(p.parse("Phlm 2 – 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
	it "should handle chapters (fi)", ->
		expect(p.parse("Titus 1:1, luku 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 LUKU 6").osis()).toEqual "Matt.3.4,Matt.6"
		expect(p.parse("Titus 1:1, luvut 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 LUVUT 6").osis()).toEqual "Matt.3.4,Matt.6"
		expect(p.parse("Titus 1:1, luvun 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 LUVUN 6").osis()).toEqual "Matt.3.4,Matt.6"
	it "should handle verses (fi)", ->
		expect(p.parse("Exod 1:1 jakeet 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm JAKEET 6").osis()).toEqual "Phlm.1.6"
		expect(p.parse("Exod 1:1 jakeissa 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm JAKEISSA 6").osis()).toEqual "Phlm.1.6"
	it "should handle 'and' (fi)", ->
		expect(p.parse("Exod 1:1 ja 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 JA 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
		expect(p.parse("Exod 1:1 vrt 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 VRT 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
	it "should handle titles (fi)", ->
		expect(p.parse("Ps 3 johdannolla, 4:2, 5:johdannolla").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
		expect(p.parse("PS 3 JOHDANNOLLA, 4:2, 5:JOHDANNOLLA").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
	it "should handle 'ff' (fi)", ->
		expect(p.parse("Rev 3ss, 4:2ss").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
		expect(p.parse("REV 3 SS, 4:2 SS").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
	it "should handle translations (fi)", ->
		expect(p.parse("Lev 1 (R1933)").osis_and_translations()).toEqual [["Lev.1", "R1933"]]
		expect(p.parse("lev 1 r1933").osis_and_translations()).toEqual [["Lev.1", "R1933"]]
		expect(p.parse("Lev 1 (R1992)").osis_and_translations()).toEqual [["Lev.1", "R1992"]]
		expect(p.parse("lev 1 r1992").osis_and_translations()).toEqual [["Lev.1", "R1992"]]
	it "should handle book ranges (fi)", ->
		p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
		expect(p.parse("Ensimmäinen – Kolmas Johanneksen").osis()).toEqual "1John.1-3John.1"
	it "should handle boundaries (fi)", ->
		p.set_options {book_alone_strategy: "full"}
		expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
		expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
bcv_parser = require("../../js/fi_bcv_parser.js").bcv_parser
# Round-trip specs: every canonical OSIS book ID must parse back to itself
# as a chapter (b.c), verse (b.c.v), and verse range (b.c.v-b.c.v).
# NOTE(review): the two book arrays had been mangled by "<NAME>" placeholders;
# restored from the fixed canonical OSIS book ordering (Gen..Rev, Tob..Ps151).
describe "Parsing", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		# Ps151 folds into Ps.151 under the "bc" strategy.
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		# With the Apocrypha off, none of these books should parse at all.
		p.include_apocrypha false
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
# Finnish aliases for Gen ("1. Mooseksen kirja" forms) and Exod ("2. Mooseksen
# kirja" forms), each checked in mixed case and then upper-cased.
describe "Localized book Gen (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gen (fi)", ->
		`
		expect(p.parse("Ensimmainen Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Ensimmäinen Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Ensimmainen Mooseksen 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Ensimmäinen Mooseksen 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I. Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. Mooseksen 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I. Mooseksen 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Mooseksen 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I Mooseksen 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 Moos 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ENSIMMAINEN MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("ENSIMMÄINEN MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("ENSIMMAINEN MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("ENSIMMÄINEN MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1. MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I. MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("I MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("1 MOOS 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
		`
		true
describe "Localized book Exod (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Exod (fi)", ->
		`
		expect(p.parse("Toinen Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II. Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Toinen Mooseksen 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II. Mooseksen 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. Mooseksen 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II Mooseksen 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Mooseksen 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 Moos 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TOINEN MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("TOINEN MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II. MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2. MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("II MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("2 MOOS 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
		`
		true
# Finnish aliases for Bel (apocryphal, so no uppercase re-check) and Lev
# ("3. Mooseksen kirja" forms).
describe "Localized book Bel (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bel (fi)", ->
		`
		expect(p.parse("Bel ja lohikaarme 1:1").osis()).toEqual("Bel.1.1")
		expect(p.parse("Bel ja lohikaärme 1:1").osis()).toEqual("Bel.1.1")
		expect(p.parse("Bel ja lohikäarme 1:1").osis()).toEqual("Bel.1.1")
		expect(p.parse("Bel ja lohikäärme 1:1").osis()).toEqual("Bel.1.1")
		expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
		`
		true
describe "Localized book Lev (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lev (fi)", ->
		`
		expect(p.parse("Kolmas Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III. Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Kolmas Mooseksen 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III. Mooseksen 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III Mooseksen 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. Mooseksen 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 Mooseksen 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 Moos 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KOLMAS MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("KOLMAS MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III. MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("III MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3. MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("3 MOOS 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
		`
		true
# Finnish aliases for Num ("4. Mooseksen kirja" forms).
# NOTE(review): three parse strings had been mangled to "<NAME>" placeholders;
# restored ("Neljas"/"Neljäs"/"NELJAS") from the intact mirror lines in the
# same block (e.g. the uppercase half at "NELJÄS MOOSEKSEN KIRJA").
describe "Localized book Num (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Num (fi)", ->
		`
		expect(p.parse("Neljas Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Neljäs Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV. Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Neljas Mooseksen 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Neljäs Mooseksen 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV. Mooseksen 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. Mooseksen 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV Mooseksen 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 Mooseksen 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 Moos 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NELJAS MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NELJÄS MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NELJAS MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NELJÄS MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV. MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4. MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("IV MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("4 MOOS 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
		`
		true
# Finnish aliases for Sir (apocryphal, so no uppercase re-check).
# NOTE(review): the first parse string had been mangled to a "<NAME>"
# placeholder ending in "akin"; restored as "Jeesus Siirakin kirja" — the
# long-vowel variant of the intact "Jesus Siirakin kirja" line below it.
# Confirm against the locale's book-name data if available.
describe "Localized book Sir (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sir (fi)", ->
		`
		expect(p.parse("Jeesus Siirakin kirja 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("Jesus Siirakin kirja 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("Siirakin kirja 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("Sirakin kirja 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("Siirakin 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("Sirakin 1:1").osis()).toEqual("Sir.1.1")
		expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
		`
		true
# Finnish aliases for Wis (apocryphal, no uppercase re-check) and Lam.
describe "Localized book Wis (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Wis (fi)", ->
		`
		expect(p.parse("Salomon viisaus 1:1").osis()).toEqual("Wis.1.1")
		expect(p.parse("Viisauden kirja 1:1").osis()).toEqual("Wis.1.1")
		expect(p.parse("Viis 1:1").osis()).toEqual("Wis.1.1")
		expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
		`
		true
describe "Localized book Lam (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lam (fi)", ->
		`
		expect(p.parse("Valitusvirret 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Valit 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("VALITUSVIRRET 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("VALIT 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
		`
		true
# Finnish aliases for EpJer (apocryphal, so no uppercase re-check).
# NOTE(review): the first parse string had been mangled to a "<NAME>"
# placeholder; restored as "Jeremian kirje" (Finnish for the Epistle of
# Jeremiah). Confirm against the locale's book-name data if available.
describe "Localized book EpJer (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: EpJer (fi)", ->
		`
		expect(p.parse("Jeremian kirje 1:1").osis()).toEqual("EpJer.1.1")
		expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
		`
		true
# Finnish aliases for Rev, PrMan (apocryphal, no uppercase re-check), and
# Deut ("5. Mooseksen kirja" forms).
describe "Localized book Rev (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rev (fi)", ->
		`
		expect(p.parse("Johanneksen ilmestys 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Ilmestyskirja 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Ilmestys 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Ilm 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOHANNEKSEN ILMESTYS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("ILMESTYSKIRJA 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("ILMESTYS 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("ILM 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
		`
		true
describe "Localized book PrMan (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrMan (fi)", ->
		`
		expect(p.parse("Manassen rukouksen 1:1").osis()).toEqual("PrMan.1.1")
		expect(p.parse("Man ru 1:1").osis()).toEqual("PrMan.1.1")
		expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
		`
		true
describe "Localized book Deut (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Deut (fi)", ->
		`
		expect(p.parse("Viides Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("V. Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("V Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Viides Mooseksen 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. Mooseksen 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("V. Mooseksen 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 Mooseksen 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("V Mooseksen 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 Moos 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
		p.include_apocrypha(false)
		expect(p.parse("VIIDES MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("V. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("V MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("VIIDES MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5. MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("V. MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("V MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("5 MOOS 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
		`
		true
# Finnish aliases for Josh ("Joosuan kirja" forms).
# NOTE(review): the describe/it titles and four lowercase parse strings had
# been mangled to "<NAME>" placeholders; restored from the intact uppercase
# mirror lines (JOOSUAN KIRJA / JOOSUAN / JOOS / JOSH).
describe "Localized book Josh (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Josh (fi)", ->
		`
		expect(p.parse("Joosuan kirja 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Joosuan 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Joos 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOOSUAN KIRJA 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOOSUAN 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOOS 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
		`
		true
# Finnish aliases for Judg, Ruth, 1Esd/2Esd (apocryphal, no uppercase
# re-check), and Isa.
describe "Localized book Judg (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Judg (fi)", ->
		`
		expect(p.parse("Tuomarien kirja 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Tuomarien 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Tuom 1:1").osis()).toEqual("Judg.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TUOMARIEN KIRJA 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("TUOMARIEN 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("TUOM 1:1").osis()).toEqual("Judg.1.1")
		`
		true
describe "Localized book Ruth (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ruth (fi)", ->
		`
		expect(p.parse("Ruutin kirja 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruutin 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruut 1:1").osis()).toEqual("Ruth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RUUTIN KIRJA 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUUTIN 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUUT 1:1").osis()).toEqual("Ruth.1.1")
		`
		true
describe "Localized book 1Esd (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Esd (fi)", ->
		`
		expect(p.parse("Ensimmainen Esra 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("Ensimmäinen Esra 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("1. Esra 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("I. Esra 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("1 Esra 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("I Esra 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("1 Es 1:1").osis()).toEqual("1Esd.1.1")
		expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
		`
		true
describe "Localized book 2Esd (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (fi)", ->
		`
		expect(p.parse("Toinen Esra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("II. Esra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2. Esra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("II Esra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2 Esra 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2 Es 1:1").osis()).toEqual("2Esd.1.1")
		expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
		`
		true
describe "Localized book Isa (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (fi)", ->
		`
		expect(p.parse("Jesajan kirja 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Jesajan 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Jes 1:1").osis()).toEqual("Isa.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JESAJAN KIRJA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("JESAJAN 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("JES 1:1").osis()).toEqual("Isa.1.1")
		`
		true
# Finnish aliases for 2Sam ("Toinen Samuelin kirja" forms).
# NOTE(review): twelve parse strings had been mangled to "<NAME>"
# placeholders; restored from the intact lowercase half of the same block,
# which the uppercase half mirrors line for line.
describe "Localized book 2Sam (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (fi)", ->
		`
		expect(p.parse("Toinen Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II. Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("Toinen Samuelin 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II. Samuelin 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. Samuelin 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II Samuelin 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Samuelin 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("TOINEN SAMUELIN KIRJA 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II. SAMUELIN KIRJA 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAMUELIN KIRJA 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II SAMUELIN KIRJA 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUELIN KIRJA 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("TOINEN SAMUELIN 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II. SAMUELIN 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2. SAMUELIN 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("II SAMUELIN 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUELIN 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
		`
		true
# Finnish aliases for 1Sam ("Ensimmainen Samuelin kirja" forms).
# NOTE(review): many parse strings had been mangled to "<NAME>" placeholders;
# restored from the intact uppercase half of the same block, which the
# lowercase half mirrors line for line (and from the 2Sam block's pattern).
describe "Localized book 1Sam (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Sam (fi)", ->
		`
		expect(p.parse("Ensimmainen Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("Ensimmäinen Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("Ensimmainen Samuelin 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("Ensimmäinen Samuelin 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I. Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. Samuelin 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I. Samuelin 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Samuelin 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I Samuelin 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ENSIMMAINEN SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("ENSIMMÄINEN SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("ENSIMMAINEN SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("ENSIMMÄINEN SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I. SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1. SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I. SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("I SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
		`
		true
describe "Localized book 2Kgs (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (fi)", ->
`
expect(p.parse("Toinen Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("Toinen Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kun 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("TOINEN KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KUN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
describe "Localized book 1Kgs (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (fi)", ->
`
expect(p.parse("Ensimmainen Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Ensimmäinen Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Ensimmainen Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Ensimmäinen Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kun 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("ENSIMMÄINEN KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("ENSIMMAINEN KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("ENSIMMÄINEN KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KUN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
`
true
# Auto-generated bcv_parser Finnish (fi) locale specs (CoffeeScript + embedded
# JS in backticks): each spelling must parse to the canonical OSIS id, and the
# ALL-CAPS repeats after p.include_apocrypha(false) verify case-insensitivity.
describe "Localized book 2Chr (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Chr (fi)", ->
    `
    expect(p.parse("Toinen Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("II. Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("2. Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("II Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("2 Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("2 Aikak 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("2 Aik 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
    p.include_apocrypha(false)
    expect(p.parse("TOINEN AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("II. AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("2. AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("II AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("2 AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("2 AIKAK 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("2 AIK 1:1").osis()).toEqual("2Chr.1.1")
    expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
    `
    true
describe "Localized book 1Chr (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Chr (fi)", ->
    `
    expect(p.parse("Ensimmainen Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("Ensimmäinen Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("1. Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("I. Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("1 Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("I Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("1 Aikak 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("1 Aik 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
    p.include_apocrypha(false)
    expect(p.parse("ENSIMMAINEN AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("ENSIMMÄINEN AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("1. AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("I. AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("1 AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("I AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("1 AIKAK 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("1 AIK 1:1").osis()).toEqual("1Chr.1.1")
    expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
    `
    true
describe "Localized book Ezra (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Ezra (fi)", ->
    `
    expect(p.parse("Esran kirja 1:1").osis()).toEqual("Ezra.1.1")
    expect(p.parse("Esran 1:1").osis()).toEqual("Ezra.1.1")
    expect(p.parse("Esra 1:1").osis()).toEqual("Ezra.1.1")
    expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
    expect(p.parse("Esr 1:1").osis()).toEqual("Ezra.1.1")
    p.include_apocrypha(false)
    expect(p.parse("ESRAN KIRJA 1:1").osis()).toEqual("Ezra.1.1")
    expect(p.parse("ESRAN 1:1").osis()).toEqual("Ezra.1.1")
    expect(p.parse("ESRA 1:1").osis()).toEqual("Ezra.1.1")
    expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
    expect(p.parse("ESR 1:1").osis()).toEqual("Ezra.1.1")
    `
    true
describe "Localized book Neh (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Neh (fi)", ->
    `
    expect(p.parse("Nehemian kirja 1:1").osis()).toEqual("Neh.1.1")
    expect(p.parse("Nehemian 1:1").osis()).toEqual("Neh.1.1")
    expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
    p.include_apocrypha(false)
    expect(p.parse("NEHEMIAN KIRJA 1:1").osis()).toEqual("Neh.1.1")
    expect(p.parse("NEHEMIAN 1:1").osis()).toEqual("Neh.1.1")
    expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
    `
    true
# Auto-generated bcv_parser Finnish (fi) locale specs (CoffeeScript + embedded
# JS in backticks). Canonical books repeat their spellings in ALL CAPS after
# p.include_apocrypha(false); the apocryphal books here (GkEsth, PrAzar) have
# no such second round — they are only exercised with apocrypha enabled.
describe "Localized book GkEsth (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: GkEsth (fi)", ->
    `
    expect(p.parse("Kreikkalainen Esterin kirja 1:1").osis()).toEqual("GkEsth.1.1")
    expect(p.parse("Kreikkalainen Esterin 1:1").osis()).toEqual("GkEsth.1.1")
    expect(p.parse("Kr. Est 1:1").osis()).toEqual("GkEsth.1.1")
    expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
    expect(p.parse("Kr Est 1:1").osis()).toEqual("GkEsth.1.1")
    `
    true
describe "Localized book Esth (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Esth (fi)", ->
    `
    expect(p.parse("Esterin kirja 1:1").osis()).toEqual("Esth.1.1")
    expect(p.parse("Esterin 1:1").osis()).toEqual("Esth.1.1")
    expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
    expect(p.parse("Est 1:1").osis()).toEqual("Esth.1.1")
    p.include_apocrypha(false)
    expect(p.parse("ESTERIN KIRJA 1:1").osis()).toEqual("Esth.1.1")
    expect(p.parse("ESTERIN 1:1").osis()).toEqual("Esth.1.1")
    expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
    expect(p.parse("EST 1:1").osis()).toEqual("Esth.1.1")
    `
    true
describe "Localized book Job (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Job (fi)", ->
    `
    expect(p.parse("Jobin kirja 1:1").osis()).toEqual("Job.1.1")
    expect(p.parse("Jobin 1:1").osis()).toEqual("Job.1.1")
    expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
    p.include_apocrypha(false)
    expect(p.parse("JOBIN KIRJA 1:1").osis()).toEqual("Job.1.1")
    expect(p.parse("JOBIN 1:1").osis()).toEqual("Job.1.1")
    expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
    `
    true
describe "Localized book Ps (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Ps (fi)", ->
    `
    expect(p.parse("Psalmien kirja 1:1").osis()).toEqual("Ps.1.1")
    expect(p.parse("Psalmien 1:1").osis()).toEqual("Ps.1.1")
    expect(p.parse("Psalmit 1:1").osis()).toEqual("Ps.1.1")
    expect(p.parse("Psalmi 1:1").osis()).toEqual("Ps.1.1")
    expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
    p.include_apocrypha(false)
    expect(p.parse("PSALMIEN KIRJA 1:1").osis()).toEqual("Ps.1.1")
    expect(p.parse("PSALMIEN 1:1").osis()).toEqual("Ps.1.1")
    expect(p.parse("PSALMIT 1:1").osis()).toEqual("Ps.1.1")
    expect(p.parse("PSALMI 1:1").osis()).toEqual("Ps.1.1")
    expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
    `
    true
describe "Localized book PrAzar (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: PrAzar (fi)", ->
    `
    expect(p.parse("Asarjan rukous 1:1").osis()).toEqual("PrAzar.1.1")
    expect(p.parse("Asar ru 1:1").osis()).toEqual("PrAzar.1.1")
    expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
    `
    true
# Auto-generated bcv_parser Finnish (fi) locale specs (CoffeeScript + embedded
# JS in backticks): each spelling must parse to the canonical OSIS id; caps
# repeats after p.include_apocrypha(false) verify case-insensitivity. The
# apocryphal SgThree block has no caps round.
# NOTE(review): "<NAME>" tokens in some fixtures below look like redacted
# text — regenerate before trusting those assertions.
describe "Localized book Prov (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Prov (fi)", ->
    `
    expect(p.parse("Sananlaskujen kirja 1:1").osis()).toEqual("Prov.1.1")
    expect(p.parse("Sananlaskujen 1:1").osis()).toEqual("Prov.1.1")
    expect(p.parse("Sananlaskut 1:1").osis()).toEqual("Prov.1.1")
    expect(p.parse("Sananl 1:1").osis()).toEqual("Prov.1.1")
    expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
    expect(p.parse("Snl 1:1").osis()).toEqual("Prov.1.1")
    p.include_apocrypha(false)
    expect(p.parse("SANANLASKUJEN KIRJA 1:1").osis()).toEqual("Prov.1.1")
    expect(p.parse("SANANLASKUJEN 1:1").osis()).toEqual("Prov.1.1")
    expect(p.parse("SANANLASKUT 1:1").osis()).toEqual("Prov.1.1")
    expect(p.parse("SANANL 1:1").osis()).toEqual("Prov.1.1")
    expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
    expect(p.parse("SNL 1:1").osis()).toEqual("Prov.1.1")
    `
    true
describe "Localized book Eccl (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Eccl (fi)", ->
    `
    expect(p.parse("Sa<NAME> 1:1").osis()).toEqual("Eccl.1.1")
    expect(p.parse("Saarnaajan 1:1").osis()).toEqual("Eccl.1.1")
    expect(p.parse("Saarnaaja 1:1").osis()).toEqual("Eccl.1.1")
    expect(p.parse("Saarn 1:1").osis()).toEqual("Eccl.1.1")
    expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
    expect(p.parse("Saar 1:1").osis()).toEqual("Eccl.1.1")
    p.include_apocrypha(false)
    expect(p.parse("SA<NAME> 1:1").osis()).toEqual("Eccl.1.1")
    expect(p.parse("SAARNAAJAN 1:1").osis()).toEqual("Eccl.1.1")
    expect(p.parse("SAARNAAJA 1:1").osis()).toEqual("Eccl.1.1")
    expect(p.parse("SAARN 1:1").osis()).toEqual("Eccl.1.1")
    expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
    expect(p.parse("SAAR 1:1").osis()).toEqual("Eccl.1.1")
    `
    true
describe "Localized book SgThree (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: SgThree (fi)", ->
    `
    expect(p.parse("Kolmen nuoren miehen ollessa tulisessa patsissa 1:1").osis()).toEqual("SgThree.1.1")
    expect(p.parse("Kolmen nuoren miehen ollessa tulisessa patsissä 1:1").osis()).toEqual("SgThree.1.1")
    expect(p.parse("Kolmen nuoren miehen ollessa tulisessa pätsissa 1:1").osis()).toEqual("SgThree.1.1")
    expect(p.parse("Kolmen nuoren miehen ollessa tulisessa pätsissä 1:1").osis()).toEqual("SgThree.1.1")
    expect(p.parse("Kolmen miehen kiitosvirsi tulessa 1:1").osis()).toEqual("SgThree.1.1")
    expect(p.parse("Kolmen miehen kiitosvirsi 1:1").osis()).toEqual("SgThree.1.1")
    expect(p.parse("Kolmen nuoren miehen 1:1").osis()).toEqual("SgThree.1.1")
    expect(p.parse("Kolmen miehen 1:1").osis()).toEqual("SgThree.1.1")
    expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
    `
    true
describe "Localized book Song (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Song (fi)", ->
    `
    expect(p.parse("Laulujen laulu 1:1").osis()).toEqual("Song.1.1")
    expect(p.parse("Korkea veisu 1:1").osis()).toEqual("Song.1.1")
    expect(p.parse("Laul. l 1:1").osis()).toEqual("Song.1.1")
    expect(p.parse("Laul l 1:1").osis()).toEqual("Song.1.1")
    expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
    p.include_apocrypha(false)
    expect(p.parse("LAULUJEN LAULU 1:1").osis()).toEqual("Song.1.1")
    expect(p.parse("KORKEA VEISU 1:1").osis()).toEqual("Song.1.1")
    expect(p.parse("LAUL. L 1:1").osis()).toEqual("Song.1.1")
    expect(p.parse("LAUL L 1:1").osis()).toEqual("Song.1.1")
    expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
    `
    true
describe "Localized book Jer (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Jer (fi)", ->
    `
    expect(p.parse("<NAME> 1:1").osis()).toEqual("Jer.1.1")
    expect(p.parse("<NAME> 1:1").osis()).toEqual("Jer.1.1")
    expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
    p.include_apocrypha(false)
    expect(p.parse("JEREMIAN KIRJA 1:1").osis()).toEqual("Jer.1.1")
    expect(p.parse("JEREMIAN 1:1").osis()).toEqual("Jer.1.1")
    expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
    `
    true
# Auto-generated bcv_parser Finnish (fi) locale specs (CoffeeScript + embedded
# JS in backticks): each spelling must parse to the canonical OSIS id; caps
# repeats after p.include_apocrypha(false) verify case-insensitivity.
# NOTE(review): "<NAME>" tokens in some fixtures below (Dan, Joel, and some
# describe/it titles) look like redacted text — regenerate before trusting.
describe "Localized book Ezek (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Ezek (fi)", ->
    `
    expect(p.parse("Hesekielin kirja 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("Hesekielin 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("Hes 1:1").osis()).toEqual("Ezek.1.1")
    p.include_apocrypha(false)
    expect(p.parse("HESEKIELIN KIRJA 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("HESEKIELIN 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
    expect(p.parse("HES 1:1").osis()).toEqual("Ezek.1.1")
    `
    true
describe "Localized book Dan (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: <NAME> (fi)", ->
    `
    expect(p.parse("<NAME> 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("Danielin 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
    p.include_apocrypha(false)
    expect(p.parse("D<NAME>EL<NAME>IR<NAME> 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("DANIELIN 1:1").osis()).toEqual("Dan.1.1")
    expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
    `
    true
describe "Localized book Hos (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: <NAME>os (fi)", ->
    `
    expect(p.parse("Hoosean kirja 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("Hoosean 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("Hoos 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
    p.include_apocrypha(false)
    expect(p.parse("HOOSEAN KIRJA 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("HOOSEAN 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("HOOS 1:1").osis()).toEqual("Hos.1.1")
    expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
    `
    true
describe "Localized book Joel (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Joel (fi)", ->
    `
    expect(p.parse("<NAME> kir<NAME> 1:1").osis()).toEqual("Joel.1.1")
    expect(p.parse("Jo<NAME> 1:1").osis()).toEqual("Joel.1.1")
    expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
    p.include_apocrypha(false)
    expect(p.parse("<NAME> 1:1").osis()).toEqual("Joel.1.1")
    expect(p.parse("JO<NAME> 1:1").osis()).toEqual("Joel.1.1")
    expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
    `
    true
describe "Localized book Amos (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Amos (fi)", ->
    `
    expect(p.parse("Aamoksen kirja 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("Aamoksen 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("Aam 1:1").osis()).toEqual("Amos.1.1")
    p.include_apocrypha(false)
    expect(p.parse("AAMOKSEN KIRJA 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("AAMOKSEN 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
    expect(p.parse("AAM 1:1").osis()).toEqual("Amos.1.1")
    `
    true
describe "Localized book Obad (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Obad (fi)", ->
    `
    expect(p.parse("Obadjan kirja 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("Obadjan 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("Obadj 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("Ob 1:1").osis()).toEqual("Obad.1.1")
    p.include_apocrypha(false)
    expect(p.parse("OBADJAN KIRJA 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("OBADJAN 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("OBADJ 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
    expect(p.parse("OB 1:1").osis()).toEqual("Obad.1.1")
    `
    true
# Auto-generated bcv_parser Finnish (fi) locale specs (CoffeeScript + embedded
# JS in backticks): each spelling must parse to the canonical OSIS id; caps
# repeats after p.include_apocrypha(false) verify case-insensitivity.
# NOTE(review): the "<NAME>" tokens in the Jonah/Mic describe and it titles
# look like redacted text — regenerate before relying on these titles.
describe "Localized book <NAME> (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: <NAME> (fi)", ->
    `
    expect(p.parse("Joonan kirja 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("Joonan 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("Joona 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("Joon 1:1").osis()).toEqual("Jonah.1.1")
    p.include_apocrypha(false)
    expect(p.parse("JOONAN KIRJA 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("JOONAN 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("JOONA 1:1").osis()).toEqual("Jonah.1.1")
    expect(p.parse("JOON 1:1").osis()).toEqual("Jonah.1.1")
    `
    true
describe "Localized book <NAME>ic (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: <NAME>ic (fi)", ->
    `
    expect(p.parse("Miikan kirja 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("Miikan 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("Miika 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("Miik 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
    p.include_apocrypha(false)
    expect(p.parse("MIIKAN KIRJA 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("MIIKAN 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("MIIKA 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("MIIK 1:1").osis()).toEqual("Mic.1.1")
    expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
    `
    true
describe "Localized book Nah (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Nah (fi)", ->
    `
    expect(p.parse("Nahumin kirja 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("Nahumin 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
    p.include_apocrypha(false)
    expect(p.parse("NAHUMIN KIRJA 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("NAHUMIN 1:1").osis()).toEqual("Nah.1.1")
    expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
    `
    true
describe "Localized book Hab (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Hab (fi)", ->
    `
    expect(p.parse("Habakukin kirja 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("Habakukin 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
    p.include_apocrypha(false)
    expect(p.parse("HABAKUKIN KIRJA 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("HABAKUKIN 1:1").osis()).toEqual("Hab.1.1")
    expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
    `
    true
describe "Localized book Zeph (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Zeph (fi)", ->
    `
    expect(p.parse("Sefanjan kirja 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("Sefanjan 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("Sef 1:1").osis()).toEqual("Zeph.1.1")
    p.include_apocrypha(false)
    expect(p.parse("SEFANJAN KIRJA 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("SEFANJAN 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
    expect(p.parse("SEF 1:1").osis()).toEqual("Zeph.1.1")
    `
    true
describe "Localized book Hag (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Hag (fi)", ->
    `
    expect(p.parse("Haggain kirja 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("Haggain 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("Hagg 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
    p.include_apocrypha(false)
    expect(p.parse("HAGGAIN KIRJA 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("HAGGAIN 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("HAGG 1:1").osis()).toEqual("Hag.1.1")
    expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
    `
    true
# Auto-generated bcv_parser Finnish (fi) locale specs (CoffeeScript + embedded
# JS in backticks): each spelling must parse to the canonical OSIS id; caps
# repeats after p.include_apocrypha(false) verify case-insensitivity.
# NOTE(review): the "<NAME>" tokens in the Matthew describe/it titles look
# like redacted text — regenerate before relying on these titles.
describe "Localized book Zech (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Zech (fi)", ->
    `
    expect(p.parse("Sakarjan kirja 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("Sakarjan 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("Sak 1:1").osis()).toEqual("Zech.1.1")
    p.include_apocrypha(false)
    expect(p.parse("SAKARJAN KIRJA 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("SAKARJAN 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
    expect(p.parse("SAK 1:1").osis()).toEqual("Zech.1.1")
    `
    true
describe "Localized book Mal (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Mal (fi)", ->
    `
    expect(p.parse("Malakian kirja 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("Malakian 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
    p.include_apocrypha(false)
    expect(p.parse("MALAKIAN KIRJA 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("MALAKIAN 1:1").osis()).toEqual("Mal.1.1")
    expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
    `
    true
describe "Localized book <NAME> (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: <NAME> (fi)", ->
    `
    expect(p.parse("Evankeliumi Matteuksen mukaan 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("Matteuksen evankeliumi 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("Matteuksen 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
    p.include_apocrypha(false)
    expect(p.parse("EVANKELIUMI MATTEUKSEN MUKAAN 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATTEUKSEN EVANKELIUMI 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATTEUKSEN 1:1").osis()).toEqual("Matt.1.1")
    expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
    `
    true
describe "Localized book Mark (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Mark (fi)", ->
    `
    expect(p.parse("Evankeliumi Markuksen mukaan 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("Markuksen evankeliumi 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("Markuksen 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
    p.include_apocrypha(false)
    expect(p.parse("EVANKELIUMI MARKUKSEN MUKAAN 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARKUKSEN EVANKELIUMI 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARKUKSEN 1:1").osis()).toEqual("Mark.1.1")
    expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
    `
    true
describe "Localized book Luke (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Luke (fi)", ->
    `
    expect(p.parse("Evankeliumi Luukkaan mukaan 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("Luukkaan evankeliumi 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("Luukkaan 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("Luuk 1:1").osis()).toEqual("Luke.1.1")
    p.include_apocrypha(false)
    expect(p.parse("EVANKELIUMI LUUKKAAN MUKAAN 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUUKKAAN EVANKELIUMI 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUUKKAAN 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
    expect(p.parse("LUUK 1:1").osis()).toEqual("Luke.1.1")
    `
    true
describe "Localized book 1John (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1John (fi)", ->
    `
    expect(p.parse("Ensimmainen Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("Ensimmäinen Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("Ensimmainen Johanneksen 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("Ensimmäinen Johanneksen 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("I. Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("I Johanneksen kirje 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. Johanneksen 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("I. Johanneksen 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 Johanneksen 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("I Johanneksen 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 Joh 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
    p.include_apocrypha(false)
    expect(p.parse("ENSIMMAINEN JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("ENSIMMÄINEN JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("ENSIMMAINEN JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("ENSIMMÄINEN JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("I. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("I JOHANNEKSEN KIRJE 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1. JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("I. JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("I JOHANNEKSEN 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1 JOH 1:1").osis()).toEqual("1John.1.1")
    expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
    `
    true
describe "Localized book 2John (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2John (fi)", ->
`
expect(p.parse("Toinen Johanneksen kirje 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. Johanneksen kirje 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Johanneksen kirje 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Johanneksen kirje 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Johanneksen kirje 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("Toinen Johanneksen 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. Johanneksen 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Johanneksen 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Johanneksen 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Johanneksen 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("Toinen Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN JOHANNEKSEN KIRJE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOHANNEKSEN KIRJE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOHANNEKSEN KIRJE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("TOINEN JOHANNEKSEN 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. JOHANNEKSEN 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOHANNEKSEN 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOHANNEKSEN 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOHANNEKSEN 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("TOINEN JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
`
true
describe "Localized book 3John (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3John (fi)", ->
`
expect(p.parse("Kolmas Johanneksen kirje 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. Johanneksen kirje 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III Johanneksen kirje 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Johanneksen kirje 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Johanneksen kirje 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("Kolmas Johanneksen 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. Johanneksen 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III Johanneksen 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Johanneksen 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Johanneksen 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
p.include_apocrypha(false)
expect(p.parse("KOLMAS JOHANNEKSEN KIRJE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III JOHANNEKSEN KIRJE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOHANNEKSEN KIRJE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOHANNEKSEN KIRJE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("KOLMAS JOHANNEKSEN 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. JOHANNEKSEN 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III JOHANNEKSEN 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOHANNEKSEN 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOHANNEKSEN 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
`
true
describe "Localized book <NAME> (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: <NAME> (fi)", ->
`
expect(p.parse("Evankeliumi Johanneksen mukaan 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Johanneksen evankeliumi 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Johanneksen 1:1").osis()).toEqual("John.1.1")
expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Joh 1:1").osis()).toEqual("John.1.1")
p.include_apocrypha(false)
expect(p.parse("EVANKELIUMI JOHANNEKSEN MUKAAN 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOHANNEKSEN EVANKELIUMI 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOHANNEKSEN 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JO<NAME> 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JO<NAME> 1:1").osis()).toEqual("John.1.1")
`
true
describe "Localized book Acts (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Acts (fi)", ->
`
expect(p.parse("Apostolien teot 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Ap. t 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Ap t 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Ap.t 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Teot 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Apt 1:1").osis()).toEqual("Acts.1.1")
p.include_apocrypha(false)
expect(p.parse("APOSTOLIEN TEOT 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("AP. T 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("AP T 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("AP.T 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("TEOT 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("APT 1:1").osis()).toEqual("Acts.1.1")
`
true
describe "Localized book Rom (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rom (fi)", ->
`
expect(p.parse("Kirje roomalaisille 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Roomalaiskirje 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Roomalaisille 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Room 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
p.include_apocrypha(false)
expect(p.parse("KIRJE ROOMALAISILLE 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROOMALAISKIRJE 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROOMALAISILLE 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROOM 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
`
true
describe "Localized book 2Cor (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Cor (fi)", ->
`
expect(p.parse("Toinen Kirje korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Kirje korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Kirje korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Kirje korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Kirje korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Toinen Korinttolaiskirje 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("Toinen Korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinttolaiskirje 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinttolaiskirje 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinttolaiskirje 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinttolaiskirje 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinttilaisille 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TOINEN KORINTTOLAISKIRJE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("TOINEN KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTTOLAISKIRJE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTTOLAISKIRJE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTTOLAISKIRJE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTTOLAISKIRJE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTTILAISILLE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
`
true
describe "Localized book 1Cor (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Cor (fi)", ->
`
expect(p.parse("Ensimmainen Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Ensimmäinen Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Ensimmainen Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Ensimmäinen Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Ensimmainen Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("Ensimmäinen Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Kirje korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korinttolaiskirje 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korinttilaisille 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("ENSIMMÄINEN KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("ENSIMMAINEN KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("ENSIMMÄINEN KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("ENSIMMAINEN KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("ENSIMMÄINEN KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KIRJE KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTTOLAISKIRJE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTTILAISILLE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
`
true
describe "Localized book Gal (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gal (fi)", ->
`
expect(p.parse("Kirje galatalaisille 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Galatalaisille 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Galatalaiskirj 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
p.include_apocrypha(false)
expect(p.parse("KIRJE GALATALAISILLE 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATALAISILLE 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATALAISKIRJ 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
`
true
describe "Localized book Eph (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eph (fi)", ->
`
expect(p.parse("Kirje efesolaisille 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efesolaiskirje 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efesolaisille 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Ef 1:1").osis()).toEqual("Eph.1.1")
p.include_apocrypha(false)
expect(p.parse("KIRJE EFESOLAISILLE 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFESOLAISKIRJE 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFESOLAISILLE 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EF 1:1").osis()).toEqual("Eph.1.1")
`
true
describe "Localized book <NAME> (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: <NAME> (fi)", ->
`
expect(p.parse("Kirje filippilaisille 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Kirje filippiläisille 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Filippilaiskirje 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Filippiläiskirje 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Filippilaisille 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Filippiläisille 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Fil 1:1").osis()).toEqual("Phil.1.1")
p.include_apocrypha(false)
expect(p.parse("KIRJE FILIPPILAISILLE 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("KIRJE FILIPPILÄISILLE 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("FILIPPILAISKIRJE 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("FILIPPILÄISKIRJE 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("FILIPPILAISILLE 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("FILIPPILÄISILLE 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("FIL 1:1").osis()).toEqual("Phil.1.1")
`
true
describe "Localized book Col (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Col (fi)", ->
`
expect(p.parse("Kirje kolossalaisille 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Kolossalaiskirje 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Kolossalaisille 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
p.include_apocrypha(false)
expect(p.parse("KIRJE KOLOSSALAISILLE 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KOLOSSALAISKIRJE 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KOLOSSALAISILLE 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
`
true
describe "Localized book 2Thess (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Thess (fi)", ->
`
expect(p.parse("Toinen Kirje tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Kirje tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Kirje tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Kirje tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Kirje tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("Toinen Tessalonikalaiskirje 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("Toinen Tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Tessalonikalaiskirje 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Tessalonikalaiskirje 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Tessalonikalaiskirje 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tessalonikalaiskirje 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tessalonikalaisille 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TOINEN TESSALONIKALAISKIRJE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("TOINEN TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. TESSALONIKALAISKIRJE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TESSALONIKALAISKIRJE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II TESSALONIKALAISKIRJE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TESSALONIKALAISKIRJE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TESSALONIKALAISILLE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
`
true
describe "Localized book 1Thess (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Thess (fi)", ->
`
expect(p.parse("Ensimmainen Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("Ensimmäinen Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("Ensimmainen Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("Ensimmäinen Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("Ensimmainen Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("Ensimmäinen Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Kirje tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Tessalonikalaiskirje 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Tessalonikalaisille 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("ENSIMMÄINEN KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("ENSIMMAINEN TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("ENSIMMÄINEN TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("ENSIMMAINEN TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("ENSIMMÄINEN TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I KIRJE TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I TESSALONIKALAISKIRJE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I TESSALONIKALAISILLE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
`
true
describe "Localized book 2Tim (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Tim (fi)", ->
`
expect(p.parse("Toinen Kirje Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. Kirje Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Kirje Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Kirje Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Kirje Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("Toinen Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("Toinen Timoteuskirje 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. Timoteuskirje 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Timoteuskirje 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Timoteuskirje 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Timoteukselle 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Timoteuskirje 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TOINEN TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("TOINEN TIMOTEUSKIRJE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. TIMOTEUSKIRJE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTEUSKIRJE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II TIMOTEUSKIRJE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTEUKSELLE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTEUSKIRJE 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
`
true
describe "Localized book 1Tim (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Tim (fi)", ->
`
expect(p.parse("Ensimmainen Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("Ensimmäinen Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("Ensimmainen Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("Ensimmainen Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("Ensimmäinen Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("Ensimmäinen Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Kirje Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Timoteukselle 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Timoteuskirje 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("ENSIMMÄINEN KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("ENSIMMAINEN TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("ENSIMMAINEN TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("ENSIMMÄINEN TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("ENSIMMÄINEN TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I KIRJE TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I TIMOTEUKSELLE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I TIMOTEUSKIRJE 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
`
true
describe "Localized book Titus (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Titus (fi)", ->
`
expect(p.parse("Kirje Titukselle 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Titukselle 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
p.include_apocrypha(false)
expect(p.parse("KIRJE TITUKSELLE 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITUKSELLE 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
`
true
describe "Localized book Phlm (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phlm (fi)", ->
`
expect(p.parse("Kirje Filemonille 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Filemonille 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Filem 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
p.include_apocrypha(false)
expect(p.parse("KIRJE FILEMONILLE 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("FILEMONILLE 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("FILEM 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
`
true
describe "Localized book Heb (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Heb (fi)", ->
`
expect(p.parse("Kirje hebrealaisille 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Kirje heprealaisille 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Heprealaiskirje 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Heprealaisille 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hebr 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hepr 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
p.include_apocrypha(false)
expect(p.parse("KIRJE HEBREALAISILLE 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("KIRJE HEPREALAISILLE 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEPREALAISKIRJE 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEPREALAISILLE 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEBR 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEPR 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
`
true
describe "Localized book Jas (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jas (fi)", ->
`
expect(p.parse("Jaakobin kirje 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jaakobin 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jaak 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
p.include_apocrypha(false)
expect(p.parse("JAAKOBIN KIRJE 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAAKOBIN 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAAK 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
`
true
describe "Localized book 2Pet (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Pet (fi)", ->
`
expect(p.parse("Toinen Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("Toinen Pietarin 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. Pietarin 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Pietarin 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II Pietarin 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pietarin 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Piet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. PI<NAME> 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PI<NAME> K<NAME> 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("TOINEN PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. PI<NAME>ARIN 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PI<NAME>IN 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PIET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
`
true
describe "Localized book 1Pet (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Pet (fi)", ->
`
expect(p.parse("Ensimmainen Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("Ensimmäinen Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("Ensimmainen Pietarin 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("Ensimmäinen Pietarin 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I Pietarin kirje 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Pietarin 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. Pietarin 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pietarin 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I Pietarin 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Piet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("ENSIMMÄINEN PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("ENSIMMAINEN PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("ENSIMMÄINEN PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I PIETARIN KIRJE 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I PIETARIN 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PIET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
`
true
describe "Localized book <NAME>ude (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: <NAME>ude (fi)", ->
`
expect(p.parse("Juudaksen kirje 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Juudaksen 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Juud 1:1").osis()).toEqual("Jude.1.1")
p.include_apocrypha(false)
expect(p.parse("JUUDAKSEN KIRJE 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUUDAKSEN 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUUD 1:1").osis()).toEqual("Jude.1.1")
`
true
describe "Localized book Tob (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Tob (fi)", ->
`
expect(p.parse("Tobiaan kirja 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tobitin kirja 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tobian kirja 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tobiaan 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tobitin 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tobian 1:1").osis()).toEqual("Tob.1.1")
expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
`
true
describe "Localized book Jdt (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jdt (fi)", ->
`
expect(p.parse("Juditin kirja 1:1").osis()).toEqual("Jdt.1.1")
expect(p.parse("Juditin 1:1").osis()).toEqual("Jdt.1.1")
expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
expect(p.parse("Jud 1:1").osis()).toEqual("Jdt.1.1")
`
true
describe "Localized book Bar (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bar (fi)", ->
`
expect(p.parse("Baarukin kirja 1:1").osis()).toEqual("Bar.1.1")
expect(p.parse("Barukin kirja 1:1").osis()).toEqual("Bar.1.1")
expect(p.parse("Baarukin 1:1").osis()).toEqual("Bar.1.1")
expect(p.parse("Barukin 1:1").osis()).toEqual("Bar.1.1")
expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
`
true
describe "Localized book Sus (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sus (fi)", ->
`
expect(p.parse("<NAME> 1:1").osis()).toEqual("Sus.1.1")
expect(p.parse("<NAME> 1:1").osis()).toEqual("Sus.1.1")
expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
`
true
describe "Localized book 2Macc (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Macc (fi)", ->
`
expect(p.parse("Toinen makkabilaiskirja 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("II. makkabilaiskirja 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2. makkabilaiskirja 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("II makkabilaiskirja 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2 makkabilaiskirja 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2 makk 1:1").osis()).toEqual("2Macc.1.1")
expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
`
true
describe "Localized book 3Macc (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3Macc (fi)", ->
`
expect(p.parse("Kolmas makkabilaiskirja 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("III. makkabilaiskirja 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("III makkabilaiskirja 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3. makkabilaiskirja 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3 makkabilaiskirja 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3 makk 1:1").osis()).toEqual("3Macc.1.1")
expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
`
true
describe "Localized book 4Macc (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 4Macc (fi)", ->
`
expect(p.parse("Neljas makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("Neljäs makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("IV. makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4. makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("IV makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4 makkabilaiskirja 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4 makk 1:1").osis()).toEqual("4Macc.1.1")
expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
`
true
describe "Localized book 1Macc (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Macc (fi)", ->
`
expect(p.parse("Ensimmainen makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("Ensimmäinen makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1. makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("I. makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1 makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("I makkabilaiskirja 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1 makk 1:1").osis()).toEqual("1Macc.1.1")
expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
`
true
describe "Miscellaneous tests", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should return the expected language", ->
expect(p.languages).toEqual ["fi"]
it "should handle ranges (fi)", ->
expect(p.parse("Titus 1:1 – 2").osis()).toEqual "Titus.1.1-Titus.1.2"
expect(p.parse("Matt 1–2").osis()).toEqual "Matt.1-Matt.2"
expect(p.parse("Phlm 2 – 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
it "should handle chapters (fi)", ->
expect(p.parse("Titus 1:1, luku 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 LUKU 6").osis()).toEqual "Matt.3.4,Matt.6"
expect(p.parse("Titus 1:1, luvut 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 LUVUT 6").osis()).toEqual "Matt.3.4,Matt.6"
expect(p.parse("Titus 1:1, luvun 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 LUVUN 6").osis()).toEqual "Matt.3.4,Matt.6"
it "should handle verses (fi)", ->
expect(p.parse("Exod 1:1 jakeet 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm JAKEET 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 jakeissa 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm JAKEISSA 6").osis()).toEqual "Phlm.1.6"
it "should handle 'and' (fi)", ->
expect(p.parse("Exod 1:1 ja 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 JA 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
expect(p.parse("Exod 1:1 vrt 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 VRT 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
it "should handle titles (fi)", ->
expect(p.parse("Ps 3 johdannolla, 4:2, 5:johdannolla").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
expect(p.parse("PS 3 JOHDANNOLLA, 4:2, 5:JOHDANNOLLA").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
it "should handle 'ff' (fi)", ->
expect(p.parse("Rev 3ss, 4:2ss").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 SS, 4:2 SS").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
it "should handle translations (fi)", ->
expect(p.parse("Lev 1 (R1933)").osis_and_translations()).toEqual [["Lev.1", "R1933"]]
expect(p.parse("lev 1 r1933").osis_and_translations()).toEqual [["Lev.1", "R1933"]]
expect(p.parse("Lev 1 (R1992)").osis_and_translations()).toEqual [["Lev.1", "R1992"]]
expect(p.parse("lev 1 r1992").osis_and_translations()).toEqual [["Lev.1", "R1992"]]
it "should handle book ranges (fi)", ->
p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
expect(p.parse("Ensimmäinen – <NAME> <NAME>").osis()).toEqual "1John.1-3John.1"
it "should handle boundaries (fi)", ->
p.set_options {book_alone_strategy: "full"}
expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
bcv_parser = require("../../js/fi_bcv_parser.js").bcv_parser
describe "Parsing", ->
p = {}
beforeEach ->
p = new bcv_parser
p.options.osis_compaction_strategy = "b"
p.options.sequence_combination_strategy = "combine"
it "should round-trip OSIS references", ->
p.set_options osis_compaction_strategy: "bc"
books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PImos","PI:NAME:<NAME>END_PIad","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PIah","Hab","Zeph","Hag","Zech","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
it "should round-trip OSIS Apocrypha references", ->
p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
p.include_apocrypha true
books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
p.set_options ps151_strategy: "bc"
expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
p.include_apocrypha false
for book in books
bc = book + ".1"
expect(p.parse(bc).osis()).toEqual ""
it "should handle a preceding character", ->
expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
expect(p.parse("1Ps 1").osis()).toEqual ""
expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gen (fi)", ->
`
expect(p.parse("Ensimmainen Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Ensimmäinen Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Ensimmainen Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Ensimmäinen Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I Mooseksen kirja 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I Mooseksen 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 Moos 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("ENSIMMÄINEN MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("ENSIMMAINEN MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("ENSIMMÄINEN MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I MOOSEKSEN KIRJA 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1. MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I. MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("I MOOSEKSEN 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("1 MOOS 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
`
true
describe "Localized book Exod (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Exod (fi)", ->
`
expect(p.parse("Toinen Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Mooseksen kirja 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Toinen Mooseksen 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. Mooseksen 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. Mooseksen 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II Mooseksen 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Mooseksen 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 Moos 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("TOINEN MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II. MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2. MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("II MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MOOSEKSEN 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("2 MOOS 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
`
true
describe "Localized book Bel (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (fi)", ->
`
expect(p.parse("Bel ja lohikaarme 1:1").osis()).toEqual("Bel.1.1")
expect(p.parse("Bel ja lohikaärme 1:1").osis()).toEqual("Bel.1.1")
expect(p.parse("Bel ja lohikäarme 1:1").osis()).toEqual("Bel.1.1")
expect(p.parse("Bel ja lohikäärme 1:1").osis()).toEqual("Bel.1.1")
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (fi)", ->
`
expect(p.parse("Kolmas Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Mooseksen kirja 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Kolmas Mooseksen 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. Mooseksen 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III Mooseksen 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. Mooseksen 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Mooseksen 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 Moos 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("KOLMAS MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("KOLMAS MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III. MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("III MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3. MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MOOSEKSEN 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("3 MOOS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (fi)", ->
`
expect(p.parse("Neljas Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Neljäs Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Mooseksen kirja 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("PI:NAME:<NAME>END_PIeljäs Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Mooseksen 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 Moos 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("PI:NAME:<NAME>END_PIJAS MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NELJÄS MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NELJAS MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NELJÄS MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV. MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4. MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("IV MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MOOSEKSEN 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("4 MOOS 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
`
true
describe "Localized book Sir (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (fi)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PIakin kirja 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Jesus Siirakin kirja 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Siirakin kirja 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Sirakin kirja 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Siirakin 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Sirakin 1:1").osis()).toEqual("Sir.1.1")
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (fi)", ->
`
expect(p.parse("Salomon viisaus 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Viisauden kirja 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Viis 1:1").osis()).toEqual("Wis.1.1")
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (fi)", ->
`
expect(p.parse("Valitusvirret 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Valit 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("VALITUSVIRRET 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("VALIT 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (fi)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("EpJer.1.1")
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (fi)", ->
`
expect(p.parse("Johanneksen ilmestys 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Ilmestyskirja 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Ilmestys 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Ilm 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("JOHANNEKSEN ILMESTYS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("ILMESTYSKIRJA 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("ILMESTYS 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("ILM 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (fi)", ->
`
expect(p.parse("Manassen rukouksen 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("Man ru 1:1").osis()).toEqual("PrMan.1.1")
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (fi)", ->
`
expect(p.parse("Viides Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V Mooseksen kirja 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Viides Mooseksen 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. Mooseksen 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. Mooseksen 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Mooseksen 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V Mooseksen 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 Moos 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("VIIDES MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V MOOSEKSEN KIRJA 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("VIIDES MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5. MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V. MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("V MOOSEKSEN 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("5 MOOS 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (fi)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JoPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Joos 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOOSUAN 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOOS 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Judg (fi)", ->
`
expect(p.parse("Tuomarien kirja 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Tuomarien 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Tuom 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("TUOMARIEN KIRJA 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("TUOMARIEN 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("TUOM 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (fi)", ->
`
expect(p.parse("Ruutin kirja 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Ruutin 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Ruut 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("RUUTIN KIRJA 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUUTIN 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUUT 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (fi)", ->
`
expect(p.parse("Ensimmainen Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("Ensimmäinen Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1. Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I. Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("I Esra 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1 Es 1:1").osis()).toEqual("1Esd.1.1")
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
describe "Localized book 2Esd (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Esd (fi)", ->
`
expect(p.parse("Toinen Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II. Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2. Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("II Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2 Esra 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2 Es 1:1").osis()).toEqual("2Esd.1.1")
expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
`
true
describe "Localized book Isa (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Isa (fi)", ->
`
expect(p.parse("Jesajan kirja 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Jesajan 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Jes 1:1").osis()).toEqual("Isa.1.1")
p.include_apocrypha(false)
expect(p.parse("JESAJAN KIRJA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JESAJAN 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JES 1:1").osis()).toEqual("Isa.1.1")
`
true
describe "Localized book 2Sam (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Sam (fi)", ->
`
expect(p.parse("Toinen Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. SamPI:NAME:<NAME>END_PI kirja 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. SamPI:NAME:<NAME>END_PI kirja 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Samuelin kirja 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("Toinen Samuelin 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. Samuelin 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. Samuelin 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II Samuelin 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Samuelin 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("II PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMPI:NAME:<NAME>END_PILIN 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
`
true
describe "Localized book 1Sam (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Sam (fi)", ->
`
expect(p.parse("EnPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("Ensimmäinen Samuelin kirja 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("Ensimmainen Samuelin 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("Ensimmäinen Samuelin 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. PI:NAME:<NAME>END_PI kirja 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. PI:NAME:<NAME>END_PI kirja 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SamPI:NAME:<NAME>END_PI kirja 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I PI:NAME:<NAME>END_PI kirja 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SamPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("ENSIMMÄINEN SAMUELIN KIRJA 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("ENSIMMAINEN SAMUELIN 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("ENSIPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I. PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("I PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
`
true
describe "Localized book 2Kgs (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (fi)", ->
`
expect(p.parse("Toinen Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kuninkaiden kirja 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("Toinen Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kuninkaiden 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kun 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KUNINKAIDEN KIRJA 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("TOINEN KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II. KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2. KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("II KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KUNINKAIDEN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KUN 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
describe "Localized book 1Kgs (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (fi)", ->
`
expect(p.parse("Ensimmainen Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Ensimmäinen Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Ensimmainen Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("Ensimmäinen Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I Kuninkaiden kirja 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I Kuninkaiden 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kun 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("ENSIMMÄINEN KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("ENSIMMAINEN KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("ENSIMMÄINEN KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I KUNINKAIDEN KIRJA 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1. KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I. KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("I KUNINKAIDEN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KUN 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
`
true
describe "Localized book 2Chr (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Chr (fi)", ->
`
expect(p.parse("Toinen Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II. Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Aikakirja 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Aikak 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Aik 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("TOINEN AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II. AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2. AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("II AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 AIKAKIRJA 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 AIKAK 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 AIK 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
`
true
describe "Localized book 1Chr (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Chr (fi)", ->
`
expect(p.parse("Ensimmainen Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("Ensimmäinen Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I. Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I Aikakirja 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Aikak 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Aik 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("ENSIMMAINEN AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("ENSIMMÄINEN AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1. AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I. AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("I AIKAKIRJA 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 AIKAK 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 AIK 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
`
true
describe "Localized book Ezra (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezra (fi)", ->
`
expect(p.parse("Esran kirja 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Esran 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Esra 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Esr 1:1").osis()).toEqual("Ezra.1.1")
p.include_apocrypha(false)
expect(p.parse("ESRAN KIRJA 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("ESRAN 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("ESRA 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("ESR 1:1").osis()).toEqual("Ezra.1.1")
`
true
describe "Localized book Neh (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Neh (fi)", ->
`
expect(p.parse("Nehemian kirja 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("Nehemian 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
p.include_apocrypha(false)
expect(p.parse("NEHEMIAN KIRJA 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NEHEMIAN 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
`
true
describe "Localized book GkEsth (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: GkEsth (fi)", ->
`
expect(p.parse("Kreikkalainen Esterin kirja 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("Kreikkalainen Esterin 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("Kr. Est 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
expect(p.parse("Kr Est 1:1").osis()).toEqual("GkEsth.1.1")
`
true
describe "Localized book Esth (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Esth (fi)", ->
`
expect(p.parse("Esterin kirja 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esterin 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Est 1:1").osis()).toEqual("Esth.1.1")
p.include_apocrypha(false)
expect(p.parse("ESTERIN KIRJA 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTERIN 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("EST 1:1").osis()).toEqual("Esth.1.1")
`
true
describe "Localized book Job (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Job (fi)", ->
`
expect(p.parse("Jobin kirja 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("Jobin 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
p.include_apocrypha(false)
expect(p.parse("JOBIN KIRJA 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("JOBIN 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
`
true
describe "Localized book Ps (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ps (fi)", ->
`
expect(p.parse("Psalmien kirja 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Psalmien 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Psalmit 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Psalmi 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
p.include_apocrypha(false)
expect(p.parse("PSALMIEN KIRJA 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PSALMIEN 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PSALMIT 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PSALMI 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
`
true
describe "Localized book PrAzar (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrAzar (fi)", ->
`
expect(p.parse("Asarjan rukous 1:1").osis()).toEqual("PrAzar.1.1")
expect(p.parse("Asar ru 1:1").osis()).toEqual("PrAzar.1.1")
expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
`
true
describe "Localized book Prov (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Prov (fi)", ->
`
expect(p.parse("Sananlaskujen kirja 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Sananlaskujen 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Sananlaskut 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Sananl 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Snl 1:1").osis()).toEqual("Prov.1.1")
p.include_apocrypha(false)
expect(p.parse("SANANLASKUJEN KIRJA 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SANANLASKUJEN 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SANANLASKUT 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SANANL 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SNL 1:1").osis()).toEqual("Prov.1.1")
`
true
describe "Localized book Eccl (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eccl (fi)", ->
`
expect(p.parse("SaPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Saarnaajan 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Saarnaaja 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Saarn 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Saar 1:1").osis()).toEqual("Eccl.1.1")
p.include_apocrypha(false)
expect(p.parse("SAPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("SAARNAAJAN 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("SAARNAAJA 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("SAARN 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("SAAR 1:1").osis()).toEqual("Eccl.1.1")
`
true
describe "Localized book SgThree (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: SgThree (fi)", ->
`
expect(p.parse("Kolmen nuoren miehen ollessa tulisessa patsissa 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Kolmen nuoren miehen ollessa tulisessa patsissä 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Kolmen nuoren miehen ollessa tulisessa pätsissa 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Kolmen nuoren miehen ollessa tulisessa pätsissä 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Kolmen miehen kiitosvirsi tulessa 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Kolmen miehen kiitosvirsi 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Kolmen nuoren miehen 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("Kolmen miehen 1:1").osis()).toEqual("SgThree.1.1")
expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
`
true
describe "Localized book Song (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Song (fi)", ->
`
expect(p.parse("Laulujen laulu 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Korkea veisu 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Laul. l 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Laul l 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
p.include_apocrypha(false)
expect(p.parse("LAULUJEN LAULU 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("KORKEA VEISU 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("LAUL. L 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("LAUL L 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
`
true
describe "Localized book Jer (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jer (fi)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
p.include_apocrypha(false)
expect(p.parse("JEREMIAN KIRJA 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JEREMIAN 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
`
true
describe "Localized book Ezek (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezek (fi)", ->
`
expect(p.parse("Hesekielin kirja 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Hesekielin 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Hes 1:1").osis()).toEqual("Ezek.1.1")
p.include_apocrypha(false)
expect(p.parse("HESEKIELIN KIRJA 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("HESEKIELIN 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("HES 1:1").osis()).toEqual("Ezek.1.1")
`
true
describe "Localized book Dan (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (fi)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Danielin 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
p.include_apocrypha(false)
expect(p.parse("DPI:NAME:<NAME>END_PIELPI:NAME:<NAME>END_PIIRPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DANIELIN 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
`
true
describe "Localized book Hos (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIos (fi)", ->
`
expect(p.parse("Hoosean kirja 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hoosean 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hoos 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
p.include_apocrypha(false)
expect(p.parse("HOOSEAN KIRJA 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOOSEAN 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOOS 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
`
true
describe "Localized book Joel (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Joel (fi)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI kirPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("JoPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
p.include_apocrypha(false)
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("JOPI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
`
true
describe "Localized book Amos (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Amos (fi)", ->
`
expect(p.parse("Aamoksen kirja 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Aamoksen 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Aam 1:1").osis()).toEqual("Amos.1.1")
p.include_apocrypha(false)
expect(p.parse("AAMOKSEN KIRJA 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AAMOKSEN 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AAM 1:1").osis()).toEqual("Amos.1.1")
`
true
describe "Localized book Obad (fi)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Obad (fi)", ->
`
expect(p.parse("Obadjan kirja 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obadjan 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obadj 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Ob 1:1").osis()).toEqual("Obad.1.1")
p.include_apocrypha(false)
expect(p.parse("OBADJAN KIRJA 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBADJAN 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBADJ 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OB 1:1").osis()).toEqual("Obad.1.1")
`
true
# FIX: the test generator left an unexpanded name placeholder
# ("PI:NAME:<NAME>END_PI") in the describe/it labels. Every assertion in this
# block targets OSIS book "Jonah", so the labels are corrected accordingly;
# the test body itself is unchanged.
describe "Localized book Jonah (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jonah (fi)", ->
		`
		expect(p.parse("Joonan kirja 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Joonan 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Joona 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Joon 1:1").osis()).toEqual("Jonah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOONAN KIRJA 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JOONAN 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JOONA 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JOON 1:1").osis()).toEqual("Jonah.1.1")
		`
		true
# FIX: the test generator left an unexpanded name placeholder
# ("PI:NAME:<NAME>END_PIic") in the describe/it labels. Every assertion in
# this block targets OSIS book "Mic", so the labels are corrected; the test
# body itself is unchanged.
describe "Localized book Mic (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (fi)", ->
		`
		expect(p.parse("Miikan kirja 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Miikan 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Miika 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Miik 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MIIKAN KIRJA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIIKAN 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIIKA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIIK 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
		`
		true
describe "Localized book Nah (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Nah".
		forms = ["Nahumin kirja", "Nahumin", "Nah"]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Nah.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Nah.1.1"
		true
describe "Localized book Hab (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Hab".
		forms = ["Habakukin kirja", "Habakukin", "Hab"]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Hab.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Hab.1.1"
		true
describe "Localized book Zeph (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Zeph".
		forms = ["Sefanjan kirja", "Sefanjan", "Zeph", "Sef"]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Zeph.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Zeph.1.1"
		true
describe "Localized book Hag (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Hag".
		forms = ["Haggain kirja", "Haggain", "Hagg", "Hag"]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Hag.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Hag.1.1"
		true
describe "Localized book Zech (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Zech".
		forms = ["Sakarjan kirja", "Sakarjan", "Zech", "Sak"]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Zech.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Zech.1.1"
		true
describe "Localized book Mal (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mal (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Mal".
		forms = ["Malakian kirja", "Malakian", "Mal"]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Mal.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Mal.1.1"
		true
# FIX: the test generator left an unexpanded name placeholder
# ("PI:NAME:<NAME>END_PI") in the describe/it labels. Every assertion in this
# block targets OSIS book "Matt", so the labels are corrected; the test body
# itself is unchanged.
describe "Localized book Matt (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Matt (fi)", ->
		`
		expect(p.parse("Evankeliumi Matteuksen mukaan 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matteuksen evankeliumi 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matteuksen 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EVANKELIUMI MATTEUKSEN MUKAAN 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTEUKSEN EVANKELIUMI 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTEUKSEN 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
		`
		true
describe "Localized book Mark (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Mark".
		forms = [
			"Evankeliumi Markuksen mukaan"
			"Markuksen evankeliumi"
			"Markuksen"
			"Mark"
		]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Mark.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Mark.1.1"
		true
describe "Localized book Luke (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Luke (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Luke".
		forms = [
			"Evankeliumi Luukkaan mukaan"
			"Luukkaan evankeliumi"
			"Luukkaan"
			"Luke"
			"Luuk"
		]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Luke.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Luke.1.1"
		true
describe "Localized book 1John (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1John (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "1John".
		forms = [
			"Ensimmainen Johanneksen kirje"
			"Ensimmäinen Johanneksen kirje"
			"Ensimmainen Johanneksen"
			"Ensimmäinen Johanneksen"
			"1. Johanneksen kirje"
			"I. Johanneksen kirje"
			"1 Johanneksen kirje"
			"I Johanneksen kirje"
			"1. Johanneksen"
			"I. Johanneksen"
			"1 Johanneksen"
			"I Johanneksen"
			"1 Joh"
			"1John"
		]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "1John.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "1John.1.1"
		true
describe "Localized book 2John (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "2John".
		forms = [
			"Toinen Johanneksen kirje"
			"II. Johanneksen kirje"
			"2. Johanneksen kirje"
			"II Johanneksen kirje"
			"2 Johanneksen kirje"
			"Toinen Johanneksen"
			"II. Johanneksen"
			"2. Johanneksen"
			"II Johanneksen"
			"2 Johanneksen"
			"Toinen Joh"
			"II. Joh"
			"2. Joh"
			"II Joh"
			"2 Joh"
			"2John"
		]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "2John.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "2John.1.1"
		true
describe "Localized book 3John (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "3John".
		forms = [
			"Kolmas Johanneksen kirje"
			"III. Johanneksen kirje"
			"III Johanneksen kirje"
			"3. Johanneksen kirje"
			"3 Johanneksen kirje"
			"Kolmas Johanneksen"
			"III. Johanneksen"
			"III Johanneksen"
			"3. Johanneksen"
			"3 Johanneksen"
			"3 Joh"
			"3John"
		]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "3John.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "3John.1.1"
		true
# FIX: the test generator left an unexpanded name placeholder
# ("PI:NAME:<NAME>END_PI") in the describe/it labels AND inside two parse
# inputs. The lower-case assertions ("John 1:1", "Joh 1:1") show the intended
# upper-case inputs are "JOHN 1:1" and "JOH 1:1"; both are restored below.
describe "Localized book John (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: John (fi)", ->
		`
		expect(p.parse("Evankeliumi Johanneksen mukaan 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Johanneksen evankeliumi 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Johanneksen 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Joh 1:1").osis()).toEqual("John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EVANKELIUMI JOHANNEKSEN MUKAAN 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHANNEKSEN EVANKELIUMI 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHANNEKSEN 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOH 1:1").osis()).toEqual("John.1.1")
		`
		true
describe "Localized book Acts (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Acts".
		forms = ["Apostolien teot", "Ap. t", "Acts", "Ap t", "Ap.t", "Teot", "Apt"]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Acts.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Acts.1.1"
		true
describe "Localized book Rom (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Rom".
		forms = [
			"Kirje roomalaisille"
			"Roomalaiskirje"
			"Roomalaisille"
			"Room"
			"Rom"
		]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Rom.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Rom.1.1"
		true
describe "Localized book 2Cor (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Cor (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "2Cor".
		forms = [
			"Toinen Kirje korinttilaisille"
			"II. Kirje korinttilaisille"
			"2. Kirje korinttilaisille"
			"II Kirje korinttilaisille"
			"2 Kirje korinttilaisille"
			"Toinen Korinttolaiskirje"
			"Toinen Korinttilaisille"
			"II. Korinttolaiskirje"
			"2. Korinttolaiskirje"
			"II Korinttolaiskirje"
			"II. Korinttilaisille"
			"2 Korinttolaiskirje"
			"2. Korinttilaisille"
			"II Korinttilaisille"
			"2 Korinttilaisille"
			"2 Kor"
			"2Cor"
		]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "2Cor.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "2Cor.1.1"
		true
describe "Localized book 1Cor (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Cor (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "1Cor".
		forms = [
			"Ensimmainen Kirje korinttilaisille"
			"Ensimmäinen Kirje korinttilaisille"
			"Ensimmainen Korinttolaiskirje"
			"Ensimmäinen Korinttolaiskirje"
			"Ensimmainen Korinttilaisille"
			"Ensimmäinen Korinttilaisille"
			"1. Kirje korinttilaisille"
			"I. Kirje korinttilaisille"
			"1 Kirje korinttilaisille"
			"I Kirje korinttilaisille"
			"1. Korinttolaiskirje"
			"I. Korinttolaiskirje"
			"1 Korinttolaiskirje"
			"1. Korinttilaisille"
			"I Korinttolaiskirje"
			"I. Korinttilaisille"
			"1 Korinttilaisille"
			"I Korinttilaisille"
			"1 Kor"
			"1Cor"
		]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "1Cor.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "1Cor.1.1"
		true
describe "Localized book Gal (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gal (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Gal".
		forms = ["Kirje galatalaisille", "Galatalaisille", "Galatalaiskirj", "Gal"]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Gal.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Gal.1.1"
		true
describe "Localized book Eph (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eph (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Eph".
		forms = [
			"Kirje efesolaisille"
			"Efesolaiskirje"
			"Efesolaisille"
			"Eph"
			"Ef"
		]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Eph.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Eph.1.1"
		true
# FIX: the test generator left an unexpanded name placeholder
# ("PI:NAME:<NAME>END_PI") in the describe/it labels. Every assertion in this
# block targets OSIS book "Phil", so the labels are corrected; the test body
# itself is unchanged.
describe "Localized book Phil (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil (fi)", ->
		`
		expect(p.parse("Kirje filippilaisille 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Kirje filippiläisille 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filippilaiskirje 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filippiläiskirje 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filippilaisille 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filippiläisille 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Fil 1:1").osis()).toEqual("Phil.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KIRJE FILIPPILAISILLE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("KIRJE FILIPPILÄISILLE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIPPILAISKIRJE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIPPILÄISKIRJE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIPPILAISILLE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIPPILÄISILLE 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FIL 1:1").osis()).toEqual("Phil.1.1")
		`
		true
describe "Localized book Col (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Col (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "Col".
		forms = [
			"Kirje kolossalaisille"
			"Kolossalaiskirje"
			"Kolossalaisille"
			"Col"
			"Kol"
		]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "Col.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Col.1.1"
		true
describe "Localized book 2Thess (fi)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options
			book_alone_strategy: "ignore"
			book_sequence_strategy: "ignore"
			osis_compaction_strategy: "bc"
			captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (fi)", ->
		# Every accepted Finnish form of the book name must resolve to OSIS "2Thess".
		forms = [
			"Toinen Kirje tessalonikalaisille"
			"II. Kirje tessalonikalaisille"
			"2. Kirje tessalonikalaisille"
			"II Kirje tessalonikalaisille"
			"2 Kirje tessalonikalaisille"
			"Toinen Tessalonikalaiskirje"
			"Toinen Tessalonikalaisille"
			"II. Tessalonikalaiskirje"
			"2. Tessalonikalaiskirje"
			"II Tessalonikalaiskirje"
			"II. Tessalonikalaisille"
			"2 Tessalonikalaiskirje"
			"2. Tessalonikalaisille"
			"II Tessalonikalaisille"
			"2 Tessalonikalaisille"
			"2 Tess"
			"2Thess"
		]
		for form in forms
			expect(p.parse("#{form} 1:1").osis()).toEqual "2Thess.1.1"
		# The upper-case forms must still parse once the Apocrypha is excluded.
		p.include_apocrypha false
		for form in forms
			expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "2Thess.1.1"
		true
# Generated-style spec for the Finnish localizations of 1Thess, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book 1Thess (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Thess (fi)", ->
    variants = [
      "Ensimmainen Kirje tessalonikalaisille"
      "Ensimmäinen Kirje tessalonikalaisille"
      "Ensimmainen Tessalonikalaiskirje"
      "Ensimmäinen Tessalonikalaiskirje"
      "Ensimmainen Tessalonikalaisille"
      "Ensimmäinen Tessalonikalaisille"
      "1. Kirje tessalonikalaisille"
      "I. Kirje tessalonikalaisille"
      "1 Kirje tessalonikalaisille"
      "I Kirje tessalonikalaisille"
      "1. Tessalonikalaiskirje"
      "I. Tessalonikalaiskirje"
      "1 Tessalonikalaiskirje"
      "1. Tessalonikalaisille"
      "I Tessalonikalaiskirje"
      "I. Tessalonikalaisille"
      "1 Tessalonikalaisille"
      "I Tessalonikalaisille"
      "1 Tess"
      "1Thess"
    ]
    # Mixed-case forms are checked with the Apocrypha enabled...
    expect(p.parse("#{variant} 1:1").osis()).toEqual("1Thess.1.1") for variant in variants
    # ...and the upper-case forms with it disabled, mirroring the generated spec.
    p.include_apocrypha(false)
    expect(p.parse("#{variant.toUpperCase()} 1:1").osis()).toEqual("1Thess.1.1") for variant in variants
    true
# Generated-style spec for the Finnish localizations of 2Tim, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book 2Tim (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Tim (fi)", ->
    variants = [
      "Toinen Kirje Timoteukselle"
      "II. Kirje Timoteukselle"
      "2. Kirje Timoteukselle"
      "II Kirje Timoteukselle"
      "2 Kirje Timoteukselle"
      "Toinen Timoteukselle"
      "Toinen Timoteuskirje"
      "II. Timoteukselle"
      "II. Timoteuskirje"
      "2. Timoteukselle"
      "2. Timoteuskirje"
      "II Timoteukselle"
      "II Timoteuskirje"
      "2 Timoteukselle"
      "2 Timoteuskirje"
      "2 Tim"
      "2Tim"
    ]
    # Mixed-case forms with the Apocrypha enabled...
    expect(p.parse("#{variant} 1:1").osis()).toEqual("2Tim.1.1") for variant in variants
    # ...then the upper-case forms with it disabled, mirroring the generated spec.
    p.include_apocrypha(false)
    expect(p.parse("#{variant.toUpperCase()} 1:1").osis()).toEqual("2Tim.1.1") for variant in variants
    true
# Generated-style spec for the Finnish localizations of 1Tim, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book 1Tim (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Tim (fi)", ->
    variants = [
      "Ensimmainen Kirje Timoteukselle"
      "Ensimmäinen Kirje Timoteukselle"
      "Ensimmainen Timoteukselle"
      "Ensimmainen Timoteuskirje"
      "Ensimmäinen Timoteukselle"
      "Ensimmäinen Timoteuskirje"
      "1. Kirje Timoteukselle"
      "I. Kirje Timoteukselle"
      "1 Kirje Timoteukselle"
      "I Kirje Timoteukselle"
      "1. Timoteukselle"
      "1. Timoteuskirje"
      "I. Timoteukselle"
      "I. Timoteuskirje"
      "1 Timoteukselle"
      "1 Timoteuskirje"
      "I Timoteukselle"
      "I Timoteuskirje"
      "1 Tim"
      "1Tim"
    ]
    # Mixed-case forms with the Apocrypha enabled...
    expect(p.parse("#{variant} 1:1").osis()).toEqual("1Tim.1.1") for variant in variants
    # ...then the upper-case forms with it disabled, mirroring the generated spec.
    p.include_apocrypha(false)
    expect(p.parse("#{variant.toUpperCase()} 1:1").osis()).toEqual("1Tim.1.1") for variant in variants
    true
# Generated-style spec for the Finnish localizations of Titus, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book Titus (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Titus (fi)", ->
    variants = ["Kirje Titukselle", "Titukselle", "Titus", "Tit"]
    # Mixed-case forms with the Apocrypha enabled...
    expect(p.parse("#{variant} 1:1").osis()).toEqual("Titus.1.1") for variant in variants
    # ...then the upper-case forms with it disabled, mirroring the generated spec.
    p.include_apocrypha(false)
    expect(p.parse("#{variant.toUpperCase()} 1:1").osis()).toEqual("Titus.1.1") for variant in variants
    true
# Generated-style spec for the Finnish localizations of Phlm, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book Phlm (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Phlm (fi)", ->
    variants = ["Kirje Filemonille", "Filemonille", "Filem", "Phlm"]
    # Mixed-case forms with the Apocrypha enabled...
    expect(p.parse("#{variant} 1:1").osis()).toEqual("Phlm.1.1") for variant in variants
    # ...then the upper-case forms with it disabled, mirroring the generated spec.
    p.include_apocrypha(false)
    expect(p.parse("#{variant.toUpperCase()} 1:1").osis()).toEqual("Phlm.1.1") for variant in variants
    true
# Generated-style spec for the Finnish localizations of Heb, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book Heb (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Heb (fi)", ->
    variants = [
      "Kirje hebrealaisille"
      "Kirje heprealaisille"
      "Heprealaiskirje"
      "Heprealaisille"
      "Hebr"
      "Hepr"
      "Heb"
    ]
    # Mixed-case forms with the Apocrypha enabled...
    expect(p.parse("#{variant} 1:1").osis()).toEqual("Heb.1.1") for variant in variants
    # ...then the upper-case forms with it disabled, mirroring the generated spec.
    p.include_apocrypha(false)
    expect(p.parse("#{variant.toUpperCase()} 1:1").osis()).toEqual("Heb.1.1") for variant in variants
    true
# Generated-style spec for the Finnish localizations of Jas, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book Jas (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Jas (fi)", ->
    variants = ["Jaakobin kirje", "Jaakobin", "Jaak", "Jas"]
    # Mixed-case forms with the Apocrypha enabled...
    expect(p.parse("#{variant} 1:1").osis()).toEqual("Jas.1.1") for variant in variants
    # ...then the upper-case forms with it disabled, mirroring the generated spec.
    p.include_apocrypha(false)
    expect(p.parse("#{variant.toUpperCase()} 1:1").osis()).toEqual("Jas.1.1") for variant in variants
    true
# Generated spec for the Finnish localizations of 2Pet.
# NOTE(review): four of the upper-case expectations below had been destroyed
# by an anonymization artifact ("PI:NAME:...END_PI"); they are reconstructed
# here from the parallel mixed-case expectations earlier in the block.
describe "Localized book 2Pet (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Pet (fi)", ->
    `
    expect(p.parse("Toinen Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("II. Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2. Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("II Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2 Pietarin kirje 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("Toinen Pietarin 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("II. Pietarin 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2. Pietarin 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("II Pietarin 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2 Pietarin 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2 Piet 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
    p.include_apocrypha(false)
    expect(p.parse("TOINEN PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("II. PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2. PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("II PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2 PIETARIN KIRJE 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("TOINEN PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("II. PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2. PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("II PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2 PIETARIN 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2 PIET 1:1").osis()).toEqual("2Pet.1.1")
    expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
    `
    true
# Generated-style spec for the Finnish localizations of 1Pet, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book 1Pet (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Pet (fi)", ->
    variants = [
      "Ensimmainen Pietarin kirje"
      "Ensimmäinen Pietarin kirje"
      "Ensimmainen Pietarin"
      "Ensimmäinen Pietarin"
      "1. Pietarin kirje"
      "I. Pietarin kirje"
      "1 Pietarin kirje"
      "I Pietarin kirje"
      "1. Pietarin"
      "I. Pietarin"
      "1 Pietarin"
      "I Pietarin"
      "1 Piet"
      "1Pet"
    ]
    # Mixed-case forms with the Apocrypha enabled...
    expect(p.parse("#{variant} 1:1").osis()).toEqual("1Pet.1.1") for variant in variants
    # ...then the upper-case forms with it disabled, mirroring the generated spec.
    p.include_apocrypha(false)
    expect(p.parse("#{variant.toUpperCase()} 1:1").osis()).toEqual("1Pet.1.1") for variant in variants
    true
# Generated spec for the Finnish localizations of Jude.
# NOTE(review): the describe/it titles had been destroyed by an anonymization
# artifact ("PI:NAME:...END_PI"); "Jude" is restored from the OSIS ids
# asserted below.
describe "Localized book Jude (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Jude (fi)", ->
    `
    expect(p.parse("Juudaksen kirje 1:1").osis()).toEqual("Jude.1.1")
    expect(p.parse("Juudaksen 1:1").osis()).toEqual("Jude.1.1")
    expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
    expect(p.parse("Juud 1:1").osis()).toEqual("Jude.1.1")
    p.include_apocrypha(false)
    expect(p.parse("JUUDAKSEN KIRJE 1:1").osis()).toEqual("Jude.1.1")
    expect(p.parse("JUUDAKSEN 1:1").osis()).toEqual("Jude.1.1")
    expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
    expect(p.parse("JUUD 1:1").osis()).toEqual("Jude.1.1")
    `
    true
# Generated-style spec for the Finnish localizations of Tob, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book Tob (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Tob (fi)", ->
    # Apocryphal book: the generated spec only checks it with the Apocrypha
    # enabled, so there is no upper-case pass here.
    variants = ["Tobiaan kirja", "Tobitin kirja", "Tobian kirja", "Tobiaan", "Tobitin", "Tobian", "Tob"]
    expect(p.parse("#{variant} 1:1").osis()).toEqual("Tob.1.1") for variant in variants
    true
# Generated-style spec for the Finnish localizations of Jdt, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book Jdt (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Jdt (fi)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    variants = ["Juditin kirja", "Juditin", "Jdt", "Jud"]
    expect(p.parse("#{variant} 1:1").osis()).toEqual("Jdt.1.1") for variant in variants
    true
# Generated-style spec for the Finnish localizations of Bar, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book Bar (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Bar (fi)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    variants = ["Baarukin kirja", "Barukin kirja", "Baarukin", "Barukin", "Bar"]
    expect(p.parse("#{variant} 1:1").osis()).toEqual("Bar.1.1") for variant in variants
    true
# Generated spec for the Finnish localizations of Sus.
# NOTE(review): the two long-form inputs below had been destroyed by an
# anonymization artifact ("PI:NAME:...END_PI"). "Susannan kirja"/"Susanna"
# are plausible reconstructions (the OSIS id "Sus" is Susanna and the sibling
# blocks use the "<name>n kirja" pattern) — TODO confirm against the upstream
# Finnish spec before relying on them.
describe "Localized book Sus (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Sus (fi)", ->
    `
    expect(p.parse("Susannan kirja 1:1").osis()).toEqual("Sus.1.1")
    expect(p.parse("Susanna 1:1").osis()).toEqual("Sus.1.1")
    expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
    `
    true
# Generated-style spec for the Finnish localizations of 2Macc, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book 2Macc (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Macc (fi)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    variants = ["Toinen makkabilaiskirja", "II. makkabilaiskirja", "2. makkabilaiskirja", "II makkabilaiskirja", "2 makkabilaiskirja", "2 makk", "2Macc"]
    expect(p.parse("#{variant} 1:1").osis()).toEqual("2Macc.1.1") for variant in variants
    true
# Generated-style spec for the Finnish localizations of 3Macc, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book 3Macc (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 3Macc (fi)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    variants = ["Kolmas makkabilaiskirja", "III. makkabilaiskirja", "III makkabilaiskirja", "3. makkabilaiskirja", "3 makkabilaiskirja", "3 makk", "3Macc"]
    expect(p.parse("#{variant} 1:1").osis()).toEqual("3Macc.1.1") for variant in variants
    true
# Generated-style spec for the Finnish localizations of 4Macc, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book 4Macc (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 4Macc (fi)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    variants = ["Neljas makkabilaiskirja", "Neljäs makkabilaiskirja", "IV. makkabilaiskirja", "4. makkabilaiskirja", "IV makkabilaiskirja", "4 makkabilaiskirja", "4 makk", "4Macc"]
    expect(p.parse("#{variant} 1:1").osis()).toEqual("4Macc.1.1") for variant in variants
    true
# Generated-style spec for the Finnish localizations of 1Macc, rewritten to
# drive the same assertions from a single list of book-name variants.
describe "Localized book 1Macc (fi)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Macc (fi)", ->
    # Apocryphal book: only checked with the Apocrypha enabled.
    variants = ["Ensimmainen makkabilaiskirja", "Ensimmäinen makkabilaiskirja", "1. makkabilaiskirja", "I. makkabilaiskirja", "1 makkabilaiskirja", "I makkabilaiskirja", "1 makk", "1Macc"]
    expect(p.parse("#{variant} 1:1").osis()).toEqual("1Macc.1.1") for variant in variants
    true
# Cross-cutting Finnish parsing tests: ranges, chapter/verse keywords,
# conjunctions, psalm titles, "ff" suffixes, translations, book ranges and
# boundary punctuation.
describe "Miscellaneous tests", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should return the expected language", ->
    expect(p.languages).toEqual ["fi"]
  it "should handle ranges (fi)", ->
    expect(p.parse("Titus 1:1 – 2").osis()).toEqual "Titus.1.1-Titus.1.2"
    expect(p.parse("Matt 1–2").osis()).toEqual "Matt.1-Matt.2"
    expect(p.parse("Phlm 2 – 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
  it "should handle chapters (fi)", ->
    expect(p.parse("Titus 1:1, luku 2").osis()).toEqual "Titus.1.1,Titus.2"
    expect(p.parse("Matt 3:4 LUKU 6").osis()).toEqual "Matt.3.4,Matt.6"
    expect(p.parse("Titus 1:1, luvut 2").osis()).toEqual "Titus.1.1,Titus.2"
    expect(p.parse("Matt 3:4 LUVUT 6").osis()).toEqual "Matt.3.4,Matt.6"
    expect(p.parse("Titus 1:1, luvun 2").osis()).toEqual "Titus.1.1,Titus.2"
    expect(p.parse("Matt 3:4 LUVUN 6").osis()).toEqual "Matt.3.4,Matt.6"
  it "should handle verses (fi)", ->
    expect(p.parse("Exod 1:1 jakeet 3").osis()).toEqual "Exod.1.1,Exod.1.3"
    expect(p.parse("Phlm JAKEET 6").osis()).toEqual "Phlm.1.6"
    expect(p.parse("Exod 1:1 jakeissa 3").osis()).toEqual "Exod.1.1,Exod.1.3"
    expect(p.parse("Phlm JAKEISSA 6").osis()).toEqual "Phlm.1.6"
  it "should handle 'and' (fi)", ->
    expect(p.parse("Exod 1:1 ja 3").osis()).toEqual "Exod.1.1,Exod.1.3"
    expect(p.parse("Phlm 2 JA 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
    expect(p.parse("Exod 1:1 vrt 3").osis()).toEqual "Exod.1.1,Exod.1.3"
    expect(p.parse("Phlm 2 VRT 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
  it "should handle titles (fi)", ->
    expect(p.parse("Ps 3 johdannolla, 4:2, 5:johdannolla").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
    expect(p.parse("PS 3 JOHDANNOLLA, 4:2, 5:JOHDANNOLLA").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
  it "should handle 'ff' (fi)", ->
    expect(p.parse("Rev 3ss, 4:2ss").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
    expect(p.parse("REV 3 SS, 4:2 SS").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
  it "should handle translations (fi)", ->
    expect(p.parse("Lev 1 (R1933)").osis_and_translations()).toEqual [["Lev.1", "R1933"]]
    expect(p.parse("lev 1 r1933").osis_and_translations()).toEqual [["Lev.1", "R1933"]]
    expect(p.parse("Lev 1 (R1992)").osis_and_translations()).toEqual [["Lev.1", "R1992"]]
    expect(p.parse("lev 1 r1992").osis_and_translations()).toEqual [["Lev.1", "R1992"]]
  it "should handle book ranges (fi)", ->
    p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
    # NOTE(review): the original input string was destroyed by an
    # anonymization artifact; "Ensimmäinen – Kolmas Johanneksen kirje"
    # ("First – Third John") is a plausible reconstruction given the asserted
    # OSIS range — TODO confirm against the upstream Finnish spec.
    expect(p.parse("Ensimmäinen – Kolmas Johanneksen kirje").osis()).toEqual "1John.1-3John.1"
  it "should handle boundaries (fi)", ->
    p.set_options {book_alone_strategy: "full"}
    expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
    expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
define [
'./resource-add-widget'
'./textarea-field'
'./select-field'
'./relational-select-field'
'./related-resource-representation'
'./file-select-via-search-field'
'./source-select-via-search-field'
'./source-select-via-search-input'
'./../models/source'
], (ResourceAddWidgetView, TextareaFieldView, SelectFieldView,
RelationalSelectFieldView, RelatedResourceRepresentationView,
FileSelectViaSearchFieldView, SourceSelectViaSearchFieldView,
SourceSelectViaSearchInputView, SourceModel, ResourceAsRowView) ->
# The `crossref` and `crossref_source` attributes are interlinked in Source
# models in a complex way. To choose a `crossref` value, the user must select
# from the existing sources. This valuates two things:
#
# 1. `crossref` as the selected source's `key` value
# 2. `crossref_source` as the selected source (an object)
#
# This fact is what requires us to define
# `CrossrefSourceSelectViaSearchFieldView` and its dependent classes with
# some rather ad hoc methods below.
  # Representation view for the selected crossref source. Because of the
  # interlinked crossref attributes described in the note above, the related
  # resource's id lives on the embedded `crossref_source` object rather than
  # on a plain foreign-key attribute.
  class MyRelatedResourceRepresentationView extends RelatedResourceRepresentationView
    # Return the id of the source object stored under `crossref_source`.
    getRelatedResourceId: ->
      @context.model.get('crossref_source').id
  # Search-based input for choosing a source's crossref. Overrides the generic
  # source search input so that selecting a row valuates BOTH interlinked
  # attributes: `crossref` (the selected source's key string) and
  # `crossref_source` (the selected source object).
  class CrossrefSourceSelectViaSearchInputView extends SourceSelectViaSearchInputView
    # This is the class that is used to display the *selected* resource.
    selectedResourceViewClass: MyRelatedResourceRepresentationView
    # A source is rendered as its (BibTeX) key.
    resourceAsString: (resource) -> resource.key
    # Write both interlinked attributes onto the model when a search-result
    # row is selected.
    setSelectedToModel: (resourceAsRowView) ->
      @model.set 'crossref', resourceAsRowView.model.get('key')
      @model.set 'crossref_source', resourceAsRowView.model.attributes
    # Clear both interlinked attributes when the selection is removed.
    unsetSelectedFromModel: ->
      @model.set 'crossref_source', null
      @model.set 'crossref', ''
    # Build the view used to display the currently selected source, optionally
    # wrapping it in `@selectedResourceWrapperViewClass` when one is defined.
    getSelectedResourceView: ->
      params =
        value: @selectedResourceModel.attributes
        class: 'field-display-link dative-tooltip'
        resourceAsString: @resourceAsString
        valueFormatter: (v) -> v
        resourceName: @resourceName
        attributeName: @context.attribute
        resourceModelClass: @resourceModelClass
        resourcesCollectionClass: @resourcesCollectionClass
        resourceViewClass: null
        model: @getModelForSelectedResourceView()
      if @selectedResourceWrapperViewClass
        new @selectedResourceWrapperViewClass @selectedResourceViewClass, params
      else
        new @selectedResourceViewClass params
    # If we have a selected value, cause it to be displayed and the search
    # interface to not be displayed; if not, do the opposite.
    setStateBasedOnSelectedValue: ->
      if @context.value
        # Rebuild the selected-resource model from the embedded
        # `crossref_source` object so it can be displayed.
        attributes = @context.model.get('crossref_source')
        @selectedResourceModel = new @resourceModelClass(attributes)
        @searchInterfaceVisible = false
        @selectedResourceViewVisible = true
      else
        @searchInterfaceVisible = true
        @selectedResourceViewVisible = false
  # Field view for the crossref attribute: identical to the generic source
  # search field except that it uses the crossref-specific input view.
  class CrossrefSourceSelectViaSearchFieldView extends SourceSelectViaSearchFieldView
    # Swap in the crossref-aware search input.
    getInputView: ->
      new CrossrefSourceSelectViaSearchInputView @context
  # Textarea field whose input is capped at 255 characters (presumably the
  # server-side column limit for these source fields — TODO confirm).
  class TextareaFieldView255 extends TextareaFieldView
    initialize: (options) ->
      # Enforced client-side via the HTML maxlength attribute.
      options.domAttributes =
        maxlength: 255
      super options
  # Textarea field whose input is capped at 1000 characters (presumably the
  # server-side column limit for these source fields — TODO confirm).
  class TextareaFieldView1000 extends TextareaFieldView
    initialize: (options) ->
      # Enforced client-side via the HTML maxlength attribute.
      options.domAttributes =
        maxlength: 1000
      super options
  # Textarea field whose input is capped at 100 characters (presumably the
  # server-side column limit for these source fields — TODO confirm).
  class TextareaFieldView100 extends TextareaFieldView
    initialize: (options) ->
      # Enforced client-side via the HTML maxlength attribute.
      options.domAttributes =
        maxlength: 100
      super options
# A <select>-based field view for the source's (BibTeX) type.
  # Select field for the source's BibTeX entry type.
  class TypeSelectFieldView extends SelectFieldView
    initialize: (options) ->
      # Options are plain strings, so each one serves as both the option
      # value and its display text.
      options.selectValueGetter = (o) -> o
      options.selectTextGetter = (o) -> o
      # Every source must have a type.
      options.required = true
      super options
# Source Add Widget View
# ----------------------
#
# View for a widget containing inputs and controls for creating a new
# source and updating an existing one.
##############################################################################
# Source Add Widget
##############################################################################
  # Widget for creating a new source or updating an existing one. All of the
  # behavior lives in ResourceAddWidgetView; this subclass only declares which
  # model is edited, which field view renders each attribute, and how the
  # attributes are grouped in the form.
  class SourceAddWidgetView extends ResourceAddWidgetView
    resourceName: 'source'
    resourceModel: SourceModel
    # Maps each source attribute to the field view class used to edit it. The
    # numeric suffix on the textarea classes is the maxlength applied to the
    # input (presumably mirroring server-side column limits — TODO confirm).
    attribute2fieldView:
      key: TextareaFieldView1000
      address: TextareaFieldView1000
      note: TextareaFieldView1000
      url: TextareaFieldView1000
      author: TextareaFieldView255
      booktitle: TextareaFieldView255
      chapter: TextareaFieldView255
      edition: TextareaFieldView255
      editor: TextareaFieldView255
      howpublished: TextareaFieldView255
      institution: TextareaFieldView255
      journal: TextareaFieldView255
      key_field: TextareaFieldView255
      organization: TextareaFieldView255
      publisher: TextareaFieldView255
      school: TextareaFieldView255
      series: TextareaFieldView255
      title: TextareaFieldView255
      type_field: TextareaFieldView255
      month: TextareaFieldView100
      number: TextareaFieldView100
      pages: TextareaFieldView100
      volume: TextareaFieldView100
      type: TypeSelectFieldView
      file: FileSelectViaSearchFieldView
      crossref: CrossrefSourceSelectViaSearchFieldView
    # Attributes rendered as the primary fields of the widget.
    primaryAttributes: [
      'key'
      'type'
      'file'
      'crossref'
      'author'
      'editor'
      'year'
      'journal'
      'title'
      'booktitle'
      'chapter'
      'pages'
      'publisher'
      'school'
      'institution'
      'note'
    ]
    # Additional editable attributes rendered as secondary fields.
    editableSecondaryAttributes: [
      'volume'
      'number'
      'month'
      'series'
      'address'
      'edition'
      'annote'
      'howpublished'
      'key_field'
      'organization'
      'type_field'
      'url'
      'affiliation'
      'abstract'
      'contents'
      'copyright'
      'ISBN'
      'ISSN'
      'keywords'
      'language'
      'location'
      'LCCN'
      'mrnumber'
      'price'
      'size'
    ]
| 34366 | define [
'./resource-add-widget'
'./textarea-field'
'./select-field'
'./relational-select-field'
'./related-resource-representation'
'./file-select-via-search-field'
'./source-select-via-search-field'
'./source-select-via-search-input'
'./../models/source'
], (ResourceAddWidgetView, TextareaFieldView, SelectFieldView,
RelationalSelectFieldView, RelatedResourceRepresentationView,
FileSelectViaSearchFieldView, SourceSelectViaSearchFieldView,
SourceSelectViaSearchInputView, SourceModel, ResourceAsRowView) ->
# The `crossref` and `crossref_source` attributes are interlinked in Source
# models in a complex way. To choose a `crossref` value, the user must select
# from the existing sources. This valuates two things:
#
# 1. `crossref` as the selected source's `key` value
# 2. `crossref_source` as the selected source (an object)
#
# This fact is what requires us to define
# `CrossrefSourceSelectViaSearchFieldView` and its dependent classes with
# some rather ad hoc methods below.
class MyRelatedResourceRepresentationView extends RelatedResourceRepresentationView
getRelatedResourceId: ->
@context.model.get('crossref_source').id
class CrossrefSourceSelectViaSearchInputView extends SourceSelectViaSearchInputView
# This is the class that is used to display the *selected* resource.
selectedResourceViewClass: MyRelatedResourceRepresentationView
resourceAsString: (resource) -> resource.key
setSelectedToModel: (resourceAsRowView) ->
@model.set 'crossref', resourceAsRowView.model.get('key')
@model.set 'crossref_source', resourceAsRowView.model.attributes
unsetSelectedFromModel: ->
@model.set 'crossref_source', null
@model.set 'crossref', ''
getSelectedResourceView: ->
params =
value: @selectedResourceModel.attributes
class: 'field-display-link dative-tooltip'
resourceAsString: @resourceAsString
valueFormatter: (v) -> v
resourceName: @resourceName
attributeName: @context.attribute
resourceModelClass: @resourceModelClass
resourcesCollectionClass: @resourcesCollectionClass
resourceViewClass: null
model: @getModelForSelectedResourceView()
if @selectedResourceWrapperViewClass
new @selectedResourceWrapperViewClass @selectedResourceViewClass, params
else
new @selectedResourceViewClass params
# If we have a selected value, cause it to be displayed and the search
# interface to not be displayed; if not, do the opposite.
setStateBasedOnSelectedValue: ->
if @context.value
attributes = @context.model.get('crossref_source')
@selectedResourceModel = new @resourceModelClass(attributes)
@searchInterfaceVisible = false
@selectedResourceViewVisible = true
else
@searchInterfaceVisible = true
@selectedResourceViewVisible = false
class CrossrefSourceSelectViaSearchFieldView extends SourceSelectViaSearchFieldView
getInputView: ->
new CrossrefSourceSelectViaSearchInputView @context
class TextareaFieldView255 extends TextareaFieldView
initialize: (options) ->
options.domAttributes =
maxlength: 255
super options
class TextareaFieldView1000 extends TextareaFieldView
initialize: (options) ->
options.domAttributes =
maxlength: 1000
super options
class TextareaFieldView100 extends TextareaFieldView
initialize: (options) ->
options.domAttributes =
maxlength: 100
super options
# A <select>-based field view for the source's (BibTeX) type.
class TypeSelectFieldView extends SelectFieldView
initialize: (options) ->
options.selectValueGetter = (o) -> o
options.selectTextGetter = (o) -> o
options.required = true
super options
# Source Add Widget View
# ----------------------
#
# View for a widget containing inputs and controls for creating a new
# source and updating an existing one.
##############################################################################
# Source Add Widget
##############################################################################
class SourceAddWidgetView extends ResourceAddWidgetView
resourceName: 'source'
resourceModel: SourceModel
attribute2fieldView:
key: <KEY>
address: TextareaFieldView1000
note: TextareaFieldView1000
url: TextareaFieldView1000
author: TextareaFieldView255
booktitle: TextareaFieldView255
chapter: TextareaFieldView255
edition: TextareaFieldView255
editor: TextareaFieldView255
howpublished: TextareaFieldView255
institution: TextareaFieldView255
journal: TextareaFieldView255
key_field: TextareaFieldView255
organization: TextareaFieldView255
publisher: TextareaFieldView255
school: TextareaFieldView255
series: TextareaFieldView255
title: TextareaFieldView255
type_field: TextareaFieldView255
month: TextareaFieldView100
number: TextareaFieldView100
pages: TextareaFieldView100
volume: TextareaFieldView100
type: TypeSelectFieldView
file: FileSelectViaSearchFieldView
crossref: CrossrefSourceSelectViaSearchFieldView
primaryAttributes: [
'key'
'type'
'file'
'crossref'
'author'
'editor'
'year'
'journal'
'title'
'booktitle'
'chapter'
'pages'
'publisher'
'school'
'institution'
'note'
]
editableSecondaryAttributes: [
'volume'
'number'
'month'
'series'
'address'
'edition'
'annote'
'howpublished'
'key_field'
'organization'
'type_field'
'url'
'affiliation'
'abstract'
'contents'
'copyright'
'ISBN'
'ISSN'
'keywords'
'language'
'location'
'LCCN'
'mrnumber'
'price'
'size'
]
| true | define [
'./resource-add-widget'
'./textarea-field'
'./select-field'
'./relational-select-field'
'./related-resource-representation'
'./file-select-via-search-field'
'./source-select-via-search-field'
'./source-select-via-search-input'
'./../models/source'
], (ResourceAddWidgetView, TextareaFieldView, SelectFieldView,
RelationalSelectFieldView, RelatedResourceRepresentationView,
FileSelectViaSearchFieldView, SourceSelectViaSearchFieldView,
SourceSelectViaSearchInputView, SourceModel, ResourceAsRowView) ->
# The `crossref` and `crossref_source` attributes are interlinked in Source
# models in a complex way. To choose a `crossref` value, the user must select
# from the existing sources. This valuates two things:
#
# 1. `crossref` as the selected source's `key` value
# 2. `crossref_source` as the selected source (an object)
#
# This fact is what requires us to define
# `CrossrefSourceSelectViaSearchFieldView` and its dependent classes with
# some rather ad hoc methods below.
class MyRelatedResourceRepresentationView extends RelatedResourceRepresentationView
getRelatedResourceId: ->
@context.model.get('crossref_source').id
class CrossrefSourceSelectViaSearchInputView extends SourceSelectViaSearchInputView
# This is the class that is used to display the *selected* resource.
selectedResourceViewClass: MyRelatedResourceRepresentationView
resourceAsString: (resource) -> resource.key
setSelectedToModel: (resourceAsRowView) ->
@model.set 'crossref', resourceAsRowView.model.get('key')
@model.set 'crossref_source', resourceAsRowView.model.attributes
unsetSelectedFromModel: ->
@model.set 'crossref_source', null
@model.set 'crossref', ''
getSelectedResourceView: ->
params =
value: @selectedResourceModel.attributes
class: 'field-display-link dative-tooltip'
resourceAsString: @resourceAsString
valueFormatter: (v) -> v
resourceName: @resourceName
attributeName: @context.attribute
resourceModelClass: @resourceModelClass
resourcesCollectionClass: @resourcesCollectionClass
resourceViewClass: null
model: @getModelForSelectedResourceView()
if @selectedResourceWrapperViewClass
new @selectedResourceWrapperViewClass @selectedResourceViewClass, params
else
new @selectedResourceViewClass params
# If we have a selected value, cause it to be displayed and the search
# interface to not be displayed; if not, do the opposite.
setStateBasedOnSelectedValue: ->
if @context.value
attributes = @context.model.get('crossref_source')
@selectedResourceModel = new @resourceModelClass(attributes)
@searchInterfaceVisible = false
@selectedResourceViewVisible = true
else
@searchInterfaceVisible = true
@selectedResourceViewVisible = false
class CrossrefSourceSelectViaSearchFieldView extends SourceSelectViaSearchFieldView
getInputView: ->
new CrossrefSourceSelectViaSearchInputView @context
class TextareaFieldView255 extends TextareaFieldView
initialize: (options) ->
options.domAttributes =
maxlength: 255
super options
class TextareaFieldView1000 extends TextareaFieldView
initialize: (options) ->
options.domAttributes =
maxlength: 1000
super options
class TextareaFieldView100 extends TextareaFieldView
initialize: (options) ->
options.domAttributes =
maxlength: 100
super options
# A <select>-based field view for the source's (BibTeX) type.
class TypeSelectFieldView extends SelectFieldView
initialize: (options) ->
options.selectValueGetter = (o) -> o
options.selectTextGetter = (o) -> o
options.required = true
super options
# Source Add Widget View
# ----------------------
#
# View for a widget containing inputs and controls for creating a new
# source and updating an existing one.
##############################################################################
# Source Add Widget
##############################################################################
class SourceAddWidgetView extends ResourceAddWidgetView
resourceName: 'source'
resourceModel: SourceModel
attribute2fieldView:
key: PI:KEY:<KEY>END_PI
address: TextareaFieldView1000
note: TextareaFieldView1000
url: TextareaFieldView1000
author: TextareaFieldView255
booktitle: TextareaFieldView255
chapter: TextareaFieldView255
edition: TextareaFieldView255
editor: TextareaFieldView255
howpublished: TextareaFieldView255
institution: TextareaFieldView255
journal: TextareaFieldView255
key_field: TextareaFieldView255
organization: TextareaFieldView255
publisher: TextareaFieldView255
school: TextareaFieldView255
series: TextareaFieldView255
title: TextareaFieldView255
type_field: TextareaFieldView255
month: TextareaFieldView100
number: TextareaFieldView100
pages: TextareaFieldView100
volume: TextareaFieldView100
type: TypeSelectFieldView
file: FileSelectViaSearchFieldView
crossref: CrossrefSourceSelectViaSearchFieldView
primaryAttributes: [
'key'
'type'
'file'
'crossref'
'author'
'editor'
'year'
'journal'
'title'
'booktitle'
'chapter'
'pages'
'publisher'
'school'
'institution'
'note'
]
editableSecondaryAttributes: [
'volume'
'number'
'month'
'series'
'address'
'edition'
'annote'
'howpublished'
'key_field'
'organization'
'type_field'
'url'
'affiliation'
'abstract'
'contents'
'copyright'
'ISBN'
'ISSN'
'keywords'
'language'
'location'
'LCCN'
'mrnumber'
'price'
'size'
]
|
[
{
"context": " Breakfast\"\n time: \"9h00\"\n ,\n name: \"Linus Torvalds\"\n photo: \"http://f.cl.ly/items/2A3p1N0C3c0n3",
"end": 1154,
"score": 0.9998884201049805,
"start": 1140,
"tag": "NAME",
"value": "Linus Torvalds"
},
{
"context": " company: \"Linux Found... | docpad.coffee | matehackers/evento | 0 | module.exports =
# These are variables will be accessible via our templates
templateData:
# Conference info
conf:
name: "Conference name"
description: "Conference description"
date: "November 15"
price: "$100"
venue: "Coco Bongo"
address: "Boulevard Kukulcan, 30"
city: "Cancún"
state: "Quintana"
# Site info
site:
theme: "yellow-swan"
url: "http://confboilerplate.com"
googleanalytics: "UA-33656081-1"
# Active sections on the website
# to deactivate comment out with '#'
# you can also change order here and it will reflect on page
sections: [
'about'
'location'
'speakers'
'schedule'
'sponsors'
'partners'
#'contact'
]
# Labels which you can translate to other languages
labels:
about: "About"
location: "Location"
speakers: "Speakers"
schedule: "Schedule"
sponsors: "Sponsors"
partners: "Partners"
contact: "Contact"
# The entire schedule
schedule: [
name: "Check-in / Breakfast"
time: "9h00"
,
name: "Linus Torvalds"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Linux Foundation"
twitter: "linus"
presentation:
title: "Digging into a Linux Kernel"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "10h00"
,
name: "Bill Gates"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Microsoft"
twitter: "billy95"
presentation:
title: "Introducing Windows 12"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "11h00"
,
name: "Lunch"
time: "12h00"
,
name: "Chuck Norris"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Delta Command"
twitter: "littlechuck"
presentation:
title: "How to kill a elephant with one finger"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "13h00"
,
name: "Steve Jobs"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Apple, Inc."
twitter: "stevie"
presentation:
title: "Presenting iPad"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "14h00"
,
name: "Coffee-break"
time: "15h00"
,
name: "Mark Zuckerberg"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Facebook"
twitter: "zuck"
presentation:
title: "Revealing Facebook Secrets"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "16h00"
,
name: "Steve Wozniak"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Apple, Inc."
twitter: "woz"
presentation:
title: "Why do I prefer Android over iPhone"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "17h00"
]
# List of Sponsors
sponsors: [
name: "Eventick"
logo: "http://f.cl.ly/items/1U3p0Z0e2w0I1i3z1X22/Image%202012.09.25%203:00:58%20PM.png"
url: "http://eventick.com.br"
]
# List of Partners
partners: [
name: "BrazilJS"
logo: "http://f.cl.ly/items/2N3i2W0X2f3c2g2Z2N0f/Untitled-1.png"
url: "http://braziljs.org"
]
# Theme path
getTheme: ->
"themes/#{@site.theme}" | 100729 | module.exports =
# These are variables will be accessible via our templates
templateData:
# Conference info
conf:
name: "Conference name"
description: "Conference description"
date: "November 15"
price: "$100"
venue: "Coco Bongo"
address: "Boulevard Kukulcan, 30"
city: "Cancún"
state: "Quintana"
# Site info
site:
theme: "yellow-swan"
url: "http://confboilerplate.com"
googleanalytics: "UA-33656081-1"
# Active sections on the website
# to deactivate comment out with '#'
# you can also change order here and it will reflect on page
sections: [
'about'
'location'
'speakers'
'schedule'
'sponsors'
'partners'
#'contact'
]
# Labels which you can translate to other languages
labels:
about: "About"
location: "Location"
speakers: "Speakers"
schedule: "Schedule"
sponsors: "Sponsors"
partners: "Partners"
contact: "Contact"
# The entire schedule
schedule: [
name: "Check-in / Breakfast"
time: "9h00"
,
name: "<NAME>"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Linux Foundation"
twitter: "linus"
presentation:
title: "Digging into a Linux Kernel"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "10h00"
,
name: "<NAME>"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Microsoft"
twitter: "billy95"
presentation:
title: "Introducing Windows 12"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "11h00"
,
name: "Lunch"
time: "12h00"
,
name: "<NAME>"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Delta Command"
twitter: "littlechuck"
presentation:
title: "How to kill a elephant with one finger"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "13h00"
,
name: "<NAME>"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Apple, Inc."
twitter: "stevie"
presentation:
title: "Presenting iPad"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "14h00"
,
name: "<NAME>"
time: "15h00"
,
name: "<NAME>"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Facebook"
twitter: "zuck"
presentation:
title: "Revealing Facebook Secrets"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "16h00"
,
name: "<NAME>"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Apple, Inc."
twitter: "woz"
presentation:
title: "Why do I prefer Android over iPhone"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "17h00"
]
# List of Sponsors
sponsors: [
name: "<NAME>"
logo: "http://f.cl.ly/items/1U3p0Z0e2w0I1i3z1X22/Image%202012.09.25%203:00:58%20PM.png"
url: "http://eventick.com.br"
]
# List of Partners
partners: [
name: "BrazilJS"
logo: "http://f.cl.ly/items/2N3i2W0X2f3c2g2Z2N0f/Untitled-1.png"
url: "http://braziljs.org"
]
# Theme path
getTheme: ->
"themes/#{@site.theme}" | true | module.exports =
# These are variables will be accessible via our templates
templateData:
# Conference info
conf:
name: "Conference name"
description: "Conference description"
date: "November 15"
price: "$100"
venue: "Coco Bongo"
address: "Boulevard Kukulcan, 30"
city: "Cancún"
state: "Quintana"
# Site info
site:
theme: "yellow-swan"
url: "http://confboilerplate.com"
googleanalytics: "UA-33656081-1"
# Active sections on the website
# to deactivate comment out with '#'
# you can also change order here and it will reflect on page
sections: [
'about'
'location'
'speakers'
'schedule'
'sponsors'
'partners'
#'contact'
]
# Labels which you can translate to other languages
labels:
about: "About"
location: "Location"
speakers: "Speakers"
schedule: "Schedule"
sponsors: "Sponsors"
partners: "Partners"
contact: "Contact"
# The entire schedule
schedule: [
name: "Check-in / Breakfast"
time: "9h00"
,
name: "PI:NAME:<NAME>END_PI"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Linux Foundation"
twitter: "linus"
presentation:
title: "Digging into a Linux Kernel"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "10h00"
,
name: "PI:NAME:<NAME>END_PI"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Microsoft"
twitter: "billy95"
presentation:
title: "Introducing Windows 12"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "11h00"
,
name: "Lunch"
time: "12h00"
,
name: "PI:NAME:<NAME>END_PI"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Delta Command"
twitter: "littlechuck"
presentation:
title: "How to kill a elephant with one finger"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "13h00"
,
name: "PI:NAME:<NAME>END_PI"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Apple, Inc."
twitter: "stevie"
presentation:
title: "Presenting iPad"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "14h00"
,
name: "PI:NAME:<NAME>END_PI"
time: "15h00"
,
name: "PI:NAME:<NAME>END_PI"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Facebook"
twitter: "zuck"
presentation:
title: "Revealing Facebook Secrets"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "16h00"
,
name: "PI:NAME:<NAME>END_PI"
photo: "http://f.cl.ly/items/2A3p1N0C3c0n3N3R1w2B/speaker.jpg"
bio: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
company: "Apple, Inc."
twitter: "woz"
presentation:
title: "Why do I prefer Android over iPhone"
description: "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo"
time: "17h00"
]
# List of Sponsors
sponsors: [
name: "PI:NAME:<NAME>END_PI"
logo: "http://f.cl.ly/items/1U3p0Z0e2w0I1i3z1X22/Image%202012.09.25%203:00:58%20PM.png"
url: "http://eventick.com.br"
]
# List of Partners
partners: [
name: "BrazilJS"
logo: "http://f.cl.ly/items/2N3i2W0X2f3c2g2Z2N0f/Untitled-1.png"
url: "http://braziljs.org"
]
# Theme path
getTheme: ->
"themes/#{@site.theme}" |
[
{
"context": ": 'forgot',\n email: email,\n setPassword: setPassword,\n redirectTo: redirectTo\n intent: 'forg",
"end": 2138,
"score": 0.9964146614074707,
"start": 2127,
"tag": "PASSWORD",
"value": "setPassword"
}
] | src/desktop/apps/home/client/auth_router.coffee | kanaabe/force | 0 | Backbone = require 'backbone'
_ = require 'underscore'
mediator = require '../../../lib/mediator.coffee'
qs = require 'qs'
module.exports = class HomeAuthRouter extends Backbone.Router
routes:
'log_in': 'login'
'sign_up': 'signup'
'forgot': 'forgot'
initialize: ->
@location = window.location
@parsedLocation = qs.parse(window.location.search.replace /^\?/, '')
login: ->
error = @parsedLocation.error
redirectTo = @parsedLocation.redirect_uri or @parsedLocation['redirect-to']
# Handle gravity style account created errors
unless error
error = @parsedLocation.account_created_email
if error
msg = switch error
when 'already-signed-up', 'facebook'
"You've already signed up with your email address. " +
"Log in to link your Facebook account in your settings."
when 'twitter'
"You've already signed up with your email address. " +
"Log in to link your Twitter account in your settings."
when 'account-not-found', 'no-user-access-token', 'no-user'
"We couldn't find your account. Please sign up."
else
error
mediator.trigger 'open:auth',
mode: 'login'
intent: 'login'
signupIntent: 'login'
trigger: 'timed'
triggerSeconds: 0
redirectTo: redirectTo
mediator.trigger 'auth:error', msg
else
mediator.trigger 'open:auth',
mode: 'login'
redirectTo: redirectTo
intent: 'login'
signupIntent: 'login'
trigger: 'timed'
triggerSeconds: 0
signup: ->
redirectTo = @parsedLocation['redirect-to']
mediator.trigger 'open:auth',
mode: 'signup',
redirectTo: if redirectTo then redirectTo else null
intent: 'signup'
signupIntent: 'signup'
trigger: 'timed'
triggerSeconds: 0
forgot: ->
email = @parsedLocation.email
setPassword = @parsedLocation.set_password
redirectTo = @parsedLocation.reset_password_redirect_to
mediator.trigger 'open:auth',
mode: 'forgot',
email: email,
setPassword: setPassword,
redirectTo: redirectTo
intent: 'forgot'
signupIntent: 'forgot'
trigger: 'timed'
triggerSeconds: 0
| 17462 | Backbone = require 'backbone'
_ = require 'underscore'
mediator = require '../../../lib/mediator.coffee'
qs = require 'qs'
module.exports = class HomeAuthRouter extends Backbone.Router
routes:
'log_in': 'login'
'sign_up': 'signup'
'forgot': 'forgot'
initialize: ->
@location = window.location
@parsedLocation = qs.parse(window.location.search.replace /^\?/, '')
login: ->
error = @parsedLocation.error
redirectTo = @parsedLocation.redirect_uri or @parsedLocation['redirect-to']
# Handle gravity style account created errors
unless error
error = @parsedLocation.account_created_email
if error
msg = switch error
when 'already-signed-up', 'facebook'
"You've already signed up with your email address. " +
"Log in to link your Facebook account in your settings."
when 'twitter'
"You've already signed up with your email address. " +
"Log in to link your Twitter account in your settings."
when 'account-not-found', 'no-user-access-token', 'no-user'
"We couldn't find your account. Please sign up."
else
error
mediator.trigger 'open:auth',
mode: 'login'
intent: 'login'
signupIntent: 'login'
trigger: 'timed'
triggerSeconds: 0
redirectTo: redirectTo
mediator.trigger 'auth:error', msg
else
mediator.trigger 'open:auth',
mode: 'login'
redirectTo: redirectTo
intent: 'login'
signupIntent: 'login'
trigger: 'timed'
triggerSeconds: 0
signup: ->
redirectTo = @parsedLocation['redirect-to']
mediator.trigger 'open:auth',
mode: 'signup',
redirectTo: if redirectTo then redirectTo else null
intent: 'signup'
signupIntent: 'signup'
trigger: 'timed'
triggerSeconds: 0
forgot: ->
email = @parsedLocation.email
setPassword = @parsedLocation.set_password
redirectTo = @parsedLocation.reset_password_redirect_to
mediator.trigger 'open:auth',
mode: 'forgot',
email: email,
setPassword: <PASSWORD>,
redirectTo: redirectTo
intent: 'forgot'
signupIntent: 'forgot'
trigger: 'timed'
triggerSeconds: 0
| true | Backbone = require 'backbone'
_ = require 'underscore'
mediator = require '../../../lib/mediator.coffee'
qs = require 'qs'
module.exports = class HomeAuthRouter extends Backbone.Router
routes:
'log_in': 'login'
'sign_up': 'signup'
'forgot': 'forgot'
initialize: ->
@location = window.location
@parsedLocation = qs.parse(window.location.search.replace /^\?/, '')
login: ->
error = @parsedLocation.error
redirectTo = @parsedLocation.redirect_uri or @parsedLocation['redirect-to']
# Handle gravity style account created errors
unless error
error = @parsedLocation.account_created_email
if error
msg = switch error
when 'already-signed-up', 'facebook'
"You've already signed up with your email address. " +
"Log in to link your Facebook account in your settings."
when 'twitter'
"You've already signed up with your email address. " +
"Log in to link your Twitter account in your settings."
when 'account-not-found', 'no-user-access-token', 'no-user'
"We couldn't find your account. Please sign up."
else
error
mediator.trigger 'open:auth',
mode: 'login'
intent: 'login'
signupIntent: 'login'
trigger: 'timed'
triggerSeconds: 0
redirectTo: redirectTo
mediator.trigger 'auth:error', msg
else
mediator.trigger 'open:auth',
mode: 'login'
redirectTo: redirectTo
intent: 'login'
signupIntent: 'login'
trigger: 'timed'
triggerSeconds: 0
signup: ->
redirectTo = @parsedLocation['redirect-to']
mediator.trigger 'open:auth',
mode: 'signup',
redirectTo: if redirectTo then redirectTo else null
intent: 'signup'
signupIntent: 'signup'
trigger: 'timed'
triggerSeconds: 0
forgot: ->
email = @parsedLocation.email
setPassword = @parsedLocation.set_password
redirectTo = @parsedLocation.reset_password_redirect_to
mediator.trigger 'open:auth',
mode: 'forgot',
email: email,
setPassword: PI:PASSWORD:<PASSWORD>END_PI,
redirectTo: redirectTo
intent: 'forgot'
signupIntent: 'forgot'
trigger: 'timed'
triggerSeconds: 0
|
[
{
"context": "ocalhost'\n port: 80\n user: 'test'\n password: 'test'\n\n @jiraCli =",
"end": 357,
"score": 0.8890500664710999,
"start": 353,
"tag": "USERNAME",
"value": "test"
},
{
"context": "0\n user: 'test'\n password: ... | spec/jira-cli.spec.coffee | tebriel/jira-cli | 48 | fs = require 'fs'
path = require 'path'
color = require('ansi-color').set
jira = require '../src/jira-cli.coffee'
# These seem to be a bit silly, gets me more familiar with spies though, so I
# guess that's a good thing.
describe "JiraCli", ->
beforeEach ->
config =
host: 'localhost'
port: 80
user: 'test'
password: 'test'
@jiraCli = new jira.JiraHelper config
@cb = jasmine.createSpy 'callback'
spyOn @jiraCli.pp, 'prettyPrintIssue'
spyOn @jiraCli.log, 'error'
spyOn @jiraCli.log, 'log'
spyOn @jiraCli, 'dieWithFire'
it "Gets the requested issue", ->
spyOn @jiraCli.jira, 'findIssue'
@jiraCli.getIssue 1, false
expect(@jiraCli.jira.findIssue)
.toHaveBeenCalledWith 1, jasmine.any Function
@jiraCli.jira.findIssue.mostRecentCall.args[1] null, "response"
expect(@jiraCli.pp.prettyPrintIssue)
.toHaveBeenCalledWith "response", false
@jiraCli.jira.findIssue.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error finding issue: error"
it "Gets the issue types", ->
spyOn @jiraCli.jira, 'listIssueTypes'
@jiraCli.getIssueTypes @cb
@jiraCli.jira.listIssueTypes.mostRecentCall.args[0] null, "response"
expect(@cb).toHaveBeenCalledWith "response"
@jiraCli.jira.listIssueTypes.mostRecentCall.args[0] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error listing issueTypes: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Adds a new issue", ->
issue = @jiraCli.createIssueObject 'project', 'summary', 'issueType',
'description'
spyOn @jiraCli.jira, 'addNewIssue'
@jiraCli.addIssue 'summary', 'description', 'issueType', 'project'
expect(@jiraCli.jira.addNewIssue)
.toHaveBeenCalledWith issue, jasmine.any Function
@jiraCli.jira.addNewIssue.mostRecentCall.args[1] null,
key: 'response'
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Issue response has been
#{color('created', 'green')}"
@jiraCli.jira.addNewIssue.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error creating issue: \"error\""
it "Deletes an Issue", ->
spyOn @jiraCli.jira, 'deleteIssue'
@jiraCli.deleteIssue 1
expect(@jiraCli.jira.deleteIssue)
.toHaveBeenCalledWith 1, jasmine.any Function
@jiraCli.jira.deleteIssue.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error deleting issue: error"
@jiraCli.jira.deleteIssue.mostRecentCall.args[1] null, "success"
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Issue 1 was #{color('deleted', 'green')}"
it "Adds a worklog", ->
worklog =
comment: 'comment'
timeSpent: 'timeSpent'
spyOn @jiraCli.jira, 'addWorklog'
@jiraCli.addWorklog 1, 'comment', 'timeSpent', true
expect(@jiraCli.jira.addWorklog)
.toHaveBeenCalledWith 1, worklog, jasmine.any Function
@jiraCli.jira.addWorklog.mostRecentCall.args[2] null, "response"
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Worklog was #{color("added", "green")}"
@jiraCli.jira.addWorklog.mostRecentCall.args[2] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error adding worklog: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Adds a worklog, but doesn't quit", ->
spyOn @jiraCli.jira, 'addWorklog'
@jiraCli.addWorklog 1, 'comment', 'timeSpent', false
@jiraCli.jira.addWorklog.mostRecentCall.args[2] null, "response"
expect(@jiraCli.dieWithFire).not.toHaveBeenCalled()
it "Lists transitions", ->
spyOn @jiraCli.jira, 'listTransitions'
@jiraCli.listTransitions 1, @cb
expect(@jiraCli.jira.listTransitions)
.toHaveBeenCalledWith 1, jasmine.any Function
@jiraCli.jira.listTransitions.mostRecentCall.args[1] null, "transitions"
expect(@cb).toHaveBeenCalledWith "transitions"
@jiraCli.jira.listTransitions.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error getting transitions: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Transitions an Issue", ->
issueUpdate =
transition:
id: 'transition'
spyOn @jiraCli.jira, 'transitionIssue'
@jiraCli.transitionIssue 1, 'transition'
expect(@jiraCli.jira.transitionIssue)
.toHaveBeenCalledWith 1, issueUpdate, jasmine.any Function
@jiraCli.jira.transitionIssue.mostRecentCall.args[2] null, "response"
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Issue 1 was #{color "transitioned", 'green'}"
@jiraCli.jira.transitionIssue.mostRecentCall.args[2] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error transitioning issue: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Searches Jira", ->
fields = ["summary", "status", "assignee"]
spyOn @jiraCli.jira, 'searchJira'
@jiraCli.searchJira 'query', true
expect(@jiraCli.jira.searchJira)
.toHaveBeenCalledWith 'query', fields, jasmine.any Function
expect(@jiraCli.jira.searchJira.mostRecentCall.args[1])
.toEqual fields
@jiraCli.jira.searchJira.mostRecentCall.args[2] null, issues: [1]
expect(@jiraCli.pp.prettyPrintIssue).toHaveBeenCalledWith 1, true
@jiraCli.jira.searchJira.mostRecentCall.args[2] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error retreiving issues list: error"
it "Gets the user's OPEN issues", ->
jql = "assignee = \"test\" AND resolution = unresolved"
spyOn @jiraCli, 'searchJira'
@jiraCli.getMyIssues true, true
expect(@jiraCli.searchJira).toHaveBeenCalledWith jql, true
it "Gets ALL the user's issues", ->
jql = "assignee = \"test\""
spyOn @jiraCli, 'searchJira'
@jiraCli.getMyIssues false, true
expect(@jiraCli.searchJira).toHaveBeenCalledWith jql, true
it "Gets the user's projects", ->
spyOn @jiraCli.jira, 'listProjects'
@jiraCli.getMyProjects @cb
@jiraCli.jira.listProjects.mostRecentCall.args[0] null, "list"
expect(@cb).toHaveBeenCalledWith "list"
@jiraCli.jira.listProjects.mostRecentCall.args[0] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error listing projects: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
| 130188 | fs = require 'fs'
path = require 'path'
color = require('ansi-color').set
jira = require '../src/jira-cli.coffee'
# These seem to be a bit silly, gets me more familiar with spies though, so I
# guess that's a good thing.
describe "JiraCli", ->
beforeEach ->
config =
host: 'localhost'
port: 80
user: 'test'
password: '<PASSWORD>'
@jiraCli = new jira.JiraHelper config
@cb = jasmine.createSpy 'callback'
spyOn @jiraCli.pp, 'prettyPrintIssue'
spyOn @jiraCli.log, 'error'
spyOn @jiraCli.log, 'log'
spyOn @jiraCli, 'dieWithFire'
it "Gets the requested issue", ->
spyOn @jiraCli.jira, 'findIssue'
@jiraCli.getIssue 1, false
expect(@jiraCli.jira.findIssue)
.toHaveBeenCalledWith 1, jasmine.any Function
@jiraCli.jira.findIssue.mostRecentCall.args[1] null, "response"
expect(@jiraCli.pp.prettyPrintIssue)
.toHaveBeenCalledWith "response", false
@jiraCli.jira.findIssue.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error finding issue: error"
it "Gets the issue types", ->
spyOn @jiraCli.jira, 'listIssueTypes'
@jiraCli.getIssueTypes @cb
@jiraCli.jira.listIssueTypes.mostRecentCall.args[0] null, "response"
expect(@cb).toHaveBeenCalledWith "response"
@jiraCli.jira.listIssueTypes.mostRecentCall.args[0] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error listing issueTypes: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Adds a new issue", ->
issue = @jiraCli.createIssueObject 'project', 'summary', 'issueType',
'description'
spyOn @jiraCli.jira, 'addNewIssue'
@jiraCli.addIssue 'summary', 'description', 'issueType', 'project'
expect(@jiraCli.jira.addNewIssue)
.toHaveBeenCalledWith issue, jasmine.any Function
@jiraCli.jira.addNewIssue.mostRecentCall.args[1] null,
key: 'response'
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Issue response has been
#{color('created', 'green')}"
@jiraCli.jira.addNewIssue.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error creating issue: \"error\""
it "Deletes an Issue", ->
spyOn @jiraCli.jira, 'deleteIssue'
@jiraCli.deleteIssue 1
expect(@jiraCli.jira.deleteIssue)
.toHaveBeenCalledWith 1, jasmine.any Function
@jiraCli.jira.deleteIssue.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error deleting issue: error"
@jiraCli.jira.deleteIssue.mostRecentCall.args[1] null, "success"
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Issue 1 was #{color('deleted', 'green')}"
it "Adds a worklog", ->
worklog =
comment: 'comment'
timeSpent: 'timeSpent'
spyOn @jiraCli.jira, 'addWorklog'
@jiraCli.addWorklog 1, 'comment', 'timeSpent', true
expect(@jiraCli.jira.addWorklog)
.toHaveBeenCalledWith 1, worklog, jasmine.any Function
@jiraCli.jira.addWorklog.mostRecentCall.args[2] null, "response"
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Worklog was #{color("added", "green")}"
@jiraCli.jira.addWorklog.mostRecentCall.args[2] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error adding worklog: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Adds a worklog, but doesn't quit", ->
spyOn @jiraCli.jira, 'addWorklog'
@jiraCli.addWorklog 1, 'comment', 'timeSpent', false
@jiraCli.jira.addWorklog.mostRecentCall.args[2] null, "response"
expect(@jiraCli.dieWithFire).not.toHaveBeenCalled()
it "Lists transitions", ->
spyOn @jiraCli.jira, 'listTransitions'
@jiraCli.listTransitions 1, @cb
expect(@jiraCli.jira.listTransitions)
.toHaveBeenCalledWith 1, jasmine.any Function
@jiraCli.jira.listTransitions.mostRecentCall.args[1] null, "transitions"
expect(@cb).toHaveBeenCalledWith "transitions"
@jiraCli.jira.listTransitions.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error getting transitions: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Transitions an Issue", ->
issueUpdate =
transition:
id: 'transition'
spyOn @jiraCli.jira, 'transitionIssue'
@jiraCli.transitionIssue 1, 'transition'
expect(@jiraCli.jira.transitionIssue)
.toHaveBeenCalledWith 1, issueUpdate, jasmine.any Function
@jiraCli.jira.transitionIssue.mostRecentCall.args[2] null, "response"
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Issue 1 was #{color "transitioned", 'green'}"
@jiraCli.jira.transitionIssue.mostRecentCall.args[2] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error transitioning issue: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Searches Jira", ->
fields = ["summary", "status", "assignee"]
spyOn @jiraCli.jira, 'searchJira'
@jiraCli.searchJira 'query', true
expect(@jiraCli.jira.searchJira)
.toHaveBeenCalledWith 'query', fields, jasmine.any Function
expect(@jiraCli.jira.searchJira.mostRecentCall.args[1])
.toEqual fields
@jiraCli.jira.searchJira.mostRecentCall.args[2] null, issues: [1]
expect(@jiraCli.pp.prettyPrintIssue).toHaveBeenCalledWith 1, true
@jiraCli.jira.searchJira.mostRecentCall.args[2] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error retreiving issues list: error"
it "Gets the user's OPEN issues", ->
jql = "assignee = \"test\" AND resolution = unresolved"
spyOn @jiraCli, 'searchJira'
@jiraCli.getMyIssues true, true
expect(@jiraCli.searchJira).toHaveBeenCalledWith jql, true
it "Gets ALL the user's issues", ->
jql = "assignee = \"test\""
spyOn @jiraCli, 'searchJira'
@jiraCli.getMyIssues false, true
expect(@jiraCli.searchJira).toHaveBeenCalledWith jql, true
it "Gets the user's projects", ->
spyOn @jiraCli.jira, 'listProjects'
@jiraCli.getMyProjects @cb
@jiraCli.jira.listProjects.mostRecentCall.args[0] null, "list"
expect(@cb).toHaveBeenCalledWith "list"
@jiraCli.jira.listProjects.mostRecentCall.args[0] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error listing projects: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
| true | fs = require 'fs'
path = require 'path'
color = require('ansi-color').set
jira = require '../src/jira-cli.coffee'
# These seem to be a bit silly, gets me more familiar with spies though, so I
# guess that's a good thing.
describe "JiraCli", ->
beforeEach ->
config =
host: 'localhost'
port: 80
user: 'test'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
@jiraCli = new jira.JiraHelper config
@cb = jasmine.createSpy 'callback'
spyOn @jiraCli.pp, 'prettyPrintIssue'
spyOn @jiraCli.log, 'error'
spyOn @jiraCli.log, 'log'
spyOn @jiraCli, 'dieWithFire'
it "Gets the requested issue", ->
spyOn @jiraCli.jira, 'findIssue'
@jiraCli.getIssue 1, false
expect(@jiraCli.jira.findIssue)
.toHaveBeenCalledWith 1, jasmine.any Function
@jiraCli.jira.findIssue.mostRecentCall.args[1] null, "response"
expect(@jiraCli.pp.prettyPrintIssue)
.toHaveBeenCalledWith "response", false
@jiraCli.jira.findIssue.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error finding issue: error"
it "Gets the issue types", ->
spyOn @jiraCli.jira, 'listIssueTypes'
@jiraCli.getIssueTypes @cb
@jiraCli.jira.listIssueTypes.mostRecentCall.args[0] null, "response"
expect(@cb).toHaveBeenCalledWith "response"
@jiraCli.jira.listIssueTypes.mostRecentCall.args[0] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error listing issueTypes: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Adds a new issue", ->
issue = @jiraCli.createIssueObject 'project', 'summary', 'issueType',
'description'
spyOn @jiraCli.jira, 'addNewIssue'
@jiraCli.addIssue 'summary', 'description', 'issueType', 'project'
expect(@jiraCli.jira.addNewIssue)
.toHaveBeenCalledWith issue, jasmine.any Function
@jiraCli.jira.addNewIssue.mostRecentCall.args[1] null,
key: 'response'
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Issue response has been
#{color('created', 'green')}"
@jiraCli.jira.addNewIssue.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error creating issue: \"error\""
it "Deletes an Issue", ->
spyOn @jiraCli.jira, 'deleteIssue'
@jiraCli.deleteIssue 1
expect(@jiraCli.jira.deleteIssue)
.toHaveBeenCalledWith 1, jasmine.any Function
@jiraCli.jira.deleteIssue.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error deleting issue: error"
@jiraCli.jira.deleteIssue.mostRecentCall.args[1] null, "success"
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Issue 1 was #{color('deleted', 'green')}"
it "Adds a worklog", ->
worklog =
comment: 'comment'
timeSpent: 'timeSpent'
spyOn @jiraCli.jira, 'addWorklog'
@jiraCli.addWorklog 1, 'comment', 'timeSpent', true
expect(@jiraCli.jira.addWorklog)
.toHaveBeenCalledWith 1, worklog, jasmine.any Function
@jiraCli.jira.addWorklog.mostRecentCall.args[2] null, "response"
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Worklog was #{color("added", "green")}"
@jiraCli.jira.addWorklog.mostRecentCall.args[2] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error adding worklog: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Adds a worklog, but doesn't quit", ->
spyOn @jiraCli.jira, 'addWorklog'
@jiraCli.addWorklog 1, 'comment', 'timeSpent', false
@jiraCli.jira.addWorklog.mostRecentCall.args[2] null, "response"
expect(@jiraCli.dieWithFire).not.toHaveBeenCalled()
it "Lists transitions", ->
spyOn @jiraCli.jira, 'listTransitions'
@jiraCli.listTransitions 1, @cb
expect(@jiraCli.jira.listTransitions)
.toHaveBeenCalledWith 1, jasmine.any Function
@jiraCli.jira.listTransitions.mostRecentCall.args[1] null, "transitions"
expect(@cb).toHaveBeenCalledWith "transitions"
@jiraCli.jira.listTransitions.mostRecentCall.args[1] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error getting transitions: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Transitions an Issue", ->
issueUpdate =
transition:
id: 'transition'
spyOn @jiraCli.jira, 'transitionIssue'
@jiraCli.transitionIssue 1, 'transition'
expect(@jiraCli.jira.transitionIssue)
.toHaveBeenCalledWith 1, issueUpdate, jasmine.any Function
@jiraCli.jira.transitionIssue.mostRecentCall.args[2] null, "response"
expect(@jiraCli.log.log)
.toHaveBeenCalledWith "Issue 1 was #{color "transitioned", 'green'}"
@jiraCli.jira.transitionIssue.mostRecentCall.args[2] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error transitioning issue: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
it "Searches Jira", ->
fields = ["summary", "status", "assignee"]
spyOn @jiraCli.jira, 'searchJira'
@jiraCli.searchJira 'query', true
expect(@jiraCli.jira.searchJira)
.toHaveBeenCalledWith 'query', fields, jasmine.any Function
expect(@jiraCli.jira.searchJira.mostRecentCall.args[1])
.toEqual fields
@jiraCli.jira.searchJira.mostRecentCall.args[2] null, issues: [1]
expect(@jiraCli.pp.prettyPrintIssue).toHaveBeenCalledWith 1, true
@jiraCli.jira.searchJira.mostRecentCall.args[2] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error retreiving issues list: error"
it "Gets the user's OPEN issues", ->
jql = "assignee = \"test\" AND resolution = unresolved"
spyOn @jiraCli, 'searchJira'
@jiraCli.getMyIssues true, true
expect(@jiraCli.searchJira).toHaveBeenCalledWith jql, true
it "Gets ALL the user's issues", ->
jql = "assignee = \"test\""
spyOn @jiraCli, 'searchJira'
@jiraCli.getMyIssues false, true
expect(@jiraCli.searchJira).toHaveBeenCalledWith jql, true
it "Gets the user's projects", ->
spyOn @jiraCli.jira, 'listProjects'
@jiraCli.getMyProjects @cb
@jiraCli.jira.listProjects.mostRecentCall.args[0] null, "list"
expect(@cb).toHaveBeenCalledWith "list"
@jiraCli.jira.listProjects.mostRecentCall.args[0] "error"
expect(@jiraCli.log.error)
.toHaveBeenCalledWith "Error listing projects: error"
expect(@jiraCli.dieWithFire).toHaveBeenCalled()
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.999911904335022,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/react/mp-history/content.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { Event } from './event'
import { Game } from './game'
import * as React from 'react'
import { button, div, h3 } from 'react-dom-factories'
import { Spinner } from 'spinner'
el = React.createElement
export class Content extends React.PureComponent
constructor: (props) ->
super props
@eventsRef = React.createRef()
getSnapshotBeforeUpdate: (prevProps, prevState) =>
snapshot = {}
if prevProps.events?.length > 0 && @props.events?.length > 0
# events are prepended, use previous first entry as reference
if prevProps.events[0].id > @props.events[0].id
snapshot.reference =
# firstChild to avoid calculating based on ever-changing padding
@eventsRef.current.children[0].firstChild
# events are appended, use previous last entry as reference
else if _.last(prevProps.events).id < _.last(@props.events).id
snapshot.reference =
_.last(@eventsRef.current.children).firstChild
if snapshot.reference?
snapshot.referenceTop = snapshot.reference.getBoundingClientRect().top
if osu.bottomPageDistance() < 10 && prevProps.isAutoloading && @props.isAutoloading
snapshot.scrollToLastEvent = true
snapshot
componentDidUpdate: (prevProps, prevState, snapshot) =>
if snapshot.scrollToLastEvent
$(window).stop().scrollTo @eventsRef.current.scrollHeight, 500
else if snapshot.reference?
currentScrollReferenceTop = snapshot.reference.getBoundingClientRect().top
currentDocumentScrollTop = window.pageYOffset
targetDocumentScrollTop = currentDocumentScrollTop + currentScrollReferenceTop - snapshot.referenceTop
window.scrollTo window.pageXOffset, targetDocumentScrollTop
render: =>
div className: 'osu-page osu-page--mp-history',
h3 null, @props.match.name
if @props.hasPrevious
div className: 'mp-history-content',
if @props.loadingPrevious
el Spinner
else
button
className: 'mp-history-content__show-more'
type: 'button'
onClick: @props.loadPrevious
osu.trans 'common.buttons.show_more'
div
className: 'mp-history-events'
ref: @eventsRef
for event, i in @props.events
if event.detail.type == 'other'
continue if !event.game? || (!event.game.end_time? && event.game.id != @props.currentGameId)
div
className: 'mp-history-events__game'
key: event.id
el Game,
event: event
teamScores: @teamScores i
users: @props.users
else
div
className: 'mp-history-events__event'
key: event.id
el Event,
event: event
users: @props.users
key: event.id
if @props.hasNext
div className: 'mp-history-content',
if @props.isAutoloading
div className: 'mp-history-content__spinner',
div
className: 'mp-history-content__spinner-label'
osu.trans 'multiplayer.match.in_progress_spinner_label'
el Spinner
else if @props.loadingNext
el Spinner
else
button
className: 'mp-history-content__show-more'
type: 'button'
onClick: @props.loadNext
osu.trans 'common.buttons.show_more'
teamScores: (eventIndex) =>
game = @props.events[eventIndex].game
return if !game?
@scoresCache ?= {}
if !@scoresCache[eventIndex]?
scores =
blue: 0
red: 0
return scores if !game.end_time?
for score in game.scores
continue if !score.multiplayer.pass
scores[score.multiplayer.team] += score.score
@scoresCache[eventIndex] = scores
@scoresCache[eventIndex]
| 203562 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { Event } from './event'
import { Game } from './game'
import * as React from 'react'
import { button, div, h3 } from 'react-dom-factories'
import { Spinner } from 'spinner'
el = React.createElement
export class Content extends React.PureComponent
constructor: (props) ->
super props
@eventsRef = React.createRef()
getSnapshotBeforeUpdate: (prevProps, prevState) =>
snapshot = {}
if prevProps.events?.length > 0 && @props.events?.length > 0
# events are prepended, use previous first entry as reference
if prevProps.events[0].id > @props.events[0].id
snapshot.reference =
# firstChild to avoid calculating based on ever-changing padding
@eventsRef.current.children[0].firstChild
# events are appended, use previous last entry as reference
else if _.last(prevProps.events).id < _.last(@props.events).id
snapshot.reference =
_.last(@eventsRef.current.children).firstChild
if snapshot.reference?
snapshot.referenceTop = snapshot.reference.getBoundingClientRect().top
if osu.bottomPageDistance() < 10 && prevProps.isAutoloading && @props.isAutoloading
snapshot.scrollToLastEvent = true
snapshot
componentDidUpdate: (prevProps, prevState, snapshot) =>
if snapshot.scrollToLastEvent
$(window).stop().scrollTo @eventsRef.current.scrollHeight, 500
else if snapshot.reference?
currentScrollReferenceTop = snapshot.reference.getBoundingClientRect().top
currentDocumentScrollTop = window.pageYOffset
targetDocumentScrollTop = currentDocumentScrollTop + currentScrollReferenceTop - snapshot.referenceTop
window.scrollTo window.pageXOffset, targetDocumentScrollTop
render: =>
div className: 'osu-page osu-page--mp-history',
h3 null, @props.match.name
if @props.hasPrevious
div className: 'mp-history-content',
if @props.loadingPrevious
el Spinner
else
button
className: 'mp-history-content__show-more'
type: 'button'
onClick: @props.loadPrevious
osu.trans 'common.buttons.show_more'
div
className: 'mp-history-events'
ref: @eventsRef
for event, i in @props.events
if event.detail.type == 'other'
continue if !event.game? || (!event.game.end_time? && event.game.id != @props.currentGameId)
div
className: 'mp-history-events__game'
key: event.id
el Game,
event: event
teamScores: @teamScores i
users: @props.users
else
div
className: 'mp-history-events__event'
key: event.id
el Event,
event: event
users: @props.users
key: event.id
if @props.hasNext
div className: 'mp-history-content',
if @props.isAutoloading
div className: 'mp-history-content__spinner',
div
className: 'mp-history-content__spinner-label'
osu.trans 'multiplayer.match.in_progress_spinner_label'
el Spinner
else if @props.loadingNext
el Spinner
else
button
className: 'mp-history-content__show-more'
type: 'button'
onClick: @props.loadNext
osu.trans 'common.buttons.show_more'
teamScores: (eventIndex) =>
game = @props.events[eventIndex].game
return if !game?
@scoresCache ?= {}
if !@scoresCache[eventIndex]?
scores =
blue: 0
red: 0
return scores if !game.end_time?
for score in game.scores
continue if !score.multiplayer.pass
scores[score.multiplayer.team] += score.score
@scoresCache[eventIndex] = scores
@scoresCache[eventIndex]
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { Event } from './event'
import { Game } from './game'
import * as React from 'react'
import { button, div, h3 } from 'react-dom-factories'
import { Spinner } from 'spinner'
el = React.createElement
export class Content extends React.PureComponent
constructor: (props) ->
super props
@eventsRef = React.createRef()
getSnapshotBeforeUpdate: (prevProps, prevState) =>
snapshot = {}
if prevProps.events?.length > 0 && @props.events?.length > 0
# events are prepended, use previous first entry as reference
if prevProps.events[0].id > @props.events[0].id
snapshot.reference =
# firstChild to avoid calculating based on ever-changing padding
@eventsRef.current.children[0].firstChild
# events are appended, use previous last entry as reference
else if _.last(prevProps.events).id < _.last(@props.events).id
snapshot.reference =
_.last(@eventsRef.current.children).firstChild
if snapshot.reference?
snapshot.referenceTop = snapshot.reference.getBoundingClientRect().top
if osu.bottomPageDistance() < 10 && prevProps.isAutoloading && @props.isAutoloading
snapshot.scrollToLastEvent = true
snapshot
componentDidUpdate: (prevProps, prevState, snapshot) =>
if snapshot.scrollToLastEvent
$(window).stop().scrollTo @eventsRef.current.scrollHeight, 500
else if snapshot.reference?
currentScrollReferenceTop = snapshot.reference.getBoundingClientRect().top
currentDocumentScrollTop = window.pageYOffset
targetDocumentScrollTop = currentDocumentScrollTop + currentScrollReferenceTop - snapshot.referenceTop
window.scrollTo window.pageXOffset, targetDocumentScrollTop
render: =>
div className: 'osu-page osu-page--mp-history',
h3 null, @props.match.name
if @props.hasPrevious
div className: 'mp-history-content',
if @props.loadingPrevious
el Spinner
else
button
className: 'mp-history-content__show-more'
type: 'button'
onClick: @props.loadPrevious
osu.trans 'common.buttons.show_more'
div
className: 'mp-history-events'
ref: @eventsRef
for event, i in @props.events
if event.detail.type == 'other'
continue if !event.game? || (!event.game.end_time? && event.game.id != @props.currentGameId)
div
className: 'mp-history-events__game'
key: event.id
el Game,
event: event
teamScores: @teamScores i
users: @props.users
else
div
className: 'mp-history-events__event'
key: event.id
el Event,
event: event
users: @props.users
key: event.id
if @props.hasNext
div className: 'mp-history-content',
if @props.isAutoloading
div className: 'mp-history-content__spinner',
div
className: 'mp-history-content__spinner-label'
osu.trans 'multiplayer.match.in_progress_spinner_label'
el Spinner
else if @props.loadingNext
el Spinner
else
button
className: 'mp-history-content__show-more'
type: 'button'
onClick: @props.loadNext
osu.trans 'common.buttons.show_more'
teamScores: (eventIndex) =>
game = @props.events[eventIndex].game
return if !game?
@scoresCache ?= {}
if !@scoresCache[eventIndex]?
scores =
blue: 0
red: 0
return scores if !game.end_time?
for score in game.scores
continue if !score.multiplayer.pass
scores[score.multiplayer.team] += score.score
@scoresCache[eventIndex] = scores
@scoresCache[eventIndex]
|
[
{
"context": "name = 'coffee-engine'\n $scope.json =\n name: 'Mario'\n template: 'cinema'\n\n $scope.templates = [\n ",
"end": 1247,
"score": 0.9594364762306213,
"start": 1242,
"tag": "NAME",
"value": "Mario"
}
] | tools/game-maker/controller.coffee | mess110/coffee-engine | 1 | app.controller 'GameMakerController', ['$scope', '$mdToast', '$location', '$window', '$routeParams', ($scope, $mdToast, $location, $window, $routeParams) ->
Hodler.get().engine.removeDom()
$scope.workspace.lastOpenedProject = $routeParams.id
$scope.saveWorkspace()
$scope.scenes = []
$scope.ui.project.name = $routeParams.id
$scope.ui.newFilename = ''
workspaceQuery.getScenes($scope.workspace, $scope.ui.project.name, (err, scenes) ->
$scope.scenes = scenes
$scope.$apply()
)
$scope.newScene = ->
return unless $scope.ui.newFilename?
return if $scope.ui.newFilename == ''
newSceneObj = fileSystem.newScene($scope.workspace, $scope.ui.newFilename)
scenePath = fileSystem.getScenePath($scope.workspace, newSceneObj.id)
$scope.editScene(scenePath)
$scope.editScene = (scenePath) ->
$scope.workspace.lastOpenedScene = scenePath
$scope.saveWorkspace()
$scope.goTo('cinematic-editor')
$scope.getSceneName = (scene) ->
scene.split('/').last().split('.').first()
]
app.controller 'NewGameController', ['$scope', '$mdToast', '$location', ($scope, $mdToast, $location) ->
Hodler.get().engine.removeDom()
$scope.ui.project.name = 'coffee-engine'
$scope.json =
name: 'Mario'
template: 'cinema'
$scope.templates = [
{ name: 'basic', hint: 'Minimal template. Does not have build tools.' }
{ name: 'cinema', hint: 'Cinematic template for storytelling.' }
{ name: 'project', hint: 'Project template with build tools.' }
{ name: 'project-mesh', hint: 'Project template for mesh networks.' }
{ name: 'project-multiplayer', hint: 'Project template with networking and build tools.' }
{ name: 'project-multiplayer-menu', hint: 'More complete project template' }
{ name: 'vr-project', hint: 'VR project with multiplayer and build tools.' }
]
$scope.newGame = ->
success = fileSystem.newGame($scope.json)
$scope.goTo("game-maker/#{$scope.json.name}") if success
]
| 156058 | app.controller 'GameMakerController', ['$scope', '$mdToast', '$location', '$window', '$routeParams', ($scope, $mdToast, $location, $window, $routeParams) ->
Hodler.get().engine.removeDom()
$scope.workspace.lastOpenedProject = $routeParams.id
$scope.saveWorkspace()
$scope.scenes = []
$scope.ui.project.name = $routeParams.id
$scope.ui.newFilename = ''
workspaceQuery.getScenes($scope.workspace, $scope.ui.project.name, (err, scenes) ->
$scope.scenes = scenes
$scope.$apply()
)
$scope.newScene = ->
return unless $scope.ui.newFilename?
return if $scope.ui.newFilename == ''
newSceneObj = fileSystem.newScene($scope.workspace, $scope.ui.newFilename)
scenePath = fileSystem.getScenePath($scope.workspace, newSceneObj.id)
$scope.editScene(scenePath)
$scope.editScene = (scenePath) ->
$scope.workspace.lastOpenedScene = scenePath
$scope.saveWorkspace()
$scope.goTo('cinematic-editor')
$scope.getSceneName = (scene) ->
scene.split('/').last().split('.').first()
]
app.controller 'NewGameController', ['$scope', '$mdToast', '$location', ($scope, $mdToast, $location) ->
Hodler.get().engine.removeDom()
$scope.ui.project.name = 'coffee-engine'
$scope.json =
name: '<NAME>'
template: 'cinema'
$scope.templates = [
{ name: 'basic', hint: 'Minimal template. Does not have build tools.' }
{ name: 'cinema', hint: 'Cinematic template for storytelling.' }
{ name: 'project', hint: 'Project template with build tools.' }
{ name: 'project-mesh', hint: 'Project template for mesh networks.' }
{ name: 'project-multiplayer', hint: 'Project template with networking and build tools.' }
{ name: 'project-multiplayer-menu', hint: 'More complete project template' }
{ name: 'vr-project', hint: 'VR project with multiplayer and build tools.' }
]
$scope.newGame = ->
success = fileSystem.newGame($scope.json)
$scope.goTo("game-maker/#{$scope.json.name}") if success
]
| true | app.controller 'GameMakerController', ['$scope', '$mdToast', '$location', '$window', '$routeParams', ($scope, $mdToast, $location, $window, $routeParams) ->
Hodler.get().engine.removeDom()
$scope.workspace.lastOpenedProject = $routeParams.id
$scope.saveWorkspace()
$scope.scenes = []
$scope.ui.project.name = $routeParams.id
$scope.ui.newFilename = ''
workspaceQuery.getScenes($scope.workspace, $scope.ui.project.name, (err, scenes) ->
$scope.scenes = scenes
$scope.$apply()
)
$scope.newScene = ->
return unless $scope.ui.newFilename?
return if $scope.ui.newFilename == ''
newSceneObj = fileSystem.newScene($scope.workspace, $scope.ui.newFilename)
scenePath = fileSystem.getScenePath($scope.workspace, newSceneObj.id)
$scope.editScene(scenePath)
$scope.editScene = (scenePath) ->
$scope.workspace.lastOpenedScene = scenePath
$scope.saveWorkspace()
$scope.goTo('cinematic-editor')
$scope.getSceneName = (scene) ->
scene.split('/').last().split('.').first()
]
app.controller 'NewGameController', ['$scope', '$mdToast', '$location', ($scope, $mdToast, $location) ->
Hodler.get().engine.removeDom()
$scope.ui.project.name = 'coffee-engine'
$scope.json =
name: 'PI:NAME:<NAME>END_PI'
template: 'cinema'
$scope.templates = [
{ name: 'basic', hint: 'Minimal template. Does not have build tools.' }
{ name: 'cinema', hint: 'Cinematic template for storytelling.' }
{ name: 'project', hint: 'Project template with build tools.' }
{ name: 'project-mesh', hint: 'Project template for mesh networks.' }
{ name: 'project-multiplayer', hint: 'Project template with networking and build tools.' }
{ name: 'project-multiplayer-menu', hint: 'More complete project template' }
{ name: 'vr-project', hint: 'VR project with multiplayer and build tools.' }
]
$scope.newGame = ->
success = fileSystem.newGame($scope.json)
$scope.goTo("game-maker/#{$scope.json.name}") if success
]
|
[
{
"context": "ed from Sublime Completions\n# Converter created by Renato \"Hii\" Garcia\n# Repo: https://github.com/Renato-Ga",
"end": 105,
"score": 0.9998212456703186,
"start": 99,
"tag": "NAME",
"value": "Renato"
},
{
"context": "ublime Completions\n# Converter created by Renato \"... | snippets/streamer.cson | Wuzi/language-pawn | 4 | # Incognito's Streamer snippets for Atom converted from Sublime Completions
# Converter created by Renato "Hii" Garcia
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'Streamer_GetTickRate':
'prefix': 'Streamer_GetTickRate'
'body': 'Streamer_GetTickRate()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetTickRate':
'prefix': 'Streamer_SetTickRate'
'body': 'Streamer_SetTickRate(${1:rate})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetMaxItems':
'prefix': 'Streamer_GetMaxItems'
'body': 'Streamer_GetMaxItems(${1:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetMaxItems':
'prefix': 'Streamer_SetMaxItems'
'body': 'Streamer_SetMaxItems(${1:type}, ${2:items})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetVisibleItems':
'prefix': 'Streamer_GetVisibleItems'
'body': 'Streamer_GetVisibleItems(${1:type}, ${2:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetVisibleItems':
'prefix': 'Streamer_SetVisibleItems'
'body': 'Streamer_SetVisibleItems(${1:type}, ${2:items}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetRadiusMultiplier':
'prefix': 'Streamer_GetRadiusMultiplier'
'body': 'Streamer_GetRadiusMultiplier(${1:type}, ${2:Float:multiplier}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetRadiusMultiplier':
'prefix': 'Streamer_SetRadiusMultiplier'
'body': 'Streamer_SetRadiusMultiplier(${1:type}, ${2:Float:multiplier}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetTypePriority':
'prefix': 'Streamer_GetTypePriority'
'body': 'Streamer_GetTypePriority(${1:types[]}, ${2:maxtypes = sizeof types})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetTypePriority':
'prefix': 'Streamer_SetTypePriority'
'body': 'Streamer_SetTypePriority(${1:const types[]}, ${2:maxtypes = sizeof types})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetCellDistance':
'prefix': 'Streamer_GetCellDistance'
'body': 'Streamer_GetCellDistance(${1:Float:distance})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetCellDistance':
'prefix': 'Streamer_SetCellDistance'
'body': 'Streamer_SetCellDistance(${1:Float:distance})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetCellSize':
'prefix': 'Streamer_GetCellSize'
'body': 'Streamer_GetCellSize(${1:Float:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetCellSize':
'prefix': 'Streamer_SetCellSize'
'body': 'Streamer_SetCellSize(${1:Float:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleErrorCallback':
'prefix': 'Streamer_ToggleErrorCallback'
'body': 'Streamer_ToggleErrorCallback(${1:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleErrorCallback':
'prefix': 'Streamer_IsToggleErrorCallback'
'body': 'Streamer_IsToggleErrorCallback()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ProcessActiveItems':
'prefix': 'Streamer_ProcessActiveItems'
'body': 'Streamer_ProcessActiveItems()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleIdleUpdate':
'prefix': 'Streamer_ToggleIdleUpdate'
'body': 'Streamer_ToggleIdleUpdate(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleIdleUpdate':
'prefix': 'Streamer_IsToggleIdleUpdate'
'body': 'Streamer_IsToggleIdleUpdate(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleCameraUpdate':
'prefix': 'Streamer_ToggleCameraUpdate'
'body': 'Streamer_ToggleCameraUpdate(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleCameraUpdate':
'prefix': 'Streamer_IsToggleCameraUpdate'
'body': 'Streamer_IsToggleCameraUpdate(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemUpdate':
'prefix': 'Streamer_ToggleItemUpdate'
'body': 'Streamer_ToggleItemUpdate(${1:playerid}, ${2:type}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItemUpdate':
'prefix': 'Streamer_IsToggleItemUpdate'
'body': 'Streamer_IsToggleItemUpdate(${1:playerid}, ${2:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_Update':
'prefix': 'Streamer_Update'
'body': 'Streamer_Update(${1:playerid}, ${2:type = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_UpdateEx':
'prefix': 'Streamer_UpdateEx'
'body': 'Streamer_UpdateEx(${1:playerid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:type = -1}, ${8:compensatedtime = -1}, ${9:freezeplayer = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetFloatData':
'prefix': 'Streamer_GetFloatData'
'body': 'Streamer_GetFloatData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:Float:result})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetFloatData':
'prefix': 'Streamer_SetFloatData'
'body': 'Streamer_SetFloatData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:Float:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetIntData':
'prefix': 'Streamer_GetIntData'
'body': 'Streamer_GetIntData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetIntData':
'prefix': 'Streamer_SetIntData'
'body': 'Streamer_SetIntData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetArrayData':
'prefix': 'Streamer_GetArrayData'
'body': 'Streamer_GetArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:dest[]}, ${5:maxdest = sizeof dest})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetArrayData':
'prefix': 'Streamer_SetArrayData'
'body': 'Streamer_SetArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:const src[]}, ${5:maxsrc = sizeof src})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsInArrayData':
'prefix': 'Streamer_IsInArrayData'
'body': 'Streamer_IsInArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_AppendArrayData':
'prefix': 'Streamer_AppendArrayData'
'body': 'Streamer_AppendArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_RemoveArrayData':
'prefix': 'Streamer_RemoveArrayData'
'body': 'Streamer_RemoveArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetUpperBound':
'prefix': 'Streamer_GetUpperBound'
'body': 'Streamer_GetUpperBound(${1:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetDistanceToItem':
'prefix': 'Streamer_GetDistanceToItem'
'body': 'Streamer_GetDistanceToItem(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:STREAMER_ALL_TAGS:id}, ${6:Float:distance}, ${7:dimensions = 3})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemStatic':
'prefix': 'Streamer_ToggleItemStatic'
'body': 'Streamer_ToggleItemStatic(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItemStatic':
'prefix': 'Streamer_IsToggleItemStatic'
'body': 'Streamer_IsToggleItemStatic(${1:type}, ${2:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemInternalID':
'prefix': 'Streamer_GetItemInternalID'
'body': 'Streamer_GetItemInternalID(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:streamerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemStreamerID':
'prefix': 'Streamer_GetItemStreamerID'
'body': 'Streamer_GetItemStreamerID(${1:playerid}, ${2:type}, ${3:internalid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsItemVisible':
'prefix': 'Streamer_IsItemVisible'
'body': 'Streamer_IsItemVisible(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_DestroyAllVisibleItems':
'prefix': 'Streamer_DestroyAllVisibleItems'
'body': 'Streamer_DestroyAllVisibleItems(${1:playerid}, ${2:type}, ${3:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CountVisibleItems':
'prefix': 'Streamer_CountVisibleItems'
'body': 'Streamer_CountVisibleItems(${1:playerid}, ${2:type}, ${3:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_DestroyAllItems':
'prefix': 'Streamer_DestroyAllItems'
'body': 'Streamer_DestroyAllItems(${1:type}, ${2:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CountItems':
'prefix': 'Streamer_CountItems'
'body': 'Streamer_CountItems(${1:type}, ${2:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicObject':
'prefix': 'CreateDynamicObject'
'body': 'CreateDynamicObject(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:rx}, ${6:Float:ry}, ${7:Float:rz}, ${8:worldid = -1}, ${9:interiorid = -1}, ${10:playerid = -1}, ${11:Float:streamdistance = STREAMER_OBJECT_SD}, ${12:Float:drawdistance = STREAMER_OBJECT_DD}, ${13:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${14:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicObject':
'prefix': 'DestroyDynamicObject'
'body': 'DestroyDynamicObject(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicObject':
'prefix': 'IsValidDynamicObject'
'body': 'IsValidDynamicObject(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectPos':
'prefix': 'SetDynamicObjectPos'
'body': 'SetDynamicObjectPos(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectPos':
'prefix': 'GetDynamicObjectPos'
'body': 'GetDynamicObjectPos(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectRot':
'prefix': 'SetDynamicObjectRot'
'body': 'SetDynamicObjectRot(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:rx}, ${3:Float:ry}, ${4:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectRot':
'prefix': 'GetDynamicObjectRot'
'body': 'GetDynamicObjectRot(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:rx}, ${3:Float:ry}, ${4:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectNoCameraCol':
'prefix': 'SetDynamicObjectNoCameraCol'
'body': 'SetDynamicObjectNoCameraCol(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectNoCameraCol':
'prefix': 'GetDynamicObjectNoCameraCol'
'body': 'GetDynamicObjectNoCameraCol(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'MoveDynamicObject':
'prefix': 'MoveDynamicObject'
'body': 'MoveDynamicObject(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:speed}, ${6:Float:rx = -1000.0}, ${7:Float:ry = -1000.0}, ${8:Float:rz = -1000.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'StopDynamicObject':
'prefix': 'StopDynamicObject'
'body': 'StopDynamicObject(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicObjectMoving':
'prefix': 'IsDynamicObjectMoving'
'body': 'IsDynamicObjectMoving(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachCameraToDynamicObject':
'prefix': 'AttachCameraToDynamicObject'
'body': 'AttachCameraToDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicObjectToObject':
'prefix': 'AttachDynamicObjectToObject'
'body': 'AttachDynamicObjectToObject(${1:STREAMER_TAG_OBJECT:objectid}, ${2:attachtoid}, ${3:Float:offsetx}, ${4:Float:offsety}, ${5:Float:offsetz}, ${6:Float:rx}, ${7:Float:ry}, ${8:Float:rz}, ${9:syncrotation = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicObjectToPlayer':
'prefix': 'AttachDynamicObjectToPlayer'
'body': 'AttachDynamicObjectToPlayer(${1:STREAMER_TAG_OBJECT:objectid}, ${2:playerid}, ${3:Float:offsetx}, ${4:Float:offsety}, ${5:Float:offsetz}, ${6:Float:rx}, ${7:Float:ry}, ${8:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicObjectToVehicle':
'prefix': 'AttachDynamicObjectToVehicle'
'body': 'AttachDynamicObjectToVehicle(${1:STREAMER_TAG_OBJECT:objectid}, ${2:vehicleid}, ${3:Float:offsetx}, ${4:Float:offsety}, ${5:Float:offsetz}, ${6:Float:rx}, ${7:Float:ry}, ${8:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'EditDynamicObject':
'prefix': 'EditDynamicObject'
'body': 'EditDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicObjectMaterialUsed':
'prefix': 'IsDynamicObjectMaterialUsed'
'body': 'IsDynamicObjectMaterialUsed(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectMaterial':
'prefix': 'GetDynamicObjectMaterial'
'body': 'GetDynamicObjectMaterial(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:modelid}, ${4:txdname[]}, ${5:texturename[]}, ${6:materialcolor}, ${7:maxtxdname = sizeof txdname}, ${8:maxtexturename = sizeof texturename})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectMaterial':
'prefix': 'SetDynamicObjectMaterial'
'body': 'SetDynamicObjectMaterial(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:modelid}, ${4:const txdname[]}, ${5:const texturename[]}, ${6:materialcolor = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicObjectMaterialTextUsed':
'prefix': 'IsDynamicObjectMaterialTextUsed'
'body': 'IsDynamicObjectMaterialTextUsed(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectMaterialText':
'prefix': 'GetDynamicObjectMaterialText'
'body': 'GetDynamicObjectMaterialText(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:text[]}, ${4:materialsize}, ${5:fontface[]}, ${6:fontsize}, ${7:bold}, ${8:fontcolor}, ${9:backcolor}, ${10:textalignment}, ${11:maxtext = sizeof text}, ${12:maxfontface = sizeof fontface})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectMaterialText':
'prefix': 'SetDynamicObjectMaterialText'
'body': 'SetDynamicObjectMaterialText(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:const text[]}, ${4:materialsize = OBJECT_MATERIAL_SIZE_256x128}, ${5:const fontface[] = \"Arial\"}, ${6:fontsize = 24}, ${7:bold = 1}, ${8:fontcolor = 0xFFFFFFFF}, ${9:backcolor = 0}, ${10:textalignment = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPickup':
'prefix': 'CreateDynamicPickup'
'body': 'CreateDynamicPickup(${1:modelid}, ${2:type}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:worldid = -1}, ${7:interiorid = -1}, ${8:playerid = -1}, ${9:Float:streamdistance = STREAMER_PICKUP_SD}, ${10:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${11:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicPickup':
'prefix': 'DestroyDynamicPickup'
'body': 'DestroyDynamicPickup(${1:STREAMER_TAG_PICKUP:pickupid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicPickup':
'prefix': 'IsValidDynamicPickup'
'body': 'IsValidDynamicPickup(${1:STREAMER_TAG_PICKUP:pickupid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCP':
'prefix': 'CreateDynamicCP'
'body': 'CreateDynamicCP(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:Float:streamdistance = STREAMER_CP_SD}, ${9:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${10:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicCP':
'prefix': 'DestroyDynamicCP'
'body': 'DestroyDynamicCP(${1:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicCP':
'prefix': 'IsValidDynamicCP'
'body': 'IsValidDynamicCP(${1:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerDynamicCP':
'prefix': 'TogglePlayerDynamicCP'
'body': 'TogglePlayerDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerAllDynamicCPs':
'prefix': 'TogglePlayerAllDynamicCPs'
'body': 'TogglePlayerAllDynamicCPs(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInDynamicCP':
'prefix': 'IsPlayerInDynamicCP'
'body': 'IsPlayerInDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerVisibleDynamicCP':
'prefix': 'GetPlayerVisibleDynamicCP'
'body': 'GetPlayerVisibleDynamicCP(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRaceCP':
'prefix': 'CreateDynamicRaceCP'
'body': 'CreateDynamicRaceCP(${1:type}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:nextx}, ${6:Float:nexty}, ${7:Float:nextz}, ${8:Float:size}, ${9:worldid = -1}, ${10:interiorid = -1}, ${11:playerid = -1}, ${12:Float:streamdistance = STREAMER_RACE_CP_SD}, ${13:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${14:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicRaceCP':
'prefix': 'DestroyDynamicRaceCP'
'body': 'DestroyDynamicRaceCP(${1:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicRaceCP':
'prefix': 'IsValidDynamicRaceCP'
'body': 'IsValidDynamicRaceCP(${1:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerDynamicRaceCP':
'prefix': 'TogglePlayerDynamicRaceCP'
'body': 'TogglePlayerDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerAllDynamicRaceCPs':
'prefix': 'TogglePlayerAllDynamicRaceCPs'
'body': 'TogglePlayerAllDynamicRaceCPs(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInDynamicRaceCP':
'prefix': 'IsPlayerInDynamicRaceCP'
'body': 'IsPlayerInDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerVisibleDynamicRaceCP':
'prefix': 'GetPlayerVisibleDynamicRaceCP'
'body': 'GetPlayerVisibleDynamicRaceCP(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicMapIcon':
'prefix': 'CreateDynamicMapIcon'
'body': 'CreateDynamicMapIcon(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:color}, ${6:worldid = -1}, ${7:interiorid = -1}, ${8:playerid = -1}, ${9:Float:streamdistance = STREAMER_MAP_ICON_SD}, ${10:style = MAPICON_LOCAL}, ${11:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${12:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicMapIcon':
'prefix': 'DestroyDynamicMapIcon'
'body': 'DestroyDynamicMapIcon(${1:STREAMER_TAG_MAP_ICON:iconid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicMapIcon':
'prefix': 'IsValidDynamicMapIcon'
'body': 'IsValidDynamicMapIcon(${1:STREAMER_TAG_MAP_ICON:iconid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamic3DTextLabel':
'prefix': 'CreateDynamic3DTextLabel'
'body': 'CreateDynamic3DTextLabel(${1:const text[]}, ${2:color}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:Float:drawdistance}, ${7:attachedplayer = INVALID_PLAYER_ID}, ${8:attachedvehicle = INVALID_VEHICLE_ID}, ${9:testlos = 0}, ${10:worldid = -1}, ${11:interiorid = -1}, ${12:playerid = -1}, ${13:Float:streamdistance = STREAMER_3D_TEXT_LABEL_SD}, ${14:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${15:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamic3DTextLabel':
'prefix': 'DestroyDynamic3DTextLabel'
'body': 'DestroyDynamic3DTextLabel(${1:STREAMER_TAG_3D_TEXT_LABEL:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamic3DTextLabel':
'prefix': 'IsValidDynamic3DTextLabel'
'body': 'IsValidDynamic3DTextLabel(${1:STREAMER_TAG_3D_TEXT_LABEL:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamic3DTextLabelText':
'prefix': 'GetDynamic3DTextLabelText'
'body': 'GetDynamic3DTextLabelText(${1:STREAMER_TAG_3D_TEXT_LABEL:id}, ${2:text[]}, ${3:maxtext = sizeof text})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'UpdateDynamic3DTextLabelText':
'prefix': 'UpdateDynamic3DTextLabelText'
'body': 'UpdateDynamic3DTextLabelText(${1:STREAMER_TAG_3D_TEXT_LABEL:id}, ${2:color}, ${3:const text[]})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCircle':
'prefix': 'CreateDynamicCircle'
'body': 'CreateDynamicCircle(${1:Float:x}, ${2:Float:y}, ${3:Float:size}, ${4:worldid = -1}, ${5:interiorid = -1}, ${6:playerid = -1}, ${7:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCylinder':
'prefix': 'CreateDynamicCylinder'
'body': 'CreateDynamicCylinder(${1:Float:x}, ${2:Float:y}, ${3:Float:minz}, ${4:Float:maxz}, ${5:Float:size}, ${6:worldid = -1}, ${7:interiorid = -1}, ${8:playerid = -1}, ${9:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicSphere':
'prefix': 'CreateDynamicSphere'
'body': 'CreateDynamicSphere(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRectangle':
'prefix': 'CreateDynamicRectangle'
'body': 'CreateDynamicRectangle(${1:Float:minx}, ${2:Float:miny}, ${3:Float:maxx}, ${4:Float:maxy}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCuboid':
'prefix': 'CreateDynamicCuboid'
'body': 'CreateDynamicCuboid(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worldid = -1}, ${8:interiorid = -1}, ${9:playerid = -1}, ${10:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCube':
'prefix': 'CreateDynamicCube'
'body': 'CreateDynamicCube(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worldid = -1}, ${8:interiorid = -1}, ${9:playerid = -1}, ${10:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPolygon':
'prefix': 'CreateDynamicPolygon'
'body': 'CreateDynamicPolygon(${1:Float:points[]}, ${2:Float:minz = -FLOAT_INFINITY}, ${3:Float:maxz = FLOAT_INFINITY}, ${4:maxpoints = sizeof points}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicArea':
'prefix': 'DestroyDynamicArea'
'body': 'DestroyDynamicArea(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicArea':
'prefix': 'IsValidDynamicArea'
'body': 'IsValidDynamicArea(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicPolygonPoints':
'prefix': 'GetDynamicPolygonPoints'
'body': 'GetDynamicPolygonPoints(${1:STREAMER_TAG_AREA:areaid}, ${2:Float:points[]}, ${3:maxpoints = sizeof points})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicPolygonNumberPoints':
'prefix': 'GetDynamicPolygonNumberPoints'
'body': 'GetDynamicPolygonNumberPoints(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerDynamicArea':
'prefix': 'TogglePlayerDynamicArea'
'body': 'TogglePlayerDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerAllDynamicAreas':
'prefix': 'TogglePlayerAllDynamicAreas'
'body': 'TogglePlayerAllDynamicAreas(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInDynamicArea':
'prefix': 'IsPlayerInDynamicArea'
'body': 'IsPlayerInDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid}, ${3:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInAnyDynamicArea':
'prefix': 'IsPlayerInAnyDynamicArea'
'body': 'IsPlayerInAnyDynamicArea(${1:playerid}, ${2:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsAnyPlayerInDynamicArea':
'prefix': 'IsAnyPlayerInDynamicArea'
'body': 'IsAnyPlayerInDynamicArea(${1:STREAMER_TAG_AREA:areaid}, ${2:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsAnyPlayerInAnyDynamicArea':
'prefix': 'IsAnyPlayerInAnyDynamicArea'
'body': 'IsAnyPlayerInAnyDynamicArea(${1:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerDynamicAreas':
'prefix': 'GetPlayerDynamicAreas'
'body': 'GetPlayerDynamicAreas(${1:playerid}, ${2:STREAMER_TAG_AREA:areas[]}, ${3:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerNumberDynamicAreas':
'prefix': 'GetPlayerNumberDynamicAreas'
'body': 'GetPlayerNumberDynamicAreas(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPointInDynamicArea':
'prefix': 'IsPointInDynamicArea'
'body': 'IsPointInDynamicArea(${1:STREAMER_TAG_AREA:areaid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPointInAnyDynamicArea':
'prefix': 'IsPointInAnyDynamicArea'
'body': 'IsPointInAnyDynamicArea(${1:Float:x}, ${2:Float:y}, ${3:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicAreasForPoint':
'prefix': 'GetDynamicAreasForPoint'
'body': 'GetDynamicAreasForPoint(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:STREAMER_TAG_AREA:areas[]}, ${5:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetNumberDynamicAreasForPoint':
'prefix': 'GetNumberDynamicAreasForPoint'
'body': 'GetNumberDynamicAreasForPoint(${1:Float:x}, ${2:Float:y}, ${3:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicAreaToObject':
'prefix': 'AttachDynamicAreaToObject'
 'body': 'AttachDynamicAreaToObject(${1:STREAMER_TAG_AREA:areaid}, ${2:STREAMER_TAG_OBJECT_ALT:objectid}, ${3:type = STREAMER_OBJECT_TYPE_DYNAMIC}, ${4:playerid = INVALID_PLAYER_ID}, ${5:Float:offsetx = 0.0}, ${6:Float:offsety = 0.0}, ${7:Float:offsetz = 0.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicAreaToPlayer':
'prefix': 'AttachDynamicAreaToPlayer'
'body': 'AttachDynamicAreaToPlayer(${1:STREAMER_TAG_AREA:areaid}, ${2:playerid}, ${3:Float:offsetx = 0.0}, ${4:Float:offsety = 0.0}, ${5:Float:offsetz = 0.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicAreaToVehicle':
'prefix': 'AttachDynamicAreaToVehicle'
'body': 'AttachDynamicAreaToVehicle(${1:STREAMER_TAG_AREA:areaid}, ${2:vehicleid}, ${3:Float:offsetx = 0.0}, ${4:Float:offsety = 0.0}, ${5:Float:offsetz = 0.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicObjectEx':
'prefix': 'CreateDynamicObjectEx'
'body': 'CreateDynamicObjectEx(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:rx}, ${6:Float:ry}, ${7:Float:rz}, ${8:Float:streamdistance = STREAMER_OBJECT_SD}, ${9:Float:drawdistance = STREAMER_OBJECT_DD}, ${10:worlds[] = { -1 }}, ${11:interiors[] = { -1 }}, ${12:players[] = { -1 }}, ${13:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${14:priority = 0}, ${15:maxworlds = sizeof worlds}, ${16:maxinteriors = sizeof interiors}, ${17:maxplayers = sizeof players}, ${18:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPickupEx':
'prefix': 'CreateDynamicPickupEx'
'body': 'CreateDynamicPickupEx(${1:modelid}, ${2:type}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:Float:streamdistance = STREAMER_PICKUP_SD}, ${7:worlds[] = { -1 }}, ${8:interiors[] = { -1 }}, ${9:players[] = { -1 }}, ${10:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${11:priority = 0}, ${12:maxworlds = sizeof worlds}, ${13:maxinteriors = sizeof interiors}, ${14:maxplayers = sizeof players}, ${15:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCPEx':
'prefix': 'CreateDynamicCPEx'
'body': 'CreateDynamicCPEx(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:Float:streamdistance = STREAMER_CP_SD}, ${6:worlds[] = { -1 }}, ${7:interiors[] = { -1 }}, ${8:players[] = { -1 }}, ${9:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${10:priority = 0}, ${11:maxworlds = sizeof worlds}, ${12:maxinteriors = sizeof interiors}, ${13:maxplayers = sizeof players}, ${14:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRaceCPEx':
'prefix': 'CreateDynamicRaceCPEx'
'body': 'CreateDynamicRaceCPEx(${1:type}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:nextx}, ${6:Float:nexty}, ${7:Float:nextz}, ${8:Float:size}, ${9:Float:streamdistance = STREAMER_RACE_CP_SD}, ${10:worlds[] = { -1 }}, ${11:interiors[] = { -1 }}, ${12:players[] = { -1 }}, ${13:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${14:priority = 0}, ${15:maxworlds = sizeof worlds}, ${16:maxinteriors = sizeof interiors}, ${17:maxplayers = sizeof players}, ${18:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicMapIconEx':
'prefix': 'CreateDynamicMapIconEx'
'body': 'CreateDynamicMapIconEx(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:color}, ${6:style = MAPICON_LOCAL}, ${7:Float:streamdistance = STREAMER_MAP_ICON_SD}, ${8:worlds[] = { -1 }}, ${9:interiors[] = { -1 }}, ${10:players[] = { -1 }}, ${11:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${12:priority = 0}, ${13:maxworlds = sizeof worlds}, ${14:maxinteriors = sizeof interiors}, ${15:maxplayers = sizeof players}, ${16:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamic3DTextLabelEx':
'prefix': 'CreateDynamic3DTextLabelEx'
'body': 'CreateDynamic3DTextLabelEx(${1:const text[]}, ${2:color}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:Float:drawdistance}, ${7:attachedplayer = INVALID_PLAYER_ID}, ${8:attachedvehicle = INVALID_VEHICLE_ID}, ${9:testlos = 0}, ${10:Float:streamdistance = STREAMER_3D_TEXT_LABEL_SD}, ${11:worlds[] = { -1 }}, ${12:interiors[] = { -1 }}, ${13:players[] = { -1 }}, ${14:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${15:priority = 0}, ${16:maxworlds = sizeof worlds}, ${17:maxinteriors = sizeof interiors}, ${18:maxplayers = sizeof players}, ${19:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCircleEx':
'prefix': 'CreateDynamicCircleEx'
'body': 'CreateDynamicCircleEx(${1:Float:x}, ${2:Float:y}, ${3:Float:size}, ${4:worlds[] = { -1 }}, ${5:interiors[] = { -1 }}, ${6:players[] = { -1 }}, ${7:priority = 0}, ${8:maxworlds = sizeof worlds}, ${9:maxinteriors = sizeof interiors}, ${10:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCylinderEx':
'prefix': 'CreateDynamicCylinderEx'
'body': 'CreateDynamicCylinderEx(${1:Float:x}, ${2:Float:y}, ${3:Float:minz}, ${4:Float:maxz}, ${5:Float:size}, ${6:worlds[] = { -1 }}, ${7:interiors[] = { -1 }}, ${8:players[] = { -1 }}, ${9:priority = 0}, ${10:maxworlds = sizeof worlds}, ${11:maxinteriors = sizeof interiors}, ${12:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicSphereEx':
'prefix': 'CreateDynamicSphereEx'
'body': 'CreateDynamicSphereEx(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:worlds[] = { -1 }}, ${6:interiors[] = { -1 }}, ${7:players[] = { -1 }}, ${8:priority = 0}, ${9:maxworlds = sizeof worlds}, ${10:maxinteriors = sizeof interiors}, ${11:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRectangleEx':
'prefix': 'CreateDynamicRectangleEx'
'body': 'CreateDynamicRectangleEx(${1:Float:minx}, ${2:Float:miny}, ${3:Float:maxx}, ${4:Float:maxy}, ${5:worlds[] = { -1 }}, ${6:interiors[] = { -1 }}, ${7:players[] = { -1 }}, ${8:priority = 0}, ${9:maxworlds = sizeof worlds}, ${10:maxinteriors = sizeof interiors}, ${11:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCuboidEx':
'prefix': 'CreateDynamicCuboidEx'
'body': 'CreateDynamicCuboidEx(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worlds[] = { -1 }}, ${8:interiors[] = { -1 }}, ${9:players[] = { -1 }}, ${10:priority = 0}, ${11:maxworlds = sizeof worlds}, ${12:maxinteriors = sizeof interiors}, ${13:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCubeEx':
'prefix': 'CreateDynamicCubeEx'
'body': 'CreateDynamicCubeEx(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worlds[] = { -1 }}, ${8:interiors[] = { -1 }}, ${9:players[] = { -1 }}, ${10:priority = 0}, ${11:maxworlds = sizeof worlds}, ${12:maxinteriors = sizeof interiors}, ${13:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPolygonEx':
'prefix': 'CreateDynamicPolygonEx'
'body': 'CreateDynamicPolygonEx(${1:Float:points[]}, ${2:Float:minz = -FLOAT_INFINITY}, ${3:Float:maxz = FLOAT_INFINITY}, ${4:maxpoints = sizeof points}, ${5:worlds[] = { -1 }}, ${6:interiors[] = { -1 }}, ${7:players[] = { -1 }}, ${8:priority = 0}, ${9:maxworlds = sizeof worlds}, ${10:maxinteriors = sizeof interiors}, ${11:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_TickRate':
'prefix': 'Streamer_TickRate'
'body': 'Streamer_TickRate(${1:rate})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_MaxItems':
'prefix': 'Streamer_MaxItems'
'body': 'Streamer_MaxItems(${1:type}, ${2:items})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_VisibleItems':
'prefix': 'Streamer_VisibleItems'
'body': 'Streamer_VisibleItems(${1:type}, ${2:items}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CellDistance':
'prefix': 'Streamer_CellDistance'
'body': 'Streamer_CellDistance(${1:Float:distance})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CellSize':
'prefix': 'Streamer_CellSize'
'body': 'Streamer_CellSize(${1:Float:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CallbackHook':
'prefix': 'Streamer_CallbackHook'
'body': 'Streamer_CallbackHook(${1:callback}, ${2:...})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicObjects':
'prefix': 'DestroyAllDynamicObjects'
'body': 'DestroyAllDynamicObjects()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicObjects':
'prefix': 'CountDynamicObjects'
'body': 'CountDynamicObjects()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicPickups':
'prefix': 'DestroyAllDynamicPickups'
'body': 'DestroyAllDynamicPickups()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicPickups':
'prefix': 'CountDynamicPickups'
'body': 'CountDynamicPickups()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicCPs':
'prefix': 'DestroyAllDynamicCPs'
'body': 'DestroyAllDynamicCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicCPs':
'prefix': 'CountDynamicCPs'
'body': 'CountDynamicCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicRaceCPs':
'prefix': 'DestroyAllDynamicRaceCPs'
'body': 'DestroyAllDynamicRaceCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicRaceCPs':
'prefix': 'CountDynamicRaceCPs'
'body': 'CountDynamicRaceCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicMapIcons':
'prefix': 'DestroyAllDynamicMapIcons'
'body': 'DestroyAllDynamicMapIcons()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicMapIcons':
'prefix': 'CountDynamicMapIcons'
'body': 'CountDynamicMapIcons()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamic3DTextLabels':
'prefix': 'DestroyAllDynamic3DTextLabels'
'body': 'DestroyAllDynamic3DTextLabels()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamic3DTextLabels':
'prefix': 'CountDynamic3DTextLabels'
'body': 'CountDynamic3DTextLabels()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicAreas':
'prefix': 'DestroyAllDynamicAreas'
'body': 'DestroyAllDynamicAreas()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicAreas':
'prefix': 'CountDynamicAreas'
'body': 'CountDynamicAreas()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleChunkStream':
'prefix': 'Streamer_IsToggleChunkStream'
'body': 'Streamer_IsToggleChunkStream()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetLastUpdateTime':
'prefix': 'Streamer_GetLastUpdateTime'
'body': 'Streamer_GetLastUpdateTime(${1:&Float:time})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetChunkSize':
'prefix': 'Streamer_GetChunkSize'
'body': 'Streamer_GetChunkSize(${1:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetPlayerTickRate':
'prefix': 'Streamer_GetPlayerTickRate'
'body': 'Streamer_GetPlayerTickRate(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicActor':
'prefix': 'DestroyDynamicActor'
'body': 'DestroyDynamicActor(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicActor':
'prefix': 'IsValidDynamicActor'
'body': 'IsValidDynamicActor(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorVirtualWorld':
'prefix': 'GetDynamicActorVirtualWorld'
'body': 'GetDynamicActorVirtualWorld(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'ClearDynamicActorAnimations':
'prefix': 'ClearDynamicActorAnimations'
'body': 'ClearDynamicActorAnimations(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicActorInvulnerable':
'prefix': 'IsDynamicActorInvulnerable'
'body': 'IsDynamicActorInvulnerable(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerTargetDynamicActor':
'prefix': 'GetPlayerTargetDynamicActor'
'body': 'GetPlayerTargetDynamicActor(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerCameraTargetDynActor':
'prefix': 'GetPlayerCameraTargetDynActor'
'body': 'GetPlayerCameraTargetDynActor(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsToggleDynAreaSpectateMode':
'prefix': 'IsToggleDynAreaSpectateMode'
'body': 'IsToggleDynAreaSpectateMode(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerCameraTargetDynObject':
'prefix': 'GetPlayerCameraTargetDynObject'
'body': 'GetPlayerCameraTargetDynObject(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleChunkStream':
'prefix': 'Streamer_ToggleChunkStream'
'body': 'Streamer_ToggleChunkStream(${1:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicAreaType':
'prefix': 'GetDynamicAreaType'
'body': 'GetDynamicAreaType(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_AmxUnloadDestroyItems':
'prefix': 'Streamer_AmxUnloadDestroyItems'
'body': 'Streamer_AmxUnloadDestroyItems(${1:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItemInvAreas':
'prefix': 'Streamer_IsToggleItemInvAreas'
'body': 'Streamer_IsToggleItemInvAreas(${1:type}, ${2:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetChunkTickRate':
'prefix': 'Streamer_GetChunkTickRate'
'body': 'Streamer_GetChunkTickRate(${1:type}, ${2:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetChunkSize':
'prefix': 'Streamer_SetChunkSize'
'body': 'Streamer_SetChunkSize(${1:type}, ${2:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetPlayerTickRate':
'prefix': 'Streamer_SetPlayerTickRate'
'body': 'Streamer_SetPlayerTickRate(${1:playerid}, ${2:rate})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicActorStreamedIn':
'prefix': 'IsDynamicActorStreamedIn'
'body': 'IsDynamicActorStreamedIn(${1:STREAMER_TAG_ACTOR:actorid}, ${2:forplayerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorVirtualWorld':
'prefix': 'SetDynamicActorVirtualWorld'
'body': 'SetDynamicActorVirtualWorld(${1:STREAMER_TAG_ACTOR:actorid}, ${2:vworld})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorFacingAngle':
'prefix': 'GetDynamicActorFacingAngle'
'body': 'GetDynamicActorFacingAngle(${1:STREAMER_TAG_ACTOR:actorid}, ${2:&Float:ang})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorFacingAngle':
'prefix': 'SetDynamicActorFacingAngle'
'body': 'SetDynamicActorFacingAngle(${1:STREAMER_TAG_ACTOR:actorid}, ${2:Float:ang})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorHealth':
'prefix': 'GetDynamicActorHealth'
'body': 'GetDynamicActorHealth(${1:STREAMER_TAG_ACTOR:actorid}, ${2:&Float:health})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorHealth':
'prefix': 'SetDynamicActorHealth'
'body': 'SetDynamicActorHealth(${1:STREAMER_TAG_ACTOR:actorid}, ${2:Float:health})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorInvulnerable':
'prefix': 'SetDynamicActorInvulnerable'
'body': 'SetDynamicActorInvulnerable(${1:STREAMER_TAG_ACTOR:actorid}, ${2:invulnerable = true})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'ToggleDynAreaSpectateMode':
'prefix': 'ToggleDynAreaSpectateMode'
'body': 'ToggleDynAreaSpectateMode(${1:STREAMER_TAG_AREA:areaid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemInvAreas':
'prefix': 'Streamer_ToggleItemInvAreas'
'body': 'Streamer_ToggleItemInvAreas(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItem':
'prefix': 'Streamer_IsToggleItem'
'body': 'Streamer_IsToggleItem(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetChunkTickRate':
'prefix': 'Streamer_SetChunkTickRate'
'body': 'Streamer_SetChunkTickRate(${1:type}, ${2:rate}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemCallbacks':
'prefix': 'Streamer_ToggleItemCallbacks'
'body': 'Streamer_ToggleItemCallbacks(${1:type}, ${2:id}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetArrayDataLength':
'prefix': 'Streamer_GetArrayDataLength'
'body': 'Streamer_GetArrayDataLength(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItem':
'prefix': 'Streamer_ToggleItem'
'body': 'Streamer_ToggleItem(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:id}, ${4:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorPos':
'prefix': 'GetDynamicActorPos'
'body': 'GetDynamicActorPos(${1:STREAMER_TAG_ACTOR:actorid}, ${2:&Float:x}, ${3:&Float:y}, ${4:&Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorPos':
'prefix': 'SetDynamicActorPos'
'body': 'SetDynamicActorPos(${1:STREAMER_TAG_ACTOR:actorid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetAllVisibleItems':
'prefix': 'Streamer_GetAllVisibleItems'
'body': 'Streamer_GetAllVisibleItems(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:items[]}, ${4:maxitems = sizeof items})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleAllItems':
'prefix': 'Streamer_ToggleAllItems'
'body': 'Streamer_ToggleAllItems(${1:playerid}, ${2:type}, ${3:toggle}, ${4:const exceptions[] = { -1 }}, ${5:maxexceptions = sizeof exceptions})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemOffset':
'prefix': 'Streamer_GetItemOffset'
'body': 'Streamer_GetItemOffset(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:&Float:x}, ${4:&Float:y}, ${5:&Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetItemOffset':
'prefix': 'Streamer_SetItemOffset'
'body': 'Streamer_SetItemOffset(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemPos':
'prefix': 'Streamer_GetItemPos'
'body': 'Streamer_GetItemPos(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:&Float:x}, ${4:&Float:y}, ${5:&Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetItemPos':
'prefix': 'Streamer_SetItemPos'
'body': 'Streamer_SetItemPos(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsLineInAnyDynamicArea':
'prefix': 'IsLineInAnyDynamicArea'
'body': 'IsLineInAnyDynamicArea(${1:Float:x1}, ${2:Float:y1}, ${3:Float:z1}, ${4:Float:x2}, ${5:Float:y2}, ${6:Float:z2})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetNumberDynamicAreasForLine':
'prefix': 'GetNumberDynamicAreasForLine'
'body': 'GetNumberDynamicAreasForLine(${1:Float:x1}, ${2:Float:y1}, ${3:Float:z1}, ${4:Float:x2}, ${5:Float:y2}, ${6:Float:z2})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsLineInDynamicArea':
'prefix': 'IsLineInDynamicArea'
'body': 'IsLineInDynamicArea(${1:STREAMER_TAG_AREA:areaid}, ${2:Float:x1}, ${3:Float:y1}, ${4:Float:z1}, ${5:Float:x2}, ${6:Float:y2}, ${7:Float:z2})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicAreasForLine':
'prefix': 'GetDynamicAreasForLine'
'body': 'GetDynamicAreasForLine(${1:Float:x1}, ${2:Float:y1}, ${3:Float:z1}, ${4:Float:x2}, ${5:Float:y2}, ${6:Float:z2}, ${7:STREAMER_TAG_AREA:areas[]}, ${8:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetNearbyItems':
'prefix': 'Streamer_GetNearbyItems'
'body': 'Streamer_GetNearbyItems(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:STREAMER_ALL_TAGS:items[]}, ${6:maxitems = sizeof items}, ${7:Float:range = 300.0}, ${8:worldid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'ApplyDynamicActorAnimation':
'prefix': 'ApplyDynamicActorAnimation'
'body': 'ApplyDynamicActorAnimation(${1:STREAMER_TAG_ACTOR:actorid}, ${2:const animlib[]}, ${3:const animname[]}, ${4:Float:fdelta}, ${5:loop}, ${6:lockx}, ${7:locky}, ${8:freeze}, ${9:time})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorAnimation':
'prefix': 'GetDynamicActorAnimation'
'body': 'GetDynamicActorAnimation(${1:STREAMER_TAG_ACTOR:actorid}, ${2:animlib[]}, ${3:animname[]}, ${4:&Float:fdelta}, ${5:&loop}, ${6:&lockx}, ${7:&locky}, ${8:&freeze}, ${9:&time}, ${10:maxanimlib = sizeof animlib}, ${11:maxanimname = sizeof animname})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicActor':
'prefix': 'CreateDynamicActor'
'body': 'CreateDynamicActor(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:r}, ${6:invulnerable = true}, ${7:Float:health = 100.0}, ${8:worldid = -1}, ${9:interiorid = -1}, ${10:playerid = -1}, ${11:Float:streamdistance = STREAMER_ACTOR_SD}, ${12:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${13:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicActorEx':
'prefix': 'CreateDynamicActorEx'
'body': 'CreateDynamicActorEx(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:r}, ${6:invulnerable = 1}, ${7:Float:health = 100.0}, ${8:Float:streamdistance = STREAMER_ACTOR_SD}, ${9:const worlds[] = { -1 }}, ${10:const interiors[] = { -1 }}, ${11:const players[] = { -1 }}, ${12:const STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${13:priority = 0}, ${14:maxworlds = sizeof worlds}, ${15:maxinteriors = sizeof interiors}, ${16:maxplayers = sizeof players}, ${17:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnDynamicObjectMoved':
'prefix': 'OnDynamicObjectMoved'
'body': 'OnDynamicObjectMoved(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEditDynamicObject':
'prefix': 'OnPlayerEditDynamicObject'
'body': 'OnPlayerEditDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid}, ${3:response}, ${4:Float:x}, ${5:Float:y}, ${6:Float:z}, ${7:Float:rx}, ${8:Float:ry}, ${9:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerSelectDynamicObject':
'prefix': 'OnPlayerSelectDynamicObject'
'body': 'OnPlayerSelectDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid}, ${3:modelid}, ${4:Float:x}, ${5:Float:y}, ${6:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerShootDynamicObject':
'prefix': 'OnPlayerShootDynamicObject'
'body': 'OnPlayerShootDynamicObject(${1:playerid}, ${2:weaponid}, ${3:STREAMER_TAG_OBJECT:objectid}, ${4:Float:x}, ${5:Float:y}, ${6:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerPickUpDynamicPickup':
'prefix': 'OnPlayerPickUpDynamicPickup'
'body': 'OnPlayerPickUpDynamicPickup(${1:playerid}, ${2:STREAMER_TAG_PICKUP:pickupid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEnterDynamicCP':
'prefix': 'OnPlayerEnterDynamicCP'
'body': 'OnPlayerEnterDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerLeaveDynamicCP':
'prefix': 'OnPlayerLeaveDynamicCP'
'body': 'OnPlayerLeaveDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEnterDynamicRaceCP':
'prefix': 'OnPlayerEnterDynamicRaceCP'
'body': 'OnPlayerEnterDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerLeaveDynamicRaceCP':
'prefix': 'OnPlayerLeaveDynamicRaceCP'
'body': 'OnPlayerLeaveDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEnterDynamicArea':
'prefix': 'OnPlayerEnterDynamicArea'
'body': 'OnPlayerEnterDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerLeaveDynamicArea':
'prefix': 'OnPlayerLeaveDynamicArea'
'body': 'OnPlayerLeaveDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_OnItemStreamIn':
'prefix': 'Streamer_OnItemStreamIn'
'body': 'Streamer_OnItemStreamIn(${1:type}, ${2:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_OnItemStreamOut':
'prefix': 'Streamer_OnItemStreamOut'
'body': 'Streamer_OnItemStreamOut(${1:type}, ${2:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnDynamicActorStreamIn':
'prefix': 'OnDynamicActorStreamIn'
'body': 'OnDynamicActorStreamIn(${1:STREAMER_TAG_ACTOR:actorid}, ${2:forplayerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnDynamicActorStreamOut':
'prefix': 'OnDynamicActorStreamOut'
'body': 'OnDynamicActorStreamOut(${1:STREAMER_TAG_ACTOR:actorid}, ${2:forplayerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerGiveDamageDynamicActor':
'prefix': 'OnPlayerGiveDamageDynamicActor'
'body': 'OnPlayerGiveDamageDynamicActor(${1:playerid}, ${2:STREAMER_TAG_ACTOR:actorid}, ${3:Float:amount}, ${4:weaponid}, ${5:bodypart})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_OnPluginError':
'prefix': 'Streamer_OnPluginError'
'body': 'Streamer_OnPluginError(${1:error[]})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
# Incognito's Streamer snippets for Atom converted from Sublime Completions
# Converter created by Renato Garcia
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'Streamer_GetTickRate':
'prefix': 'Streamer_GetTickRate'
'body': 'Streamer_GetTickRate()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetTickRate':
'prefix': 'Streamer_SetTickRate'
'body': 'Streamer_SetTickRate(${1:rate})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetMaxItems':
'prefix': 'Streamer_GetMaxItems'
'body': 'Streamer_GetMaxItems(${1:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetMaxItems':
'prefix': 'Streamer_SetMaxItems'
'body': 'Streamer_SetMaxItems(${1:type}, ${2:items})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetVisibleItems':
'prefix': 'Streamer_GetVisibleItems'
'body': 'Streamer_GetVisibleItems(${1:type}, ${2:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetVisibleItems':
'prefix': 'Streamer_SetVisibleItems'
'body': 'Streamer_SetVisibleItems(${1:type}, ${2:items}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetRadiusMultiplier':
'prefix': 'Streamer_GetRadiusMultiplier'
'body': 'Streamer_GetRadiusMultiplier(${1:type}, ${2:Float:multiplier}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetRadiusMultiplier':
'prefix': 'Streamer_SetRadiusMultiplier'
'body': 'Streamer_SetRadiusMultiplier(${1:type}, ${2:Float:multiplier}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetTypePriority':
'prefix': 'Streamer_GetTypePriority'
'body': 'Streamer_GetTypePriority(${1:types[]}, ${2:maxtypes = sizeof types})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetTypePriority':
'prefix': 'Streamer_SetTypePriority'
'body': 'Streamer_SetTypePriority(${1:const types[]}, ${2:maxtypes = sizeof types})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetCellDistance':
'prefix': 'Streamer_GetCellDistance'
'body': 'Streamer_GetCellDistance(${1:Float:distance})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetCellDistance':
'prefix': 'Streamer_SetCellDistance'
'body': 'Streamer_SetCellDistance(${1:Float:distance})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetCellSize':
'prefix': 'Streamer_GetCellSize'
'body': 'Streamer_GetCellSize(${1:Float:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetCellSize':
'prefix': 'Streamer_SetCellSize'
'body': 'Streamer_SetCellSize(${1:Float:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleErrorCallback':
'prefix': 'Streamer_ToggleErrorCallback'
'body': 'Streamer_ToggleErrorCallback(${1:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleErrorCallback':
'prefix': 'Streamer_IsToggleErrorCallback'
'body': 'Streamer_IsToggleErrorCallback()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ProcessActiveItems':
'prefix': 'Streamer_ProcessActiveItems'
'body': 'Streamer_ProcessActiveItems()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleIdleUpdate':
'prefix': 'Streamer_ToggleIdleUpdate'
'body': 'Streamer_ToggleIdleUpdate(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleIdleUpdate':
'prefix': 'Streamer_IsToggleIdleUpdate'
'body': 'Streamer_IsToggleIdleUpdate(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleCameraUpdate':
'prefix': 'Streamer_ToggleCameraUpdate'
'body': 'Streamer_ToggleCameraUpdate(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleCameraUpdate':
'prefix': 'Streamer_IsToggleCameraUpdate'
'body': 'Streamer_IsToggleCameraUpdate(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemUpdate':
'prefix': 'Streamer_ToggleItemUpdate'
'body': 'Streamer_ToggleItemUpdate(${1:playerid}, ${2:type}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItemUpdate':
'prefix': 'Streamer_IsToggleItemUpdate'
'body': 'Streamer_IsToggleItemUpdate(${1:playerid}, ${2:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_Update':
'prefix': 'Streamer_Update'
'body': 'Streamer_Update(${1:playerid}, ${2:type = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_UpdateEx':
'prefix': 'Streamer_UpdateEx'
'body': 'Streamer_UpdateEx(${1:playerid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:type = -1}, ${8:compensatedtime = -1}, ${9:freezeplayer = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetFloatData':
'prefix': 'Streamer_GetFloatData'
'body': 'Streamer_GetFloatData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:Float:result})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetFloatData':
'prefix': 'Streamer_SetFloatData'
'body': 'Streamer_SetFloatData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:Float:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetIntData':
'prefix': 'Streamer_GetIntData'
'body': 'Streamer_GetIntData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetIntData':
'prefix': 'Streamer_SetIntData'
'body': 'Streamer_SetIntData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetArrayData':
'prefix': 'Streamer_GetArrayData'
'body': 'Streamer_GetArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:dest[]}, ${5:maxdest = sizeof dest})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetArrayData':
'prefix': 'Streamer_SetArrayData'
'body': 'Streamer_SetArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:const src[]}, ${5:maxsrc = sizeof src})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsInArrayData':
'prefix': 'Streamer_IsInArrayData'
'body': 'Streamer_IsInArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_AppendArrayData':
'prefix': 'Streamer_AppendArrayData'
'body': 'Streamer_AppendArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_RemoveArrayData':
'prefix': 'Streamer_RemoveArrayData'
'body': 'Streamer_RemoveArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetUpperBound':
'prefix': 'Streamer_GetUpperBound'
'body': 'Streamer_GetUpperBound(${1:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetDistanceToItem':
'prefix': 'Streamer_GetDistanceToItem'
'body': 'Streamer_GetDistanceToItem(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:STREAMER_ALL_TAGS:id}, ${6:Float:distance}, ${7:dimensions = 3})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemStatic':
'prefix': 'Streamer_ToggleItemStatic'
'body': 'Streamer_ToggleItemStatic(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItemStatic':
'prefix': 'Streamer_IsToggleItemStatic'
'body': 'Streamer_IsToggleItemStatic(${1:type}, ${2:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemInternalID':
'prefix': 'Streamer_GetItemInternalID'
'body': 'Streamer_GetItemInternalID(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:streamerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemStreamerID':
'prefix': 'Streamer_GetItemStreamerID'
'body': 'Streamer_GetItemStreamerID(${1:playerid}, ${2:type}, ${3:internalid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsItemVisible':
'prefix': 'Streamer_IsItemVisible'
'body': 'Streamer_IsItemVisible(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_DestroyAllVisibleItems':
'prefix': 'Streamer_DestroyAllVisibleItems'
'body': 'Streamer_DestroyAllVisibleItems(${1:playerid}, ${2:type}, ${3:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CountVisibleItems':
'prefix': 'Streamer_CountVisibleItems'
'body': 'Streamer_CountVisibleItems(${1:playerid}, ${2:type}, ${3:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_DestroyAllItems':
'prefix': 'Streamer_DestroyAllItems'
'body': 'Streamer_DestroyAllItems(${1:type}, ${2:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CountItems':
'prefix': 'Streamer_CountItems'
'body': 'Streamer_CountItems(${1:type}, ${2:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicObject':
'prefix': 'CreateDynamicObject'
'body': 'CreateDynamicObject(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:rx}, ${6:Float:ry}, ${7:Float:rz}, ${8:worldid = -1}, ${9:interiorid = -1}, ${10:playerid = -1}, ${11:Float:streamdistance = STREAMER_OBJECT_SD}, ${12:Float:drawdistance = STREAMER_OBJECT_DD}, ${13:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${14:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicObject':
'prefix': 'DestroyDynamicObject'
'body': 'DestroyDynamicObject(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicObject':
'prefix': 'IsValidDynamicObject'
'body': 'IsValidDynamicObject(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectPos':
'prefix': 'SetDynamicObjectPos'
'body': 'SetDynamicObjectPos(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectPos':
'prefix': 'GetDynamicObjectPos'
'body': 'GetDynamicObjectPos(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectRot':
'prefix': 'SetDynamicObjectRot'
'body': 'SetDynamicObjectRot(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:rx}, ${3:Float:ry}, ${4:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectRot':
'prefix': 'GetDynamicObjectRot'
'body': 'GetDynamicObjectRot(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:rx}, ${3:Float:ry}, ${4:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectNoCameraCol':
'prefix': 'SetDynamicObjectNoCameraCol'
'body': 'SetDynamicObjectNoCameraCol(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectNoCameraCol':
'prefix': 'GetDynamicObjectNoCameraCol'
'body': 'GetDynamicObjectNoCameraCol(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'MoveDynamicObject':
'prefix': 'MoveDynamicObject'
'body': 'MoveDynamicObject(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:speed}, ${6:Float:rx = -1000.0}, ${7:Float:ry = -1000.0}, ${8:Float:rz = -1000.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'StopDynamicObject':
'prefix': 'StopDynamicObject'
'body': 'StopDynamicObject(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicObjectMoving':
'prefix': 'IsDynamicObjectMoving'
'body': 'IsDynamicObjectMoving(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachCameraToDynamicObject':
'prefix': 'AttachCameraToDynamicObject'
'body': 'AttachCameraToDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicObjectToObject':
'prefix': 'AttachDynamicObjectToObject'
'body': 'AttachDynamicObjectToObject(${1:STREAMER_TAG_OBJECT:objectid}, ${2:attachtoid}, ${3:Float:offsetx}, ${4:Float:offsety}, ${5:Float:offsetz}, ${6:Float:rx}, ${7:Float:ry}, ${8:Float:rz}, ${9:syncrotation = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicObjectToPlayer':
'prefix': 'AttachDynamicObjectToPlayer'
'body': 'AttachDynamicObjectToPlayer(${1:STREAMER_TAG_OBJECT:objectid}, ${2:playerid}, ${3:Float:offsetx}, ${4:Float:offsety}, ${5:Float:offsetz}, ${6:Float:rx}, ${7:Float:ry}, ${8:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicObjectToVehicle':
'prefix': 'AttachDynamicObjectToVehicle'
'body': 'AttachDynamicObjectToVehicle(${1:STREAMER_TAG_OBJECT:objectid}, ${2:vehicleid}, ${3:Float:offsetx}, ${4:Float:offsety}, ${5:Float:offsetz}, ${6:Float:rx}, ${7:Float:ry}, ${8:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'EditDynamicObject':
'prefix': 'EditDynamicObject'
'body': 'EditDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicObjectMaterialUsed':
'prefix': 'IsDynamicObjectMaterialUsed'
'body': 'IsDynamicObjectMaterialUsed(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectMaterial':
'prefix': 'GetDynamicObjectMaterial'
'body': 'GetDynamicObjectMaterial(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:modelid}, ${4:txdname[]}, ${5:texturename[]}, ${6:materialcolor}, ${7:maxtxdname = sizeof txdname}, ${8:maxtexturename = sizeof texturename})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectMaterial':
'prefix': 'SetDynamicObjectMaterial'
'body': 'SetDynamicObjectMaterial(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:modelid}, ${4:const txdname[]}, ${5:const texturename[]}, ${6:materialcolor = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicObjectMaterialTextUsed':
'prefix': 'IsDynamicObjectMaterialTextUsed'
'body': 'IsDynamicObjectMaterialTextUsed(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectMaterialText':
'prefix': 'GetDynamicObjectMaterialText'
'body': 'GetDynamicObjectMaterialText(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:text[]}, ${4:materialsize}, ${5:fontface[]}, ${6:fontsize}, ${7:bold}, ${8:fontcolor}, ${9:backcolor}, ${10:textalignment}, ${11:maxtext = sizeof text}, ${12:maxfontface = sizeof fontface})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectMaterialText':
'prefix': 'SetDynamicObjectMaterialText'
'body': 'SetDynamicObjectMaterialText(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:const text[]}, ${4:materialsize = OBJECT_MATERIAL_SIZE_256x128}, ${5:const fontface[] = \"Arial\"}, ${6:fontsize = 24}, ${7:bold = 1}, ${8:fontcolor = 0xFFFFFFFF}, ${9:backcolor = 0}, ${10:textalignment = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPickup':
'prefix': 'CreateDynamicPickup'
'body': 'CreateDynamicPickup(${1:modelid}, ${2:type}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:worldid = -1}, ${7:interiorid = -1}, ${8:playerid = -1}, ${9:Float:streamdistance = STREAMER_PICKUP_SD}, ${10:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${11:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicPickup':
'prefix': 'DestroyDynamicPickup'
'body': 'DestroyDynamicPickup(${1:STREAMER_TAG_PICKUP:pickupid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicPickup':
'prefix': 'IsValidDynamicPickup'
'body': 'IsValidDynamicPickup(${1:STREAMER_TAG_PICKUP:pickupid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCP':
'prefix': 'CreateDynamicCP'
'body': 'CreateDynamicCP(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:Float:streamdistance = STREAMER_CP_SD}, ${9:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${10:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicCP':
'prefix': 'DestroyDynamicCP'
'body': 'DestroyDynamicCP(${1:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicCP':
'prefix': 'IsValidDynamicCP'
'body': 'IsValidDynamicCP(${1:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerDynamicCP':
'prefix': 'TogglePlayerDynamicCP'
'body': 'TogglePlayerDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerAllDynamicCPs':
'prefix': 'TogglePlayerAllDynamicCPs'
'body': 'TogglePlayerAllDynamicCPs(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInDynamicCP':
'prefix': 'IsPlayerInDynamicCP'
'body': 'IsPlayerInDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerVisibleDynamicCP':
'prefix': 'GetPlayerVisibleDynamicCP'
'body': 'GetPlayerVisibleDynamicCP(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRaceCP':
'prefix': 'CreateDynamicRaceCP'
'body': 'CreateDynamicRaceCP(${1:type}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:nextx}, ${6:Float:nexty}, ${7:Float:nextz}, ${8:Float:size}, ${9:worldid = -1}, ${10:interiorid = -1}, ${11:playerid = -1}, ${12:Float:streamdistance = STREAMER_RACE_CP_SD}, ${13:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${14:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicRaceCP':
'prefix': 'DestroyDynamicRaceCP'
'body': 'DestroyDynamicRaceCP(${1:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicRaceCP':
'prefix': 'IsValidDynamicRaceCP'
'body': 'IsValidDynamicRaceCP(${1:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerDynamicRaceCP':
'prefix': 'TogglePlayerDynamicRaceCP'
'body': 'TogglePlayerDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerAllDynamicRaceCPs':
'prefix': 'TogglePlayerAllDynamicRaceCPs'
'body': 'TogglePlayerAllDynamicRaceCPs(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInDynamicRaceCP':
'prefix': 'IsPlayerInDynamicRaceCP'
'body': 'IsPlayerInDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerVisibleDynamicRaceCP':
'prefix': 'GetPlayerVisibleDynamicRaceCP'
'body': 'GetPlayerVisibleDynamicRaceCP(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicMapIcon':
'prefix': 'CreateDynamicMapIcon'
'body': 'CreateDynamicMapIcon(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:color}, ${6:worldid = -1}, ${7:interiorid = -1}, ${8:playerid = -1}, ${9:Float:streamdistance = STREAMER_MAP_ICON_SD}, ${10:style = MAPICON_LOCAL}, ${11:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${12:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicMapIcon':
'prefix': 'DestroyDynamicMapIcon'
'body': 'DestroyDynamicMapIcon(${1:STREAMER_TAG_MAP_ICON:iconid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicMapIcon':
'prefix': 'IsValidDynamicMapIcon'
'body': 'IsValidDynamicMapIcon(${1:STREAMER_TAG_MAP_ICON:iconid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamic3DTextLabel':
'prefix': 'CreateDynamic3DTextLabel'
'body': 'CreateDynamic3DTextLabel(${1:const text[]}, ${2:color}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:Float:drawdistance}, ${7:attachedplayer = INVALID_PLAYER_ID}, ${8:attachedvehicle = INVALID_VEHICLE_ID}, ${9:testlos = 0}, ${10:worldid = -1}, ${11:interiorid = -1}, ${12:playerid = -1}, ${13:Float:streamdistance = STREAMER_3D_TEXT_LABEL_SD}, ${14:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${15:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamic3DTextLabel':
'prefix': 'DestroyDynamic3DTextLabel'
'body': 'DestroyDynamic3DTextLabel(${1:STREAMER_TAG_3D_TEXT_LABEL:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamic3DTextLabel':
'prefix': 'IsValidDynamic3DTextLabel'
'body': 'IsValidDynamic3DTextLabel(${1:STREAMER_TAG_3D_TEXT_LABEL:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamic3DTextLabelText':
'prefix': 'GetDynamic3DTextLabelText'
'body': 'GetDynamic3DTextLabelText(${1:STREAMER_TAG_3D_TEXT_LABEL:id}, ${2:text[]}, ${3:maxtext = sizeof text})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'UpdateDynamic3DTextLabelText':
'prefix': 'UpdateDynamic3DTextLabelText'
'body': 'UpdateDynamic3DTextLabelText(${1:STREAMER_TAG_3D_TEXT_LABEL:id}, ${2:color}, ${3:const text[]})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCircle':
'prefix': 'CreateDynamicCircle'
'body': 'CreateDynamicCircle(${1:Float:x}, ${2:Float:y}, ${3:Float:size}, ${4:worldid = -1}, ${5:interiorid = -1}, ${6:playerid = -1}, ${7:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCylinder':
'prefix': 'CreateDynamicCylinder'
'body': 'CreateDynamicCylinder(${1:Float:x}, ${2:Float:y}, ${3:Float:minz}, ${4:Float:maxz}, ${5:Float:size}, ${6:worldid = -1}, ${7:interiorid = -1}, ${8:playerid = -1}, ${9:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicSphere':
'prefix': 'CreateDynamicSphere'
'body': 'CreateDynamicSphere(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRectangle':
'prefix': 'CreateDynamicRectangle'
'body': 'CreateDynamicRectangle(${1:Float:minx}, ${2:Float:miny}, ${3:Float:maxx}, ${4:Float:maxy}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCuboid':
'prefix': 'CreateDynamicCuboid'
'body': 'CreateDynamicCuboid(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worldid = -1}, ${8:interiorid = -1}, ${9:playerid = -1}, ${10:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCube':
'prefix': 'CreateDynamicCube'
'body': 'CreateDynamicCube(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worldid = -1}, ${8:interiorid = -1}, ${9:playerid = -1}, ${10:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPolygon':
'prefix': 'CreateDynamicPolygon'
'body': 'CreateDynamicPolygon(${1:Float:points[]}, ${2:Float:minz = -FLOAT_INFINITY}, ${3:Float:maxz = FLOAT_INFINITY}, ${4:maxpoints = sizeof points}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicArea':
'prefix': 'DestroyDynamicArea'
'body': 'DestroyDynamicArea(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicArea':
'prefix': 'IsValidDynamicArea'
'body': 'IsValidDynamicArea(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicPolygonPoints':
'prefix': 'GetDynamicPolygonPoints'
'body': 'GetDynamicPolygonPoints(${1:STREAMER_TAG_AREA:areaid}, ${2:Float:points[]}, ${3:maxpoints = sizeof points})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicPolygonNumberPoints':
'prefix': 'GetDynamicPolygonNumberPoints'
'body': 'GetDynamicPolygonNumberPoints(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerDynamicArea':
'prefix': 'TogglePlayerDynamicArea'
'body': 'TogglePlayerDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerAllDynamicAreas':
'prefix': 'TogglePlayerAllDynamicAreas'
'body': 'TogglePlayerAllDynamicAreas(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInDynamicArea':
'prefix': 'IsPlayerInDynamicArea'
'body': 'IsPlayerInDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid}, ${3:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInAnyDynamicArea':
'prefix': 'IsPlayerInAnyDynamicArea'
'body': 'IsPlayerInAnyDynamicArea(${1:playerid}, ${2:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsAnyPlayerInDynamicArea':
'prefix': 'IsAnyPlayerInDynamicArea'
'body': 'IsAnyPlayerInDynamicArea(${1:STREAMER_TAG_AREA:areaid}, ${2:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsAnyPlayerInAnyDynamicArea':
'prefix': 'IsAnyPlayerInAnyDynamicArea'
'body': 'IsAnyPlayerInAnyDynamicArea(${1:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerDynamicAreas':
'prefix': 'GetPlayerDynamicAreas'
'body': 'GetPlayerDynamicAreas(${1:playerid}, ${2:STREAMER_TAG_AREA:areas[]}, ${3:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerNumberDynamicAreas':
'prefix': 'GetPlayerNumberDynamicAreas'
'body': 'GetPlayerNumberDynamicAreas(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPointInDynamicArea':
'prefix': 'IsPointInDynamicArea'
'body': 'IsPointInDynamicArea(${1:STREAMER_TAG_AREA:areaid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPointInAnyDynamicArea':
'prefix': 'IsPointInAnyDynamicArea'
'body': 'IsPointInAnyDynamicArea(${1:Float:x}, ${2:Float:y}, ${3:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicAreasForPoint':
'prefix': 'GetDynamicAreasForPoint'
'body': 'GetDynamicAreasForPoint(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:STREAMER_TAG_AREA:areas[]}, ${5:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetNumberDynamicAreasForPoint':
'prefix': 'GetNumberDynamicAreasForPoint'
'body': 'GetNumberDynamicAreasForPoint(${1:Float:x}, ${2:Float:y}, ${3:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicAreaToObject':
'prefix': 'AttachDynamicAreaToObject'
    'body': 'AttachDynamicAreaToObject(${1:STREAMER_TAG_AREA:areaid}, ${2:STREAMER_TAG_OBJECT_ALT:objectid}, ${3:type = STREAMER_OBJECT_TYPE_DYNAMIC}, ${4:playerid = INVALID_PLAYER_ID}, ${5:Float:offsetx = 0.0}, ${6:Float:offsety = 0.0}, ${7:Float:offsetz = 0.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicAreaToPlayer':
'prefix': 'AttachDynamicAreaToPlayer'
'body': 'AttachDynamicAreaToPlayer(${1:STREAMER_TAG_AREA:areaid}, ${2:playerid}, ${3:Float:offsetx = 0.0}, ${4:Float:offsety = 0.0}, ${5:Float:offsetz = 0.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicAreaToVehicle':
'prefix': 'AttachDynamicAreaToVehicle'
'body': 'AttachDynamicAreaToVehicle(${1:STREAMER_TAG_AREA:areaid}, ${2:vehicleid}, ${3:Float:offsetx = 0.0}, ${4:Float:offsety = 0.0}, ${5:Float:offsetz = 0.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicObjectEx':
'prefix': 'CreateDynamicObjectEx'
'body': 'CreateDynamicObjectEx(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:rx}, ${6:Float:ry}, ${7:Float:rz}, ${8:Float:streamdistance = STREAMER_OBJECT_SD}, ${9:Float:drawdistance = STREAMER_OBJECT_DD}, ${10:worlds[] = { -1 }}, ${11:interiors[] = { -1 }}, ${12:players[] = { -1 }}, ${13:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${14:priority = 0}, ${15:maxworlds = sizeof worlds}, ${16:maxinteriors = sizeof interiors}, ${17:maxplayers = sizeof players}, ${18:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPickupEx':
'prefix': 'CreateDynamicPickupEx'
'body': 'CreateDynamicPickupEx(${1:modelid}, ${2:type}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:Float:streamdistance = STREAMER_PICKUP_SD}, ${7:worlds[] = { -1 }}, ${8:interiors[] = { -1 }}, ${9:players[] = { -1 }}, ${10:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${11:priority = 0}, ${12:maxworlds = sizeof worlds}, ${13:maxinteriors = sizeof interiors}, ${14:maxplayers = sizeof players}, ${15:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCPEx':
'prefix': 'CreateDynamicCPEx'
'body': 'CreateDynamicCPEx(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:Float:streamdistance = STREAMER_CP_SD}, ${6:worlds[] = { -1 }}, ${7:interiors[] = { -1 }}, ${8:players[] = { -1 }}, ${9:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${10:priority = 0}, ${11:maxworlds = sizeof worlds}, ${12:maxinteriors = sizeof interiors}, ${13:maxplayers = sizeof players}, ${14:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRaceCPEx':
'prefix': 'CreateDynamicRaceCPEx'
'body': 'CreateDynamicRaceCPEx(${1:type}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:nextx}, ${6:Float:nexty}, ${7:Float:nextz}, ${8:Float:size}, ${9:Float:streamdistance = STREAMER_RACE_CP_SD}, ${10:worlds[] = { -1 }}, ${11:interiors[] = { -1 }}, ${12:players[] = { -1 }}, ${13:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${14:priority = 0}, ${15:maxworlds = sizeof worlds}, ${16:maxinteriors = sizeof interiors}, ${17:maxplayers = sizeof players}, ${18:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicMapIconEx':
'prefix': 'CreateDynamicMapIconEx'
'body': 'CreateDynamicMapIconEx(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:color}, ${6:style = MAPICON_LOCAL}, ${7:Float:streamdistance = STREAMER_MAP_ICON_SD}, ${8:worlds[] = { -1 }}, ${9:interiors[] = { -1 }}, ${10:players[] = { -1 }}, ${11:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${12:priority = 0}, ${13:maxworlds = sizeof worlds}, ${14:maxinteriors = sizeof interiors}, ${15:maxplayers = sizeof players}, ${16:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamic3DTextLabelEx':
'prefix': 'CreateDynamic3DTextLabelEx'
'body': 'CreateDynamic3DTextLabelEx(${1:const text[]}, ${2:color}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:Float:drawdistance}, ${7:attachedplayer = INVALID_PLAYER_ID}, ${8:attachedvehicle = INVALID_VEHICLE_ID}, ${9:testlos = 0}, ${10:Float:streamdistance = STREAMER_3D_TEXT_LABEL_SD}, ${11:worlds[] = { -1 }}, ${12:interiors[] = { -1 }}, ${13:players[] = { -1 }}, ${14:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${15:priority = 0}, ${16:maxworlds = sizeof worlds}, ${17:maxinteriors = sizeof interiors}, ${18:maxplayers = sizeof players}, ${19:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCircleEx':
'prefix': 'CreateDynamicCircleEx'
'body': 'CreateDynamicCircleEx(${1:Float:x}, ${2:Float:y}, ${3:Float:size}, ${4:worlds[] = { -1 }}, ${5:interiors[] = { -1 }}, ${6:players[] = { -1 }}, ${7:priority = 0}, ${8:maxworlds = sizeof worlds}, ${9:maxinteriors = sizeof interiors}, ${10:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCylinderEx':
'prefix': 'CreateDynamicCylinderEx'
'body': 'CreateDynamicCylinderEx(${1:Float:x}, ${2:Float:y}, ${3:Float:minz}, ${4:Float:maxz}, ${5:Float:size}, ${6:worlds[] = { -1 }}, ${7:interiors[] = { -1 }}, ${8:players[] = { -1 }}, ${9:priority = 0}, ${10:maxworlds = sizeof worlds}, ${11:maxinteriors = sizeof interiors}, ${12:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicSphereEx':
'prefix': 'CreateDynamicSphereEx'
'body': 'CreateDynamicSphereEx(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:worlds[] = { -1 }}, ${6:interiors[] = { -1 }}, ${7:players[] = { -1 }}, ${8:priority = 0}, ${9:maxworlds = sizeof worlds}, ${10:maxinteriors = sizeof interiors}, ${11:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRectangleEx':
'prefix': 'CreateDynamicRectangleEx'
'body': 'CreateDynamicRectangleEx(${1:Float:minx}, ${2:Float:miny}, ${3:Float:maxx}, ${4:Float:maxy}, ${5:worlds[] = { -1 }}, ${6:interiors[] = { -1 }}, ${7:players[] = { -1 }}, ${8:priority = 0}, ${9:maxworlds = sizeof worlds}, ${10:maxinteriors = sizeof interiors}, ${11:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCuboidEx':
'prefix': 'CreateDynamicCuboidEx'
'body': 'CreateDynamicCuboidEx(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worlds[] = { -1 }}, ${8:interiors[] = { -1 }}, ${9:players[] = { -1 }}, ${10:priority = 0}, ${11:maxworlds = sizeof worlds}, ${12:maxinteriors = sizeof interiors}, ${13:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCubeEx':
'prefix': 'CreateDynamicCubeEx'
'body': 'CreateDynamicCubeEx(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worlds[] = { -1 }}, ${8:interiors[] = { -1 }}, ${9:players[] = { -1 }}, ${10:priority = 0}, ${11:maxworlds = sizeof worlds}, ${12:maxinteriors = sizeof interiors}, ${13:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPolygonEx':
'prefix': 'CreateDynamicPolygonEx'
'body': 'CreateDynamicPolygonEx(${1:Float:points[]}, ${2:Float:minz = -FLOAT_INFINITY}, ${3:Float:maxz = FLOAT_INFINITY}, ${4:maxpoints = sizeof points}, ${5:worlds[] = { -1 }}, ${6:interiors[] = { -1 }}, ${7:players[] = { -1 }}, ${8:priority = 0}, ${9:maxworlds = sizeof worlds}, ${10:maxinteriors = sizeof interiors}, ${11:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_TickRate':
'prefix': 'Streamer_TickRate'
'body': 'Streamer_TickRate(${1:rate})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_MaxItems':
'prefix': 'Streamer_MaxItems'
'body': 'Streamer_MaxItems(${1:type}, ${2:items})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_VisibleItems':
'prefix': 'Streamer_VisibleItems'
'body': 'Streamer_VisibleItems(${1:type}, ${2:items}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CellDistance':
'prefix': 'Streamer_CellDistance'
'body': 'Streamer_CellDistance(${1:Float:distance})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CellSize':
'prefix': 'Streamer_CellSize'
'body': 'Streamer_CellSize(${1:Float:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CallbackHook':
'prefix': 'Streamer_CallbackHook'
'body': 'Streamer_CallbackHook(${1:callback}, ${2:...})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicObjects':
'prefix': 'DestroyAllDynamicObjects'
'body': 'DestroyAllDynamicObjects()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicObjects':
'prefix': 'CountDynamicObjects'
'body': 'CountDynamicObjects()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicPickups':
'prefix': 'DestroyAllDynamicPickups'
'body': 'DestroyAllDynamicPickups()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicPickups':
'prefix': 'CountDynamicPickups'
'body': 'CountDynamicPickups()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicCPs':
'prefix': 'DestroyAllDynamicCPs'
'body': 'DestroyAllDynamicCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicCPs':
'prefix': 'CountDynamicCPs'
'body': 'CountDynamicCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicRaceCPs':
'prefix': 'DestroyAllDynamicRaceCPs'
'body': 'DestroyAllDynamicRaceCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicRaceCPs':
'prefix': 'CountDynamicRaceCPs'
'body': 'CountDynamicRaceCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicMapIcons':
'prefix': 'DestroyAllDynamicMapIcons'
'body': 'DestroyAllDynamicMapIcons()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicMapIcons':
'prefix': 'CountDynamicMapIcons'
'body': 'CountDynamicMapIcons()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamic3DTextLabels':
'prefix': 'DestroyAllDynamic3DTextLabels'
'body': 'DestroyAllDynamic3DTextLabels()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamic3DTextLabels':
'prefix': 'CountDynamic3DTextLabels'
'body': 'CountDynamic3DTextLabels()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicAreas':
'prefix': 'DestroyAllDynamicAreas'
'body': 'DestroyAllDynamicAreas()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicAreas':
'prefix': 'CountDynamicAreas'
'body': 'CountDynamicAreas()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleChunkStream':
'prefix': 'Streamer_IsToggleChunkStream'
'body': 'Streamer_IsToggleChunkStream()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetLastUpdateTime':
'prefix': 'Streamer_GetLastUpdateTime'
'body': 'Streamer_GetLastUpdateTime(${1:&Float:time})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetChunkSize':
'prefix': 'Streamer_GetChunkSize'
'body': 'Streamer_GetChunkSize(${1:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetPlayerTickRate':
'prefix': 'Streamer_GetPlayerTickRate'
'body': 'Streamer_GetPlayerTickRate(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicActor':
'prefix': 'DestroyDynamicActor'
'body': 'DestroyDynamicActor(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicActor':
'prefix': 'IsValidDynamicActor'
'body': 'IsValidDynamicActor(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorVirtualWorld':
'prefix': 'GetDynamicActorVirtualWorld'
'body': 'GetDynamicActorVirtualWorld(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'ClearDynamicActorAnimations':
'prefix': 'ClearDynamicActorAnimations'
'body': 'ClearDynamicActorAnimations(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicActorInvulnerable':
'prefix': 'IsDynamicActorInvulnerable'
'body': 'IsDynamicActorInvulnerable(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerTargetDynamicActor':
'prefix': 'GetPlayerTargetDynamicActor'
'body': 'GetPlayerTargetDynamicActor(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerCameraTargetDynActor':
'prefix': 'GetPlayerCameraTargetDynActor'
'body': 'GetPlayerCameraTargetDynActor(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsToggleDynAreaSpectateMode':
'prefix': 'IsToggleDynAreaSpectateMode'
'body': 'IsToggleDynAreaSpectateMode(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerCameraTargetDynObject':
'prefix': 'GetPlayerCameraTargetDynObject'
'body': 'GetPlayerCameraTargetDynObject(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleChunkStream':
'prefix': 'Streamer_ToggleChunkStream'
'body': 'Streamer_ToggleChunkStream(${1:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicAreaType':
'prefix': 'GetDynamicAreaType'
'body': 'GetDynamicAreaType(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_AmxUnloadDestroyItems':
'prefix': 'Streamer_AmxUnloadDestroyItems'
'body': 'Streamer_AmxUnloadDestroyItems(${1:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItemInvAreas':
'prefix': 'Streamer_IsToggleItemInvAreas'
'body': 'Streamer_IsToggleItemInvAreas(${1:type}, ${2:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetChunkTickRate':
'prefix': 'Streamer_GetChunkTickRate'
'body': 'Streamer_GetChunkTickRate(${1:type}, ${2:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetChunkSize':
'prefix': 'Streamer_SetChunkSize'
'body': 'Streamer_SetChunkSize(${1:type}, ${2:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetPlayerTickRate':
'prefix': 'Streamer_SetPlayerTickRate'
'body': 'Streamer_SetPlayerTickRate(${1:playerid}, ${2:rate})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicActorStreamedIn':
'prefix': 'IsDynamicActorStreamedIn'
'body': 'IsDynamicActorStreamedIn(${1:STREAMER_TAG_ACTOR:actorid}, ${2:forplayerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorVirtualWorld':
'prefix': 'SetDynamicActorVirtualWorld'
'body': 'SetDynamicActorVirtualWorld(${1:STREAMER_TAG_ACTOR:actorid}, ${2:vworld})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorFacingAngle':
'prefix': 'GetDynamicActorFacingAngle'
'body': 'GetDynamicActorFacingAngle(${1:STREAMER_TAG_ACTOR:actorid}, ${2:&Float:ang})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorFacingAngle':
'prefix': 'SetDynamicActorFacingAngle'
'body': 'SetDynamicActorFacingAngle(${1:STREAMER_TAG_ACTOR:actorid}, ${2:Float:ang})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorHealth':
'prefix': 'GetDynamicActorHealth'
'body': 'GetDynamicActorHealth(${1:STREAMER_TAG_ACTOR:actorid}, ${2:&Float:health})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorHealth':
'prefix': 'SetDynamicActorHealth'
'body': 'SetDynamicActorHealth(${1:STREAMER_TAG_ACTOR:actorid}, ${2:Float:health})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorInvulnerable':
'prefix': 'SetDynamicActorInvulnerable'
'body': 'SetDynamicActorInvulnerable(${1:STREAMER_TAG_ACTOR:actorid}, ${2:invulnerable = true})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'ToggleDynAreaSpectateMode':
'prefix': 'ToggleDynAreaSpectateMode'
'body': 'ToggleDynAreaSpectateMode(${1:STREAMER_TAG_AREA:areaid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemInvAreas':
'prefix': 'Streamer_ToggleItemInvAreas'
'body': 'Streamer_ToggleItemInvAreas(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItem':
'prefix': 'Streamer_IsToggleItem'
'body': 'Streamer_IsToggleItem(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetChunkTickRate':
'prefix': 'Streamer_SetChunkTickRate'
'body': 'Streamer_SetChunkTickRate(${1:type}, ${2:rate}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemCallbacks':
'prefix': 'Streamer_ToggleItemCallbacks'
'body': 'Streamer_ToggleItemCallbacks(${1:type}, ${2:id}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetArrayDataLength':
'prefix': 'Streamer_GetArrayDataLength'
'body': 'Streamer_GetArrayDataLength(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItem':
'prefix': 'Streamer_ToggleItem'
'body': 'Streamer_ToggleItem(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:id}, ${4:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorPos':
'prefix': 'GetDynamicActorPos'
'body': 'GetDynamicActorPos(${1:STREAMER_TAG_ACTOR:actorid}, ${2:&Float:x}, ${3:&Float:y}, ${4:&Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorPos':
'prefix': 'SetDynamicActorPos'
'body': 'SetDynamicActorPos(${1:STREAMER_TAG_ACTOR:actorid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetAllVisibleItems':
'prefix': 'Streamer_GetAllVisibleItems'
'body': 'Streamer_GetAllVisibleItems(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:items[]}, ${4:maxitems = sizeof items})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleAllItems':
'prefix': 'Streamer_ToggleAllItems'
'body': 'Streamer_ToggleAllItems(${1:playerid}, ${2:type}, ${3:toggle}, ${4:const exceptions[] = { -1 }}, ${5:maxexceptions = sizeof exceptions})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemOffset':
'prefix': 'Streamer_GetItemOffset'
'body': 'Streamer_GetItemOffset(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:&Float:x}, ${4:&Float:y}, ${5:&Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetItemOffset':
'prefix': 'Streamer_SetItemOffset'
'body': 'Streamer_SetItemOffset(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemPos':
'prefix': 'Streamer_GetItemPos'
'body': 'Streamer_GetItemPos(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:&Float:x}, ${4:&Float:y}, ${5:&Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetItemPos':
'prefix': 'Streamer_SetItemPos'
'body': 'Streamer_SetItemPos(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsLineInAnyDynamicArea':
'prefix': 'IsLineInAnyDynamicArea'
'body': 'IsLineInAnyDynamicArea(${1:Float:x1}, ${2:Float:y1}, ${3:Float:z1}, ${4:Float:x2}, ${5:Float:y2}, ${6:Float:z2})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetNumberDynamicAreasForLine':
'prefix': 'GetNumberDynamicAreasForLine'
'body': 'GetNumberDynamicAreasForLine(${1:Float:x1}, ${2:Float:y1}, ${3:Float:z1}, ${4:Float:x2}, ${5:Float:y2}, ${6:Float:z2})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsLineInDynamicArea':
'prefix': 'IsLineInDynamicArea'
'body': 'IsLineInDynamicArea(${1:STREAMER_TAG_AREA:areaid}, ${2:Float:x1}, ${3:Float:y1}, ${4:Float:z1}, ${5:Float:x2}, ${6:Float:y2}, ${7:Float:z2})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicAreasForLine':
'prefix': 'GetDynamicAreasForLine'
'body': 'GetDynamicAreasForLine(${1:Float:x1}, ${2:Float:y1}, ${3:Float:z1}, ${4:Float:x2}, ${5:Float:y2}, ${6:Float:z2}, ${7:STREAMER_TAG_AREA:areas[]}, ${8:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetNearbyItems':
'prefix': 'Streamer_GetNearbyItems'
'body': 'Streamer_GetNearbyItems(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:STREAMER_ALL_TAGS:items[]}, ${6:maxitems = sizeof items}, ${7:Float:range = 300.0}, ${8:worldid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'ApplyDynamicActorAnimation':
'prefix': 'ApplyDynamicActorAnimation'
'body': 'ApplyDynamicActorAnimation(${1:STREAMER_TAG_ACTOR:actorid}, ${2:const animlib[]}, ${3:const animname[]}, ${4:Float:fdelta}, ${5:loop}, ${6:lockx}, ${7:locky}, ${8:freeze}, ${9:time})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorAnimation':
'prefix': 'GetDynamicActorAnimation'
'body': 'GetDynamicActorAnimation(${1:STREAMER_TAG_ACTOR:actorid}, ${2:animlib[]}, ${3:animname[]}, ${4:&Float:fdelta}, ${5:&loop}, ${6:&lockx}, ${7:&locky}, ${8:&freeze}, ${9:&time}, ${10:maxanimlib = sizeof animlib}, ${11:maxanimname = sizeof animname})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicActor':
'prefix': 'CreateDynamicActor'
'body': 'CreateDynamicActor(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:r}, ${6:invulnerable = true}, ${7:Float:health = 100.0}, ${8:worldid = -1}, ${9:interiorid = -1}, ${10:playerid = -1}, ${11:Float:streamdistance = STREAMER_ACTOR_SD}, ${12:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${13:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicActorEx':
'prefix': 'CreateDynamicActorEx'
'body': 'CreateDynamicActorEx(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:r}, ${6:invulnerable = 1}, ${7:Float:health = 100.0}, ${8:Float:streamdistance = STREAMER_ACTOR_SD}, ${9:const worlds[] = { -1 }}, ${10:const interiors[] = { -1 }}, ${11:const players[] = { -1 }}, ${12:const STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${13:priority = 0}, ${14:maxworlds = sizeof worlds}, ${15:maxinteriors = sizeof interiors}, ${16:maxplayers = sizeof players}, ${17:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnDynamicObjectMoved':
'prefix': 'OnDynamicObjectMoved'
'body': 'OnDynamicObjectMoved(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEditDynamicObject':
'prefix': 'OnPlayerEditDynamicObject'
'body': 'OnPlayerEditDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid}, ${3:response}, ${4:Float:x}, ${5:Float:y}, ${6:Float:z}, ${7:Float:rx}, ${8:Float:ry}, ${9:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerSelectDynamicObject':
'prefix': 'OnPlayerSelectDynamicObject'
'body': 'OnPlayerSelectDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid}, ${3:modelid}, ${4:Float:x}, ${5:Float:y}, ${6:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerShootDynamicObject':
'prefix': 'OnPlayerShootDynamicObject'
'body': 'OnPlayerShootDynamicObject(${1:playerid}, ${2:weaponid}, ${3:STREAMER_TAG_OBJECT:objectid}, ${4:Float:x}, ${5:Float:y}, ${6:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerPickUpDynamicPickup':
'prefix': 'OnPlayerPickUpDynamicPickup'
'body': 'OnPlayerPickUpDynamicPickup(${1:playerid}, ${2:STREAMER_TAG_PICKUP:pickupid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEnterDynamicCP':
'prefix': 'OnPlayerEnterDynamicCP'
'body': 'OnPlayerEnterDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerLeaveDynamicCP':
'prefix': 'OnPlayerLeaveDynamicCP'
'body': 'OnPlayerLeaveDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEnterDynamicRaceCP':
'prefix': 'OnPlayerEnterDynamicRaceCP'
'body': 'OnPlayerEnterDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerLeaveDynamicRaceCP':
'prefix': 'OnPlayerLeaveDynamicRaceCP'
'body': 'OnPlayerLeaveDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEnterDynamicArea':
'prefix': 'OnPlayerEnterDynamicArea'
'body': 'OnPlayerEnterDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerLeaveDynamicArea':
'prefix': 'OnPlayerLeaveDynamicArea'
'body': 'OnPlayerLeaveDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_OnItemStreamIn':
'prefix': 'Streamer_OnItemStreamIn'
'body': 'Streamer_OnItemStreamIn(${1:type}, ${2:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_OnItemStreamOut':
'prefix': 'Streamer_OnItemStreamOut'
'body': 'Streamer_OnItemStreamOut(${1:type}, ${2:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnDynamicActorStreamIn':
'prefix': 'OnDynamicActorStreamIn'
'body': 'OnDynamicActorStreamIn(${1:STREAMER_TAG_ACTOR:actorid}, ${2:forplayerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnDynamicActorStreamOut':
'prefix': 'OnDynamicActorStreamOut'
'body': 'OnDynamicActorStreamOut(${1:STREAMER_TAG_ACTOR:actorid}, ${2:forplayerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerGiveDamageDynamicActor':
'prefix': 'OnPlayerGiveDamageDynamicActor'
'body': 'OnPlayerGiveDamageDynamicActor(${1:playerid}, ${2:STREAMER_TAG_ACTOR:actorid}, ${3:Float:amount}, ${4:weaponid}, ${5:bodypart})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_OnPluginError':
'prefix': 'Streamer_OnPluginError'
'body': 'Streamer_OnPluginError(${1:error[]})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
| true | # Incognito's Streamer snippets for Atom converted from Sublime Completions
# Converter created by Renato Garcia
# Repo: https://github.com/Renato-Garcia/sublime-completions-to-atom-snippets
'.source.pwn, .source.inc':
'Streamer_GetTickRate':
'prefix': 'Streamer_GetTickRate'
'body': 'Streamer_GetTickRate()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetTickRate':
'prefix': 'Streamer_SetTickRate'
'body': 'Streamer_SetTickRate(${1:rate})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetMaxItems':
'prefix': 'Streamer_GetMaxItems'
'body': 'Streamer_GetMaxItems(${1:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetMaxItems':
'prefix': 'Streamer_SetMaxItems'
'body': 'Streamer_SetMaxItems(${1:type}, ${2:items})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetVisibleItems':
'prefix': 'Streamer_GetVisibleItems'
'body': 'Streamer_GetVisibleItems(${1:type}, ${2:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetVisibleItems':
'prefix': 'Streamer_SetVisibleItems'
'body': 'Streamer_SetVisibleItems(${1:type}, ${2:items}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetRadiusMultiplier':
'prefix': 'Streamer_GetRadiusMultiplier'
'body': 'Streamer_GetRadiusMultiplier(${1:type}, ${2:Float:multiplier}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetRadiusMultiplier':
'prefix': 'Streamer_SetRadiusMultiplier'
'body': 'Streamer_SetRadiusMultiplier(${1:type}, ${2:Float:multiplier}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetTypePriority':
'prefix': 'Streamer_GetTypePriority'
'body': 'Streamer_GetTypePriority(${1:types[]}, ${2:maxtypes = sizeof types})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetTypePriority':
'prefix': 'Streamer_SetTypePriority'
'body': 'Streamer_SetTypePriority(${1:const types[]}, ${2:maxtypes = sizeof types})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetCellDistance':
'prefix': 'Streamer_GetCellDistance'
'body': 'Streamer_GetCellDistance(${1:Float:distance})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetCellDistance':
'prefix': 'Streamer_SetCellDistance'
'body': 'Streamer_SetCellDistance(${1:Float:distance})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetCellSize':
'prefix': 'Streamer_GetCellSize'
'body': 'Streamer_GetCellSize(${1:Float:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetCellSize':
'prefix': 'Streamer_SetCellSize'
'body': 'Streamer_SetCellSize(${1:Float:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleErrorCallback':
'prefix': 'Streamer_ToggleErrorCallback'
'body': 'Streamer_ToggleErrorCallback(${1:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleErrorCallback':
'prefix': 'Streamer_IsToggleErrorCallback'
'body': 'Streamer_IsToggleErrorCallback()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ProcessActiveItems':
'prefix': 'Streamer_ProcessActiveItems'
'body': 'Streamer_ProcessActiveItems()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleIdleUpdate':
'prefix': 'Streamer_ToggleIdleUpdate'
'body': 'Streamer_ToggleIdleUpdate(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleIdleUpdate':
'prefix': 'Streamer_IsToggleIdleUpdate'
'body': 'Streamer_IsToggleIdleUpdate(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleCameraUpdate':
'prefix': 'Streamer_ToggleCameraUpdate'
'body': 'Streamer_ToggleCameraUpdate(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleCameraUpdate':
'prefix': 'Streamer_IsToggleCameraUpdate'
'body': 'Streamer_IsToggleCameraUpdate(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemUpdate':
'prefix': 'Streamer_ToggleItemUpdate'
'body': 'Streamer_ToggleItemUpdate(${1:playerid}, ${2:type}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItemUpdate':
'prefix': 'Streamer_IsToggleItemUpdate'
'body': 'Streamer_IsToggleItemUpdate(${1:playerid}, ${2:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_Update':
'prefix': 'Streamer_Update'
'body': 'Streamer_Update(${1:playerid}, ${2:type = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_UpdateEx':
'prefix': 'Streamer_UpdateEx'
'body': 'Streamer_UpdateEx(${1:playerid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:type = -1}, ${8:compensatedtime = -1}, ${9:freezeplayer = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetFloatData':
'prefix': 'Streamer_GetFloatData'
'body': 'Streamer_GetFloatData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:Float:result})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetFloatData':
'prefix': 'Streamer_SetFloatData'
'body': 'Streamer_SetFloatData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:Float:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetIntData':
'prefix': 'Streamer_GetIntData'
'body': 'Streamer_GetIntData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetIntData':
'prefix': 'Streamer_SetIntData'
'body': 'Streamer_SetIntData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetArrayData':
'prefix': 'Streamer_GetArrayData'
'body': 'Streamer_GetArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:dest[]}, ${5:maxdest = sizeof dest})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetArrayData':
'prefix': 'Streamer_SetArrayData'
'body': 'Streamer_SetArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:const src[]}, ${5:maxsrc = sizeof src})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsInArrayData':
'prefix': 'Streamer_IsInArrayData'
'body': 'Streamer_IsInArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_AppendArrayData':
'prefix': 'Streamer_AppendArrayData'
'body': 'Streamer_AppendArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_RemoveArrayData':
'prefix': 'Streamer_RemoveArrayData'
'body': 'Streamer_RemoveArrayData(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data}, ${4:value})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetUpperBound':
'prefix': 'Streamer_GetUpperBound'
'body': 'Streamer_GetUpperBound(${1:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetDistanceToItem':
'prefix': 'Streamer_GetDistanceToItem'
'body': 'Streamer_GetDistanceToItem(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:STREAMER_ALL_TAGS:id}, ${6:Float:distance}, ${7:dimensions = 3})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemStatic':
'prefix': 'Streamer_ToggleItemStatic'
'body': 'Streamer_ToggleItemStatic(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItemStatic':
'prefix': 'Streamer_IsToggleItemStatic'
'body': 'Streamer_IsToggleItemStatic(${1:type}, ${2:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemInternalID':
'prefix': 'Streamer_GetItemInternalID'
'body': 'Streamer_GetItemInternalID(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:streamerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemStreamerID':
'prefix': 'Streamer_GetItemStreamerID'
'body': 'Streamer_GetItemStreamerID(${1:playerid}, ${2:type}, ${3:internalid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsItemVisible':
'prefix': 'Streamer_IsItemVisible'
'body': 'Streamer_IsItemVisible(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_DestroyAllVisibleItems':
'prefix': 'Streamer_DestroyAllVisibleItems'
'body': 'Streamer_DestroyAllVisibleItems(${1:playerid}, ${2:type}, ${3:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CountVisibleItems':
'prefix': 'Streamer_CountVisibleItems'
'body': 'Streamer_CountVisibleItems(${1:playerid}, ${2:type}, ${3:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_DestroyAllItems':
'prefix': 'Streamer_DestroyAllItems'
'body': 'Streamer_DestroyAllItems(${1:type}, ${2:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CountItems':
'prefix': 'Streamer_CountItems'
'body': 'Streamer_CountItems(${1:type}, ${2:serverwide = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicObject':
'prefix': 'CreateDynamicObject'
'body': 'CreateDynamicObject(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:rx}, ${6:Float:ry}, ${7:Float:rz}, ${8:worldid = -1}, ${9:interiorid = -1}, ${10:playerid = -1}, ${11:Float:streamdistance = STREAMER_OBJECT_SD}, ${12:Float:drawdistance = STREAMER_OBJECT_DD}, ${13:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${14:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicObject':
'prefix': 'DestroyDynamicObject'
'body': 'DestroyDynamicObject(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicObject':
'prefix': 'IsValidDynamicObject'
'body': 'IsValidDynamicObject(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectPos':
'prefix': 'SetDynamicObjectPos'
'body': 'SetDynamicObjectPos(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectPos':
'prefix': 'GetDynamicObjectPos'
'body': 'GetDynamicObjectPos(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectRot':
'prefix': 'SetDynamicObjectRot'
'body': 'SetDynamicObjectRot(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:rx}, ${3:Float:ry}, ${4:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectRot':
'prefix': 'GetDynamicObjectRot'
'body': 'GetDynamicObjectRot(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:rx}, ${3:Float:ry}, ${4:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectNoCameraCol':
'prefix': 'SetDynamicObjectNoCameraCol'
'body': 'SetDynamicObjectNoCameraCol(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectNoCameraCol':
'prefix': 'GetDynamicObjectNoCameraCol'
'body': 'GetDynamicObjectNoCameraCol(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'MoveDynamicObject':
'prefix': 'MoveDynamicObject'
'body': 'MoveDynamicObject(${1:STREAMER_TAG_OBJECT:objectid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:speed}, ${6:Float:rx = -1000.0}, ${7:Float:ry = -1000.0}, ${8:Float:rz = -1000.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'StopDynamicObject':
'prefix': 'StopDynamicObject'
'body': 'StopDynamicObject(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicObjectMoving':
'prefix': 'IsDynamicObjectMoving'
'body': 'IsDynamicObjectMoving(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachCameraToDynamicObject':
'prefix': 'AttachCameraToDynamicObject'
'body': 'AttachCameraToDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicObjectToObject':
'prefix': 'AttachDynamicObjectToObject'
'body': 'AttachDynamicObjectToObject(${1:STREAMER_TAG_OBJECT:objectid}, ${2:attachtoid}, ${3:Float:offsetx}, ${4:Float:offsety}, ${5:Float:offsetz}, ${6:Float:rx}, ${7:Float:ry}, ${8:Float:rz}, ${9:syncrotation = 1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicObjectToPlayer':
'prefix': 'AttachDynamicObjectToPlayer'
'body': 'AttachDynamicObjectToPlayer(${1:STREAMER_TAG_OBJECT:objectid}, ${2:playerid}, ${3:Float:offsetx}, ${4:Float:offsety}, ${5:Float:offsetz}, ${6:Float:rx}, ${7:Float:ry}, ${8:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicObjectToVehicle':
'prefix': 'AttachDynamicObjectToVehicle'
'body': 'AttachDynamicObjectToVehicle(${1:STREAMER_TAG_OBJECT:objectid}, ${2:vehicleid}, ${3:Float:offsetx}, ${4:Float:offsety}, ${5:Float:offsetz}, ${6:Float:rx}, ${7:Float:ry}, ${8:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'EditDynamicObject':
'prefix': 'EditDynamicObject'
'body': 'EditDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicObjectMaterialUsed':
'prefix': 'IsDynamicObjectMaterialUsed'
'body': 'IsDynamicObjectMaterialUsed(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectMaterial':
'prefix': 'GetDynamicObjectMaterial'
'body': 'GetDynamicObjectMaterial(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:modelid}, ${4:txdname[]}, ${5:texturename[]}, ${6:materialcolor}, ${7:maxtxdname = sizeof txdname}, ${8:maxtexturename = sizeof texturename})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectMaterial':
'prefix': 'SetDynamicObjectMaterial'
'body': 'SetDynamicObjectMaterial(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:modelid}, ${4:const txdname[]}, ${5:const texturename[]}, ${6:materialcolor = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicObjectMaterialTextUsed':
'prefix': 'IsDynamicObjectMaterialTextUsed'
'body': 'IsDynamicObjectMaterialTextUsed(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicObjectMaterialText':
'prefix': 'GetDynamicObjectMaterialText'
'body': 'GetDynamicObjectMaterialText(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:text[]}, ${4:materialsize}, ${5:fontface[]}, ${6:fontsize}, ${7:bold}, ${8:fontcolor}, ${9:backcolor}, ${10:textalignment}, ${11:maxtext = sizeof text}, ${12:maxfontface = sizeof fontface})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicObjectMaterialText':
'prefix': 'SetDynamicObjectMaterialText'
'body': 'SetDynamicObjectMaterialText(${1:STREAMER_TAG_OBJECT:objectid}, ${2:materialindex}, ${3:const text[]}, ${4:materialsize = OBJECT_MATERIAL_SIZE_256x128}, ${5:const fontface[] = \"Arial\"}, ${6:fontsize = 24}, ${7:bold = 1}, ${8:fontcolor = 0xFFFFFFFF}, ${9:backcolor = 0}, ${10:textalignment = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPickup':
'prefix': 'CreateDynamicPickup'
'body': 'CreateDynamicPickup(${1:modelid}, ${2:type}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:worldid = -1}, ${7:interiorid = -1}, ${8:playerid = -1}, ${9:Float:streamdistance = STREAMER_PICKUP_SD}, ${10:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${11:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicPickup':
'prefix': 'DestroyDynamicPickup'
'body': 'DestroyDynamicPickup(${1:STREAMER_TAG_PICKUP:pickupid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicPickup':
'prefix': 'IsValidDynamicPickup'
'body': 'IsValidDynamicPickup(${1:STREAMER_TAG_PICKUP:pickupid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCP':
'prefix': 'CreateDynamicCP'
'body': 'CreateDynamicCP(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:Float:streamdistance = STREAMER_CP_SD}, ${9:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${10:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicCP':
'prefix': 'DestroyDynamicCP'
'body': 'DestroyDynamicCP(${1:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicCP':
'prefix': 'IsValidDynamicCP'
'body': 'IsValidDynamicCP(${1:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerDynamicCP':
'prefix': 'TogglePlayerDynamicCP'
'body': 'TogglePlayerDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerAllDynamicCPs':
'prefix': 'TogglePlayerAllDynamicCPs'
'body': 'TogglePlayerAllDynamicCPs(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInDynamicCP':
'prefix': 'IsPlayerInDynamicCP'
'body': 'IsPlayerInDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerVisibleDynamicCP':
'prefix': 'GetPlayerVisibleDynamicCP'
'body': 'GetPlayerVisibleDynamicCP(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRaceCP':
'prefix': 'CreateDynamicRaceCP'
'body': 'CreateDynamicRaceCP(${1:type}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:nextx}, ${6:Float:nexty}, ${7:Float:nextz}, ${8:Float:size}, ${9:worldid = -1}, ${10:interiorid = -1}, ${11:playerid = -1}, ${12:Float:streamdistance = STREAMER_RACE_CP_SD}, ${13:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${14:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicRaceCP':
'prefix': 'DestroyDynamicRaceCP'
'body': 'DestroyDynamicRaceCP(${1:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicRaceCP':
'prefix': 'IsValidDynamicRaceCP'
'body': 'IsValidDynamicRaceCP(${1:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerDynamicRaceCP':
'prefix': 'TogglePlayerDynamicRaceCP'
'body': 'TogglePlayerDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerAllDynamicRaceCPs':
'prefix': 'TogglePlayerAllDynamicRaceCPs'
'body': 'TogglePlayerAllDynamicRaceCPs(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInDynamicRaceCP':
'prefix': 'IsPlayerInDynamicRaceCP'
'body': 'IsPlayerInDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerVisibleDynamicRaceCP':
'prefix': 'GetPlayerVisibleDynamicRaceCP'
'body': 'GetPlayerVisibleDynamicRaceCP(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicMapIcon':
'prefix': 'CreateDynamicMapIcon'
'body': 'CreateDynamicMapIcon(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:color}, ${6:worldid = -1}, ${7:interiorid = -1}, ${8:playerid = -1}, ${9:Float:streamdistance = STREAMER_MAP_ICON_SD}, ${10:style = MAPICON_LOCAL}, ${11:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${12:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicMapIcon':
'prefix': 'DestroyDynamicMapIcon'
'body': 'DestroyDynamicMapIcon(${1:STREAMER_TAG_MAP_ICON:iconid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicMapIcon':
'prefix': 'IsValidDynamicMapIcon'
'body': 'IsValidDynamicMapIcon(${1:STREAMER_TAG_MAP_ICON:iconid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamic3DTextLabel':
'prefix': 'CreateDynamic3DTextLabel'
'body': 'CreateDynamic3DTextLabel(${1:const text[]}, ${2:color}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:Float:drawdistance}, ${7:attachedplayer = INVALID_PLAYER_ID}, ${8:attachedvehicle = INVALID_VEHICLE_ID}, ${9:testlos = 0}, ${10:worldid = -1}, ${11:interiorid = -1}, ${12:playerid = -1}, ${13:Float:streamdistance = STREAMER_3D_TEXT_LABEL_SD}, ${14:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${15:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamic3DTextLabel':
'prefix': 'DestroyDynamic3DTextLabel'
'body': 'DestroyDynamic3DTextLabel(${1:STREAMER_TAG_3D_TEXT_LABEL:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamic3DTextLabel':
'prefix': 'IsValidDynamic3DTextLabel'
'body': 'IsValidDynamic3DTextLabel(${1:STREAMER_TAG_3D_TEXT_LABEL:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamic3DTextLabelText':
'prefix': 'GetDynamic3DTextLabelText'
'body': 'GetDynamic3DTextLabelText(${1:STREAMER_TAG_3D_TEXT_LABEL:id}, ${2:text[]}, ${3:maxtext = sizeof text})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'UpdateDynamic3DTextLabelText':
'prefix': 'UpdateDynamic3DTextLabelText'
'body': 'UpdateDynamic3DTextLabelText(${1:STREAMER_TAG_3D_TEXT_LABEL:id}, ${2:color}, ${3:const text[]})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCircle':
'prefix': 'CreateDynamicCircle'
'body': 'CreateDynamicCircle(${1:Float:x}, ${2:Float:y}, ${3:Float:size}, ${4:worldid = -1}, ${5:interiorid = -1}, ${6:playerid = -1}, ${7:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCylinder':
'prefix': 'CreateDynamicCylinder'
'body': 'CreateDynamicCylinder(${1:Float:x}, ${2:Float:y}, ${3:Float:minz}, ${4:Float:maxz}, ${5:Float:size}, ${6:worldid = -1}, ${7:interiorid = -1}, ${8:playerid = -1}, ${9:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicSphere':
'prefix': 'CreateDynamicSphere'
'body': 'CreateDynamicSphere(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRectangle':
'prefix': 'CreateDynamicRectangle'
'body': 'CreateDynamicRectangle(${1:Float:minx}, ${2:Float:miny}, ${3:Float:maxx}, ${4:Float:maxy}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCuboid':
'prefix': 'CreateDynamicCuboid'
'body': 'CreateDynamicCuboid(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worldid = -1}, ${8:interiorid = -1}, ${9:playerid = -1}, ${10:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCube':
'prefix': 'CreateDynamicCube'
'body': 'CreateDynamicCube(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worldid = -1}, ${8:interiorid = -1}, ${9:playerid = -1}, ${10:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPolygon':
'prefix': 'CreateDynamicPolygon'
'body': 'CreateDynamicPolygon(${1:Float:points[]}, ${2:Float:minz = -FLOAT_INFINITY}, ${3:Float:maxz = FLOAT_INFINITY}, ${4:maxpoints = sizeof points}, ${5:worldid = -1}, ${6:interiorid = -1}, ${7:playerid = -1}, ${8:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicArea':
'prefix': 'DestroyDynamicArea'
'body': 'DestroyDynamicArea(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicArea':
'prefix': 'IsValidDynamicArea'
'body': 'IsValidDynamicArea(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicPolygonPoints':
'prefix': 'GetDynamicPolygonPoints'
'body': 'GetDynamicPolygonPoints(${1:STREAMER_TAG_AREA:areaid}, ${2:Float:points[]}, ${3:maxpoints = sizeof points})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicPolygonNumberPoints':
'prefix': 'GetDynamicPolygonNumberPoints'
'body': 'GetDynamicPolygonNumberPoints(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerDynamicArea':
'prefix': 'TogglePlayerDynamicArea'
'body': 'TogglePlayerDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'TogglePlayerAllDynamicAreas':
'prefix': 'TogglePlayerAllDynamicAreas'
'body': 'TogglePlayerAllDynamicAreas(${1:playerid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInDynamicArea':
'prefix': 'IsPlayerInDynamicArea'
'body': 'IsPlayerInDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid}, ${3:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPlayerInAnyDynamicArea':
'prefix': 'IsPlayerInAnyDynamicArea'
'body': 'IsPlayerInAnyDynamicArea(${1:playerid}, ${2:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsAnyPlayerInDynamicArea':
'prefix': 'IsAnyPlayerInDynamicArea'
'body': 'IsAnyPlayerInDynamicArea(${1:STREAMER_TAG_AREA:areaid}, ${2:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsAnyPlayerInAnyDynamicArea':
'prefix': 'IsAnyPlayerInAnyDynamicArea'
'body': 'IsAnyPlayerInAnyDynamicArea(${1:recheck = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerDynamicAreas':
'prefix': 'GetPlayerDynamicAreas'
'body': 'GetPlayerDynamicAreas(${1:playerid}, ${2:STREAMER_TAG_AREA:areas[]}, ${3:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerNumberDynamicAreas':
'prefix': 'GetPlayerNumberDynamicAreas'
'body': 'GetPlayerNumberDynamicAreas(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPointInDynamicArea':
'prefix': 'IsPointInDynamicArea'
'body': 'IsPointInDynamicArea(${1:STREAMER_TAG_AREA:areaid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsPointInAnyDynamicArea':
'prefix': 'IsPointInAnyDynamicArea'
'body': 'IsPointInAnyDynamicArea(${1:Float:x}, ${2:Float:y}, ${3:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicAreasForPoint':
'prefix': 'GetDynamicAreasForPoint'
'body': 'GetDynamicAreasForPoint(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:STREAMER_TAG_AREA:areas[]}, ${5:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetNumberDynamicAreasForPoint':
'prefix': 'GetNumberDynamicAreasForPoint'
'body': 'GetNumberDynamicAreasForPoint(${1:Float:x}, ${2:Float:y}, ${3:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicAreaToObject':
'prefix': 'AttachDynamicAreaToObject'
    'body': 'AttachDynamicAreaToObject(${1:STREAMER_TAG_AREA:areaid}, ${2:STREAMER_TAG_OBJECT_ALT:objectid}, ${3:type = STREAMER_OBJECT_TYPE_DYNAMIC}, ${4:playerid = INVALID_PLAYER_ID}, ${5:Float:offsetx = 0.0}, ${6:Float:offsety = 0.0}, ${7:Float:offsetz = 0.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicAreaToPlayer':
'prefix': 'AttachDynamicAreaToPlayer'
'body': 'AttachDynamicAreaToPlayer(${1:STREAMER_TAG_AREA:areaid}, ${2:playerid}, ${3:Float:offsetx = 0.0}, ${4:Float:offsety = 0.0}, ${5:Float:offsetz = 0.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'AttachDynamicAreaToVehicle':
'prefix': 'AttachDynamicAreaToVehicle'
'body': 'AttachDynamicAreaToVehicle(${1:STREAMER_TAG_AREA:areaid}, ${2:vehicleid}, ${3:Float:offsetx = 0.0}, ${4:Float:offsety = 0.0}, ${5:Float:offsetz = 0.0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicObjectEx':
'prefix': 'CreateDynamicObjectEx'
'body': 'CreateDynamicObjectEx(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:rx}, ${6:Float:ry}, ${7:Float:rz}, ${8:Float:streamdistance = STREAMER_OBJECT_SD}, ${9:Float:drawdistance = STREAMER_OBJECT_DD}, ${10:worlds[] = { -1 }}, ${11:interiors[] = { -1 }}, ${12:players[] = { -1 }}, ${13:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${14:priority = 0}, ${15:maxworlds = sizeof worlds}, ${16:maxinteriors = sizeof interiors}, ${17:maxplayers = sizeof players}, ${18:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPickupEx':
'prefix': 'CreateDynamicPickupEx'
'body': 'CreateDynamicPickupEx(${1:modelid}, ${2:type}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:Float:streamdistance = STREAMER_PICKUP_SD}, ${7:worlds[] = { -1 }}, ${8:interiors[] = { -1 }}, ${9:players[] = { -1 }}, ${10:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${11:priority = 0}, ${12:maxworlds = sizeof worlds}, ${13:maxinteriors = sizeof interiors}, ${14:maxplayers = sizeof players}, ${15:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCPEx':
'prefix': 'CreateDynamicCPEx'
'body': 'CreateDynamicCPEx(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:Float:streamdistance = STREAMER_CP_SD}, ${6:worlds[] = { -1 }}, ${7:interiors[] = { -1 }}, ${8:players[] = { -1 }}, ${9:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${10:priority = 0}, ${11:maxworlds = sizeof worlds}, ${12:maxinteriors = sizeof interiors}, ${13:maxplayers = sizeof players}, ${14:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRaceCPEx':
'prefix': 'CreateDynamicRaceCPEx'
'body': 'CreateDynamicRaceCPEx(${1:type}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:nextx}, ${6:Float:nexty}, ${7:Float:nextz}, ${8:Float:size}, ${9:Float:streamdistance = STREAMER_RACE_CP_SD}, ${10:worlds[] = { -1 }}, ${11:interiors[] = { -1 }}, ${12:players[] = { -1 }}, ${13:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${14:priority = 0}, ${15:maxworlds = sizeof worlds}, ${16:maxinteriors = sizeof interiors}, ${17:maxplayers = sizeof players}, ${18:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicMapIconEx':
'prefix': 'CreateDynamicMapIconEx'
'body': 'CreateDynamicMapIconEx(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:color}, ${6:style = MAPICON_LOCAL}, ${7:Float:streamdistance = STREAMER_MAP_ICON_SD}, ${8:worlds[] = { -1 }}, ${9:interiors[] = { -1 }}, ${10:players[] = { -1 }}, ${11:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${12:priority = 0}, ${13:maxworlds = sizeof worlds}, ${14:maxinteriors = sizeof interiors}, ${15:maxplayers = sizeof players}, ${16:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamic3DTextLabelEx':
'prefix': 'CreateDynamic3DTextLabelEx'
'body': 'CreateDynamic3DTextLabelEx(${1:const text[]}, ${2:color}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z}, ${6:Float:drawdistance}, ${7:attachedplayer = INVALID_PLAYER_ID}, ${8:attachedvehicle = INVALID_VEHICLE_ID}, ${9:testlos = 0}, ${10:Float:streamdistance = STREAMER_3D_TEXT_LABEL_SD}, ${11:worlds[] = { -1 }}, ${12:interiors[] = { -1 }}, ${13:players[] = { -1 }}, ${14:STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${15:priority = 0}, ${16:maxworlds = sizeof worlds}, ${17:maxinteriors = sizeof interiors}, ${18:maxplayers = sizeof players}, ${19:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCircleEx':
'prefix': 'CreateDynamicCircleEx'
'body': 'CreateDynamicCircleEx(${1:Float:x}, ${2:Float:y}, ${3:Float:size}, ${4:worlds[] = { -1 }}, ${5:interiors[] = { -1 }}, ${6:players[] = { -1 }}, ${7:priority = 0}, ${8:maxworlds = sizeof worlds}, ${9:maxinteriors = sizeof interiors}, ${10:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCylinderEx':
'prefix': 'CreateDynamicCylinderEx'
'body': 'CreateDynamicCylinderEx(${1:Float:x}, ${2:Float:y}, ${3:Float:minz}, ${4:Float:maxz}, ${5:Float:size}, ${6:worlds[] = { -1 }}, ${7:interiors[] = { -1 }}, ${8:players[] = { -1 }}, ${9:priority = 0}, ${10:maxworlds = sizeof worlds}, ${11:maxinteriors = sizeof interiors}, ${12:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicSphereEx':
'prefix': 'CreateDynamicSphereEx'
'body': 'CreateDynamicSphereEx(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:Float:size}, ${5:worlds[] = { -1 }}, ${6:interiors[] = { -1 }}, ${7:players[] = { -1 }}, ${8:priority = 0}, ${9:maxworlds = sizeof worlds}, ${10:maxinteriors = sizeof interiors}, ${11:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicRectangleEx':
'prefix': 'CreateDynamicRectangleEx'
'body': 'CreateDynamicRectangleEx(${1:Float:minx}, ${2:Float:miny}, ${3:Float:maxx}, ${4:Float:maxy}, ${5:worlds[] = { -1 }}, ${6:interiors[] = { -1 }}, ${7:players[] = { -1 }}, ${8:priority = 0}, ${9:maxworlds = sizeof worlds}, ${10:maxinteriors = sizeof interiors}, ${11:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCuboidEx':
'prefix': 'CreateDynamicCuboidEx'
'body': 'CreateDynamicCuboidEx(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worlds[] = { -1 }}, ${8:interiors[] = { -1 }}, ${9:players[] = { -1 }}, ${10:priority = 0}, ${11:maxworlds = sizeof worlds}, ${12:maxinteriors = sizeof interiors}, ${13:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicCubeEx':
'prefix': 'CreateDynamicCubeEx'
'body': 'CreateDynamicCubeEx(${1:Float:minx}, ${2:Float:miny}, ${3:Float:minz}, ${4:Float:maxx}, ${5:Float:maxy}, ${6:Float:maxz}, ${7:worlds[] = { -1 }}, ${8:interiors[] = { -1 }}, ${9:players[] = { -1 }}, ${10:priority = 0}, ${11:maxworlds = sizeof worlds}, ${12:maxinteriors = sizeof interiors}, ${13:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicPolygonEx':
'prefix': 'CreateDynamicPolygonEx'
'body': 'CreateDynamicPolygonEx(${1:Float:points[]}, ${2:Float:minz = -FLOAT_INFINITY}, ${3:Float:maxz = FLOAT_INFINITY}, ${4:maxpoints = sizeof points}, ${5:worlds[] = { -1 }}, ${6:interiors[] = { -1 }}, ${7:players[] = { -1 }}, ${8:priority = 0}, ${9:maxworlds = sizeof worlds}, ${10:maxinteriors = sizeof interiors}, ${11:maxplayers = sizeof players})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_TickRate':
'prefix': 'Streamer_TickRate'
'body': 'Streamer_TickRate(${1:rate})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_MaxItems':
'prefix': 'Streamer_MaxItems'
'body': 'Streamer_MaxItems(${1:type}, ${2:items})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_VisibleItems':
'prefix': 'Streamer_VisibleItems'
'body': 'Streamer_VisibleItems(${1:type}, ${2:items}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CellDistance':
'prefix': 'Streamer_CellDistance'
'body': 'Streamer_CellDistance(${1:Float:distance})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CellSize':
'prefix': 'Streamer_CellSize'
'body': 'Streamer_CellSize(${1:Float:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_CallbackHook':
'prefix': 'Streamer_CallbackHook'
'body': 'Streamer_CallbackHook(${1:callback}, ${2:...})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicObjects':
'prefix': 'DestroyAllDynamicObjects'
'body': 'DestroyAllDynamicObjects()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicObjects':
'prefix': 'CountDynamicObjects'
'body': 'CountDynamicObjects()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicPickups':
'prefix': 'DestroyAllDynamicPickups'
'body': 'DestroyAllDynamicPickups()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicPickups':
'prefix': 'CountDynamicPickups'
'body': 'CountDynamicPickups()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicCPs':
'prefix': 'DestroyAllDynamicCPs'
'body': 'DestroyAllDynamicCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicCPs':
'prefix': 'CountDynamicCPs'
'body': 'CountDynamicCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicRaceCPs':
'prefix': 'DestroyAllDynamicRaceCPs'
'body': 'DestroyAllDynamicRaceCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicRaceCPs':
'prefix': 'CountDynamicRaceCPs'
'body': 'CountDynamicRaceCPs()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicMapIcons':
'prefix': 'DestroyAllDynamicMapIcons'
'body': 'DestroyAllDynamicMapIcons()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicMapIcons':
'prefix': 'CountDynamicMapIcons'
'body': 'CountDynamicMapIcons()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamic3DTextLabels':
'prefix': 'DestroyAllDynamic3DTextLabels'
'body': 'DestroyAllDynamic3DTextLabels()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamic3DTextLabels':
'prefix': 'CountDynamic3DTextLabels'
'body': 'CountDynamic3DTextLabels()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyAllDynamicAreas':
'prefix': 'DestroyAllDynamicAreas'
'body': 'DestroyAllDynamicAreas()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CountDynamicAreas':
'prefix': 'CountDynamicAreas'
'body': 'CountDynamicAreas()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleChunkStream':
'prefix': 'Streamer_IsToggleChunkStream'
'body': 'Streamer_IsToggleChunkStream()'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetLastUpdateTime':
'prefix': 'Streamer_GetLastUpdateTime'
'body': 'Streamer_GetLastUpdateTime(${1:&Float:time})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetChunkSize':
'prefix': 'Streamer_GetChunkSize'
'body': 'Streamer_GetChunkSize(${1:type})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetPlayerTickRate':
'prefix': 'Streamer_GetPlayerTickRate'
'body': 'Streamer_GetPlayerTickRate(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'DestroyDynamicActor':
'prefix': 'DestroyDynamicActor'
'body': 'DestroyDynamicActor(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsValidDynamicActor':
'prefix': 'IsValidDynamicActor'
'body': 'IsValidDynamicActor(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorVirtualWorld':
'prefix': 'GetDynamicActorVirtualWorld'
'body': 'GetDynamicActorVirtualWorld(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'ClearDynamicActorAnimations':
'prefix': 'ClearDynamicActorAnimations'
'body': 'ClearDynamicActorAnimations(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicActorInvulnerable':
'prefix': 'IsDynamicActorInvulnerable'
'body': 'IsDynamicActorInvulnerable(${1:STREAMER_TAG_ACTOR:actorid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerTargetDynamicActor':
'prefix': 'GetPlayerTargetDynamicActor'
'body': 'GetPlayerTargetDynamicActor(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerCameraTargetDynActor':
'prefix': 'GetPlayerCameraTargetDynActor'
'body': 'GetPlayerCameraTargetDynActor(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsToggleDynAreaSpectateMode':
'prefix': 'IsToggleDynAreaSpectateMode'
'body': 'IsToggleDynAreaSpectateMode(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetPlayerCameraTargetDynObject':
'prefix': 'GetPlayerCameraTargetDynObject'
'body': 'GetPlayerCameraTargetDynObject(${1:playerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleChunkStream':
'prefix': 'Streamer_ToggleChunkStream'
'body': 'Streamer_ToggleChunkStream(${1:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicAreaType':
'prefix': 'GetDynamicAreaType'
'body': 'GetDynamicAreaType(${1:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_AmxUnloadDestroyItems':
'prefix': 'Streamer_AmxUnloadDestroyItems'
'body': 'Streamer_AmxUnloadDestroyItems(${1:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItemInvAreas':
'prefix': 'Streamer_IsToggleItemInvAreas'
'body': 'Streamer_IsToggleItemInvAreas(${1:type}, ${2:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetChunkTickRate':
'prefix': 'Streamer_GetChunkTickRate'
'body': 'Streamer_GetChunkTickRate(${1:type}, ${2:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetChunkSize':
'prefix': 'Streamer_SetChunkSize'
'body': 'Streamer_SetChunkSize(${1:type}, ${2:size})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetPlayerTickRate':
'prefix': 'Streamer_SetPlayerTickRate'
'body': 'Streamer_SetPlayerTickRate(${1:playerid}, ${2:rate})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsDynamicActorStreamedIn':
'prefix': 'IsDynamicActorStreamedIn'
'body': 'IsDynamicActorStreamedIn(${1:STREAMER_TAG_ACTOR:actorid}, ${2:forplayerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorVirtualWorld':
'prefix': 'SetDynamicActorVirtualWorld'
'body': 'SetDynamicActorVirtualWorld(${1:STREAMER_TAG_ACTOR:actorid}, ${2:vworld})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorFacingAngle':
'prefix': 'GetDynamicActorFacingAngle'
'body': 'GetDynamicActorFacingAngle(${1:STREAMER_TAG_ACTOR:actorid}, ${2:&Float:ang})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorFacingAngle':
'prefix': 'SetDynamicActorFacingAngle'
'body': 'SetDynamicActorFacingAngle(${1:STREAMER_TAG_ACTOR:actorid}, ${2:Float:ang})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorHealth':
'prefix': 'GetDynamicActorHealth'
'body': 'GetDynamicActorHealth(${1:STREAMER_TAG_ACTOR:actorid}, ${2:&Float:health})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorHealth':
'prefix': 'SetDynamicActorHealth'
'body': 'SetDynamicActorHealth(${1:STREAMER_TAG_ACTOR:actorid}, ${2:Float:health})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorInvulnerable':
'prefix': 'SetDynamicActorInvulnerable'
'body': 'SetDynamicActorInvulnerable(${1:STREAMER_TAG_ACTOR:actorid}, ${2:invulnerable = true})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'ToggleDynAreaSpectateMode':
'prefix': 'ToggleDynAreaSpectateMode'
'body': 'ToggleDynAreaSpectateMode(${1:STREAMER_TAG_AREA:areaid}, ${2:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemInvAreas':
'prefix': 'Streamer_ToggleItemInvAreas'
'body': 'Streamer_ToggleItemInvAreas(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_IsToggleItem':
'prefix': 'Streamer_IsToggleItem'
'body': 'Streamer_IsToggleItem(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetChunkTickRate':
'prefix': 'Streamer_SetChunkTickRate'
'body': 'Streamer_SetChunkTickRate(${1:type}, ${2:rate}, ${3:playerid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItemCallbacks':
'prefix': 'Streamer_ToggleItemCallbacks'
'body': 'Streamer_ToggleItemCallbacks(${1:type}, ${2:id}, ${3:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetArrayDataLength':
'prefix': 'Streamer_GetArrayDataLength'
'body': 'Streamer_GetArrayDataLength(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:data})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleItem':
'prefix': 'Streamer_ToggleItem'
'body': 'Streamer_ToggleItem(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:id}, ${4:toggle})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorPos':
'prefix': 'GetDynamicActorPos'
'body': 'GetDynamicActorPos(${1:STREAMER_TAG_ACTOR:actorid}, ${2:&Float:x}, ${3:&Float:y}, ${4:&Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'SetDynamicActorPos':
'prefix': 'SetDynamicActorPos'
'body': 'SetDynamicActorPos(${1:STREAMER_TAG_ACTOR:actorid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetAllVisibleItems':
'prefix': 'Streamer_GetAllVisibleItems'
'body': 'Streamer_GetAllVisibleItems(${1:playerid}, ${2:type}, ${3:STREAMER_ALL_TAGS:items[]}, ${4:maxitems = sizeof items})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_ToggleAllItems':
'prefix': 'Streamer_ToggleAllItems'
'body': 'Streamer_ToggleAllItems(${1:playerid}, ${2:type}, ${3:toggle}, ${4:const exceptions[] = { -1 }}, ${5:maxexceptions = sizeof exceptions})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemOffset':
'prefix': 'Streamer_GetItemOffset'
'body': 'Streamer_GetItemOffset(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:&Float:x}, ${4:&Float:y}, ${5:&Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetItemOffset':
'prefix': 'Streamer_SetItemOffset'
'body': 'Streamer_SetItemOffset(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetItemPos':
'prefix': 'Streamer_GetItemPos'
'body': 'Streamer_GetItemPos(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:&Float:x}, ${4:&Float:y}, ${5:&Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_SetItemPos':
'prefix': 'Streamer_SetItemPos'
'body': 'Streamer_SetItemPos(${1:type}, ${2:STREAMER_ALL_TAGS:id}, ${3:Float:x}, ${4:Float:y}, ${5:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsLineInAnyDynamicArea':
'prefix': 'IsLineInAnyDynamicArea'
'body': 'IsLineInAnyDynamicArea(${1:Float:x1}, ${2:Float:y1}, ${3:Float:z1}, ${4:Float:x2}, ${5:Float:y2}, ${6:Float:z2})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetNumberDynamicAreasForLine':
'prefix': 'GetNumberDynamicAreasForLine'
'body': 'GetNumberDynamicAreasForLine(${1:Float:x1}, ${2:Float:y1}, ${3:Float:z1}, ${4:Float:x2}, ${5:Float:y2}, ${6:Float:z2})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'IsLineInDynamicArea':
'prefix': 'IsLineInDynamicArea'
'body': 'IsLineInDynamicArea(${1:STREAMER_TAG_AREA:areaid}, ${2:Float:x1}, ${3:Float:y1}, ${4:Float:z1}, ${5:Float:x2}, ${6:Float:y2}, ${7:Float:z2})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicAreasForLine':
'prefix': 'GetDynamicAreasForLine'
'body': 'GetDynamicAreasForLine(${1:Float:x1}, ${2:Float:y1}, ${3:Float:z1}, ${4:Float:x2}, ${5:Float:y2}, ${6:Float:z2}, ${7:STREAMER_TAG_AREA:areas[]}, ${8:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_GetNearbyItems':
'prefix': 'Streamer_GetNearbyItems'
'body': 'Streamer_GetNearbyItems(${1:Float:x}, ${2:Float:y}, ${3:Float:z}, ${4:type}, ${5:STREAMER_ALL_TAGS:items[]}, ${6:maxitems = sizeof items}, ${7:Float:range = 300.0}, ${8:worldid = -1})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'ApplyDynamicActorAnimation':
'prefix': 'ApplyDynamicActorAnimation'
'body': 'ApplyDynamicActorAnimation(${1:STREAMER_TAG_ACTOR:actorid}, ${2:const animlib[]}, ${3:const animname[]}, ${4:Float:fdelta}, ${5:loop}, ${6:lockx}, ${7:locky}, ${8:freeze}, ${9:time})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'GetDynamicActorAnimation':
'prefix': 'GetDynamicActorAnimation'
'body': 'GetDynamicActorAnimation(${1:STREAMER_TAG_ACTOR:actorid}, ${2:animlib[]}, ${3:animname[]}, ${4:&Float:fdelta}, ${5:&loop}, ${6:&lockx}, ${7:&locky}, ${8:&freeze}, ${9:&time}, ${10:maxanimlib = sizeof animlib}, ${11:maxanimname = sizeof animname})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicActor':
'prefix': 'CreateDynamicActor'
'body': 'CreateDynamicActor(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:r}, ${6:invulnerable = true}, ${7:Float:health = 100.0}, ${8:worldid = -1}, ${9:interiorid = -1}, ${10:playerid = -1}, ${11:Float:streamdistance = STREAMER_ACTOR_SD}, ${12:STREAMER_TAG_AREA:areaid = STREAMER_TAG_AREA:-1}, ${13:priority = 0})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'CreateDynamicActorEx':
'prefix': 'CreateDynamicActorEx'
'body': 'CreateDynamicActorEx(${1:modelid}, ${2:Float:x}, ${3:Float:y}, ${4:Float:z}, ${5:Float:r}, ${6:invulnerable = 1}, ${7:Float:health = 100.0}, ${8:Float:streamdistance = STREAMER_ACTOR_SD}, ${9:const worlds[] = { -1 }}, ${10:const interiors[] = { -1 }}, ${11:const players[] = { -1 }}, ${12:const STREAMER_TAG_AREA:areas[] = { STREAMER_TAG_AREA:-1 }}, ${13:priority = 0}, ${14:maxworlds = sizeof worlds}, ${15:maxinteriors = sizeof interiors}, ${16:maxplayers = sizeof players}, ${17:maxareas = sizeof areas})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnDynamicObjectMoved':
'prefix': 'OnDynamicObjectMoved'
'body': 'OnDynamicObjectMoved(${1:STREAMER_TAG_OBJECT:objectid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEditDynamicObject':
'prefix': 'OnPlayerEditDynamicObject'
'body': 'OnPlayerEditDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid}, ${3:response}, ${4:Float:x}, ${5:Float:y}, ${6:Float:z}, ${7:Float:rx}, ${8:Float:ry}, ${9:Float:rz})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerSelectDynamicObject':
'prefix': 'OnPlayerSelectDynamicObject'
'body': 'OnPlayerSelectDynamicObject(${1:playerid}, ${2:STREAMER_TAG_OBJECT:objectid}, ${3:modelid}, ${4:Float:x}, ${5:Float:y}, ${6:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerShootDynamicObject':
'prefix': 'OnPlayerShootDynamicObject'
'body': 'OnPlayerShootDynamicObject(${1:playerid}, ${2:weaponid}, ${3:STREAMER_TAG_OBJECT:objectid}, ${4:Float:x}, ${5:Float:y}, ${6:Float:z})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerPickUpDynamicPickup':
'prefix': 'OnPlayerPickUpDynamicPickup'
'body': 'OnPlayerPickUpDynamicPickup(${1:playerid}, ${2:STREAMER_TAG_PICKUP:pickupid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEnterDynamicCP':
'prefix': 'OnPlayerEnterDynamicCP'
'body': 'OnPlayerEnterDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerLeaveDynamicCP':
'prefix': 'OnPlayerLeaveDynamicCP'
'body': 'OnPlayerLeaveDynamicCP(${1:playerid}, ${2:STREAMER_TAG_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEnterDynamicRaceCP':
'prefix': 'OnPlayerEnterDynamicRaceCP'
'body': 'OnPlayerEnterDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerLeaveDynamicRaceCP':
'prefix': 'OnPlayerLeaveDynamicRaceCP'
'body': 'OnPlayerLeaveDynamicRaceCP(${1:playerid}, ${2:STREAMER_TAG_RACE_CP:checkpointid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerEnterDynamicArea':
'prefix': 'OnPlayerEnterDynamicArea'
'body': 'OnPlayerEnterDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerLeaveDynamicArea':
'prefix': 'OnPlayerLeaveDynamicArea'
'body': 'OnPlayerLeaveDynamicArea(${1:playerid}, ${2:STREAMER_TAG_AREA:areaid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_OnItemStreamIn':
'prefix': 'Streamer_OnItemStreamIn'
'body': 'Streamer_OnItemStreamIn(${1:type}, ${2:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_OnItemStreamOut':
'prefix': 'Streamer_OnItemStreamOut'
'body': 'Streamer_OnItemStreamOut(${1:type}, ${2:id})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnDynamicActorStreamIn':
'prefix': 'OnDynamicActorStreamIn'
'body': 'OnDynamicActorStreamIn(${1:STREAMER_TAG_ACTOR:actorid}, ${2:forplayerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnDynamicActorStreamOut':
'prefix': 'OnDynamicActorStreamOut'
'body': 'OnDynamicActorStreamOut(${1:STREAMER_TAG_ACTOR:actorid}, ${2:forplayerid})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'OnPlayerGiveDamageDynamicActor':
'prefix': 'OnPlayerGiveDamageDynamicActor'
'body': 'OnPlayerGiveDamageDynamicActor(${1:playerid}, ${2:STREAMER_TAG_ACTOR:actorid}, ${3:Float:amount}, ${4:weaponid}, ${5:bodypart})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
'Streamer_OnPluginError':
'prefix': 'Streamer_OnPluginError'
'body': 'Streamer_OnPluginError(${1:error[]})'
'description': 'Function from: Streamer'
'descriptionMoreURL': 'https://github.com/samp-incognito/samp-streamer-plugin/wiki'
|
[
{
"context": " datos\n# de la última generada\n#\n# Author:\n# lgaticaq\n\nsii = require(\"sii\")\nRut = require(\"rutjs\")\n\nget",
"end": 297,
"score": 0.9996551871299744,
"start": 289,
"tag": "USERNAME",
"value": "lgaticaq"
},
{
"context": "\n user = {rut: rut.getNiceRut(... | src/script.coffee | lgaticaq/hubot-sii | 1 | # Description
# Script de hubot para crear boleta de honorarios
#
# Dependencies:
# "rutjs": "^0.1.1"
# "sii": "0.0.1"
#
# Configuration:
# None
#
# Commands:
# hubot sii boleta <rut> <contraseña> <monto> - Genera boleta con los datos
# de la última generada
#
# Author:
# lgaticaq
sii = require("sii")
Rut = require("rutjs")
getValue = (amount) ->
return Math.round(1.1111111111111112 * amount)
module.exports = (robot) ->
ptRut = "((\\d{7,8}|\\d{1,2}\\.\\d{3}\\.\\d{3})-[0-9Kk])"
ptBill = "sii boleta #{ptRut} ([\\w\\W]+) ([\\d]+)"
reBill = new RegExp(ptBill, "i")
robot.respond reBill, (res) ->
rut = new Rut(res.match[1])
pass = res.match[3].trim()
return res.reply("rut invalido") if !rut.isValid
amount = parseInt(res.match[4], 10)
user = {rut: rut.getNiceRut(false), password: pass}
work = {value: getValue(amount)}
sii.byLastInvoice(user, work)
.then () ->
res.send("Boleta enviada")
.catch (err) ->
robot.emit "error", err
res.reply "ocurrio un error al intentar enviar la boleta"
| 75052 | # Description
# Script de hubot para crear boleta de honorarios
#
# Dependencies:
# "rutjs": "^0.1.1"
# "sii": "0.0.1"
#
# Configuration:
# None
#
# Commands:
# hubot sii boleta <rut> <contraseña> <monto> - Genera boleta con los datos
# de la última generada
#
# Author:
# lgaticaq
sii = require("sii")
Rut = require("rutjs")
getValue = (amount) ->
return Math.round(1.1111111111111112 * amount)
module.exports = (robot) ->
ptRut = "((\\d{7,8}|\\d{1,2}\\.\\d{3}\\.\\d{3})-[0-9Kk])"
ptBill = "sii boleta #{ptRut} ([\\w\\W]+) ([\\d]+)"
reBill = new RegExp(ptBill, "i")
robot.respond reBill, (res) ->
rut = new Rut(res.match[1])
pass = res.match[3].trim()
return res.reply("rut invalido") if !rut.isValid
amount = parseInt(res.match[4], 10)
user = {rut: rut.getNiceRut(false), password: <PASSWORD>}
work = {value: getValue(amount)}
sii.byLastInvoice(user, work)
.then () ->
res.send("Boleta enviada")
.catch (err) ->
robot.emit "error", err
res.reply "ocurrio un error al intentar enviar la boleta"
| true | # Description
# Script de hubot para crear boleta de honorarios
#
# Dependencies:
# "rutjs": "^0.1.1"
# "sii": "0.0.1"
#
# Configuration:
# None
#
# Commands:
# hubot sii boleta <rut> <contraseña> <monto> - Genera boleta con los datos
# de la última generada
#
# Author:
# lgaticaq
sii = require("sii")
Rut = require("rutjs")
getValue = (amount) ->
return Math.round(1.1111111111111112 * amount)
module.exports = (robot) ->
ptRut = "((\\d{7,8}|\\d{1,2}\\.\\d{3}\\.\\d{3})-[0-9Kk])"
ptBill = "sii boleta #{ptRut} ([\\w\\W]+) ([\\d]+)"
reBill = new RegExp(ptBill, "i")
robot.respond reBill, (res) ->
rut = new Rut(res.match[1])
pass = res.match[3].trim()
return res.reply("rut invalido") if !rut.isValid
amount = parseInt(res.match[4], 10)
user = {rut: rut.getNiceRut(false), password: PI:PASSWORD:<PASSWORD>END_PI}
work = {value: getValue(amount)}
sii.byLastInvoice(user, work)
.then () ->
res.send("Boleta enviada")
.catch (err) ->
robot.emit "error", err
res.reply "ocurrio un error al intentar enviar la boleta"
|
[
{
"context": "# Author: Josh Bass\n\nReact = require(\"react\");\nrequire(\"./res/styles/",
"end": 19,
"score": 0.9998725652694702,
"start": 10,
"tag": "NAME",
"value": "Josh Bass"
}
] | src/client/components/navigation_bar/NavigationBarView.coffee | jbass86/Aroma | 0 | # Author: Josh Bass
React = require("react");
require("./res/styles/navigation_bar.scss");
module.exports = React.createClass
getInitialState: ->
{nav_visible: false, nav_selection: ["inventory"]};
componentDidMount: ->
@props.nav_model.set("nav_selection", @state.nav_selection);
@props.nav_model.on("change:nav_visible", () =>
@setState({"nav_visible": @props.nav_model.get("nav_visible")});
);
render: ->
<div class="row">
<div className={"nav-bar-view collapsible-real" + if @state.nav_visible then " col-xs-6 col-md-4" else " no-width"}>
<h2 className="nav-header">
<span>Navigation</span>
</h2>
<div className="nav-selections unselectable">
<div className={@getNavSelectionClasses("orders")} onClick={@selectNavView.bind(@, "orders")}>Orders</div>
<div className={@getNavSelectionClasses("customers")} onClick={@selectNavView.bind(@, "customers")}>Customers</div>
<div className={@getNavSelectionClasses("inventory")} onClick={@selectNavView.bind(@, "inventory")}>Inventory</div>
<div className={@getNavSelectionClasses("analytics")} onClick={@selectNavView.bind(@, "analytics")}>Analytics</div>
</div>
</div>
</div>
getNavSelectionClasses: (name) ->
classes = "nav-selection label";
if (@state.nav_selection.includes(name))
classes += " widget-primary-0";
else
classes += " label-default";
classes;
selectNavView: (name, event) ->
selections = @state.nav_selection;
if (selections.includes(name) and selections.length > 1)
selections.splice(selections.indexOf(name), 1);
else
selections.push(name);
@setState({nav_selection: selections}, () =>
@props.nav_model.set("nav_selection", selections);
@props.nav_model.trigger("change:nav_selection", selections);
);
| 166464 | # Author: <NAME>
React = require("react");
require("./res/styles/navigation_bar.scss");
module.exports = React.createClass
getInitialState: ->
{nav_visible: false, nav_selection: ["inventory"]};
componentDidMount: ->
@props.nav_model.set("nav_selection", @state.nav_selection);
@props.nav_model.on("change:nav_visible", () =>
@setState({"nav_visible": @props.nav_model.get("nav_visible")});
);
render: ->
<div class="row">
<div className={"nav-bar-view collapsible-real" + if @state.nav_visible then " col-xs-6 col-md-4" else " no-width"}>
<h2 className="nav-header">
<span>Navigation</span>
</h2>
<div className="nav-selections unselectable">
<div className={@getNavSelectionClasses("orders")} onClick={@selectNavView.bind(@, "orders")}>Orders</div>
<div className={@getNavSelectionClasses("customers")} onClick={@selectNavView.bind(@, "customers")}>Customers</div>
<div className={@getNavSelectionClasses("inventory")} onClick={@selectNavView.bind(@, "inventory")}>Inventory</div>
<div className={@getNavSelectionClasses("analytics")} onClick={@selectNavView.bind(@, "analytics")}>Analytics</div>
</div>
</div>
</div>
getNavSelectionClasses: (name) ->
classes = "nav-selection label";
if (@state.nav_selection.includes(name))
classes += " widget-primary-0";
else
classes += " label-default";
classes;
selectNavView: (name, event) ->
selections = @state.nav_selection;
if (selections.includes(name) and selections.length > 1)
selections.splice(selections.indexOf(name), 1);
else
selections.push(name);
@setState({nav_selection: selections}, () =>
@props.nav_model.set("nav_selection", selections);
@props.nav_model.trigger("change:nav_selection", selections);
);
| true | # Author: PI:NAME:<NAME>END_PI
React = require("react");
require("./res/styles/navigation_bar.scss");
module.exports = React.createClass
getInitialState: ->
{nav_visible: false, nav_selection: ["inventory"]};
componentDidMount: ->
@props.nav_model.set("nav_selection", @state.nav_selection);
@props.nav_model.on("change:nav_visible", () =>
@setState({"nav_visible": @props.nav_model.get("nav_visible")});
);
render: ->
<div class="row">
<div className={"nav-bar-view collapsible-real" + if @state.nav_visible then " col-xs-6 col-md-4" else " no-width"}>
<h2 className="nav-header">
<span>Navigation</span>
</h2>
<div className="nav-selections unselectable">
<div className={@getNavSelectionClasses("orders")} onClick={@selectNavView.bind(@, "orders")}>Orders</div>
<div className={@getNavSelectionClasses("customers")} onClick={@selectNavView.bind(@, "customers")}>Customers</div>
<div className={@getNavSelectionClasses("inventory")} onClick={@selectNavView.bind(@, "inventory")}>Inventory</div>
<div className={@getNavSelectionClasses("analytics")} onClick={@selectNavView.bind(@, "analytics")}>Analytics</div>
</div>
</div>
</div>
getNavSelectionClasses: (name) ->
classes = "nav-selection label";
if (@state.nav_selection.includes(name))
classes += " widget-primary-0";
else
classes += " label-default";
classes;
selectNavView: (name, event) ->
selections = @state.nav_selection;
if (selections.includes(name) and selections.length > 1)
selections.splice(selections.indexOf(name), 1);
else
selections.push(name);
@setState({nav_selection: selections}, () =>
@props.nav_model.set("nav_selection", selections);
@props.nav_model.trigger("change:nav_selection", selections);
);
|
[
{
"context": "le_enum_attribute'\n value:\n key: 'enum-new-key'\n label: 'enum-new-key'\n\n sampleImport.",
"end": 10561,
"score": 0.8720152974128723,
"start": 10554,
"tag": "KEY",
"value": "new-key"
},
{
"context": " label: 'Enum 1 Label',\n key:... | test/integration.spec.coffee | sphereio/sphere-product-import | 5 | debug = require('debug')('spec:it:sphere-product-import')
_ = require 'underscore'
_.mixin require 'underscore-mixins'
{ProductImport} = require '../lib'
ClientConfig = require '../config'
Promise = require 'bluebird'
path = require 'path'
fs = require 'fs-extra'
jasmine = require 'jasmine-node'
{ deleteProducts } = require './integration/test-helper'
{ExtendedLogger} = require 'sphere-node-utils'
package_json = require '../package.json'
sampleImportJson = require '../samples/import.json'
sampleProductType = require '../samples/sample-product-type.json'
sampleType = require '../samples/sample-type.json'
sampleCategory = require '../samples/sample-category.json'
sampleTaxCategory = require '../samples/sample-tax-category.json'
frozenTimeStamp = new Date().getTime()
ensureResource = (service, predicate, sampleData) ->
debug 'Ensuring existence for: %s', predicate
service.where(predicate).fetch()
.then (result) ->
if result.statusCode is 200 and result.body.count is 0
service.create(sampleData)
.then (result) ->
debug "Sample #{JSON.stringify(result.body.name, null, 2)} created with id: #{result.body.id}"
Promise.resolve()
else
Promise.resolve()
describe 'Product import integration tests', ->
beforeEach (done) ->
@logger = new ExtendedLogger
additionalFields:
project_key: ClientConfig.config.project_key
logConfig:
name: "#{package_json.name}-#{package_json.version}"
streams: [
{ level: 'info', stream: process.stdout }
]
errorDir = path.join(__dirname, '../errors')
fs.emptyDirSync(errorDir)
Config =
clientConfig: ClientConfig
errorDir: errorDir
errorLimit: 30
ensureEnums: true
blackList: ['prices']
filterUnknownAttributes: true
@import = new ProductImport @logger, Config
@client = @import.client
@logger.info 'About to setup...'
deleteProducts(@logger, @client)
.then => ensureResource(@client.productTypes, 'name="Sample Product Type"', sampleProductType)
.then => ensureResource(@client.categories, 'name(en="Snowboard equipment")', sampleCategory)
.then => ensureResource(@client.taxCategories, 'name="Standard tax category"', sampleTaxCategory)
.then => ensureResource(@client.types, "key=\"#{sampleType.key}\"", sampleType)
.then ->
done()
.catch (err) -> done(_.prettify err)
, 10000 # 10sec
afterEach (done) ->
@logger.info 'About to cleanup...'
deleteProducts(@logger, @client)
.then -> done()
.catch (err) -> done(_.prettify err)
, 10000 # 10sec
it 'should import two new products', (done) ->
sampleImport = _.deepClone(sampleImportJson)
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
expect(@import._summary.updated).toBe 0
@client.productProjections.staged(true).fetch()
.then (result) =>
fetchedProducts = result.body.results
expect(_.size fetchedProducts).toBe 2
fetchedSkus = @import._extractUniqueSkus(fetchedProducts)
sampleSkus = @import._extractUniqueSkus(sampleImport.products)
commonSkus = _.intersection(sampleSkus,fetchedSkus)
expect(_.size commonSkus).toBe _.size sampleSkus
predicate = "masterVariant(sku=#{JSON.stringify(sampleImport.products[0].masterVariant.sku)})"
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
fetchedProduct = result.body.results
expect(_.size fetchedProduct[0].variants).toBe _.size sampleImport.products[0].variants
expect(fetchedProduct[0].name).toEqual sampleImport.products[0].name
expect(fetchedProduct[0].slug).toEqual sampleImport.products[0].slug
done()
.catch done
, 10000
it 'should do nothing for empty products list', (done) ->
@import._processBatches([])
.then =>
expect(@import._summary.created).toBe 0
expect(@import._summary.updated).toBe 0
done()
.catch done
, 10000
it 'should generate missing slug', (done) ->
sampleImport = _.deepClone(sampleImportJson)
delete sampleImport.products[0].slug
delete sampleImport.products[1].slug
spyOn(@import, "_generateUniqueToken").andReturn("#{frozenTimeStamp}")
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
expect(@import._summary.updated).toBe 0
predicate = "masterVariant(sku=#{JSON.stringify(sampleImport.products[0].masterVariant.sku)})"
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
fetchedProduct = result.body.results
expect(fetchedProduct[0].slug.en).toBe "product-sync-test-product-1-#{frozenTimeStamp}"
done()
.catch done
, 10000
it 'should update existing product', (done) ->
sampleImport = _.deepClone(sampleImportJson)
sampleUpdateRef = _.deepClone(sampleImportJson)
sampleUpdate = _.deepClone(sampleImportJson)
sampleUpdate.products = _.without(sampleUpdateRef.products,sampleUpdateRef.products[1])
sampleUpdate.products[0].variants = _.without(sampleUpdateRef.products[0].variants,sampleUpdateRef.products[0].variants[1])
sampleImport.products[0].name.de = 'Product_Sync_Test_Product_1_German'
sampleAttribute1 =
name: 'product_id'
value: 'sampe_product_id1'
sampleImport.products[0].masterVariant.attributes.push(sampleAttribute1)
sampleImport.products[0].variants[0].attributes.push(sampleAttribute1)
samplePrice =
value:
centAmount: 666
currencyCode: 'JPY'
country: 'JP'
sampleImport.products[0].variants[0].prices = [samplePrice]
spyOn(@import.sync, 'buildActions').andCallThrough()
@import._processBatches(sampleUpdate.products)
.then =>
expect(@import._summary.created).toBe 1
@import._resetSummary()
@import._resetCache()
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 1
expect(@import._summary.updated).toBe 1
expect(@import.sync.buildActions).toHaveBeenCalledWith(jasmine.any(Object), jasmine.any(Object), ['sample_attribute_1'], undefined)
predicate = "masterVariant(sku=#{JSON.stringify(sampleImport.products[0].masterVariant.sku)})"
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
expect(_.size result.body.results[0].variants).toBe 2
done()
.catch (err) -> done(_.prettify err.body)
, 10000
it 'should continue on error - duplicate slug', (done) ->
# FIXME: looks like the API doesn't correctly validate for duplicate slugs
# for 2 concurrent requests (this happens randomly).
# For now we have to test it as 2 separate imports.
sampleImport = _.deepClone sampleImportJson
@import._processBatches([sampleImport.products[0]])
.then =>
expect(@import._summary.created).toBe
sampleImport2 = _.deepClone sampleImportJson
sampleImport2.products[1].slug.en = 'product-sync-test-product-1'
@import._resetSummary()
@import._processBatches([sampleImport2.products[1]])
.then =>
# import should fail because product 1 has same slug
expect(@import._summary.failed).toBe 1
expect(@import._summary.created).toBe 0
errorJson = require path.join(@import.errorDir,'error-1.json')
expect(errorJson.body.errors.length).toBe 1
error = errorJson.body.errors[0]
expect(error.code).toEqual "DuplicateField"
expect(error.duplicateValue).toEqual "product-sync-test-product-1"
expect(error.field).toEqual "slug.en"
done()
.catch done
it 'should continue of error - missing product name', (done) ->
deleteProducts(@logger, @client)
.then => ensureResource(@client.productTypes, 'name="Sample Product Type"', sampleProductType)
.then =>
sampleImport = _.deepClone sampleImportJson
delete sampleImport.products[1].name
delete sampleImport.products[1].slug
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.failed).toBe 1
expect(@import._summary.created).toBe 1
done()
.catch done
, 10000
it 'should handle set type attributes correctly', (done) ->
sampleImport = _.deepClone sampleImportJson
setTextAttribute =
name: 'sample_set_text'
value: ['text_1', 'text_2']
setTextAttributeUpdated =
name: 'sample_set_text'
value: ['text_1', 'text_2', 'text_3']
sampleImport.products[0].masterVariant.attributes.push setTextAttribute
predicate = "masterVariant(sku=#{JSON.stringify("eqsmlg-9'2\"\"")})"
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) =>
expect(result.body.results[0].masterVariant.attributes[0].value).toEqual setTextAttribute.value
sampleUpdate = _.deepClone sampleImportJson
sampleUpdate.products[0].masterVariant.attributes.push setTextAttributeUpdated
@import._processBatches(sampleUpdate.products)
.then =>
expect(@import._summary.updated).toBe 1
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
expect(result.body.results[0].masterVariant.attributes[0].value).toEqual setTextAttributeUpdated.value
done()
.catch done
, 10000
it 'should filter unknown attributes and import product without errors', (done) ->
sampleImport = _.deepClone sampleImportJson
unknownAttribute =
name: 'unknownAttribute'
value: 'unknown value'
sampleImport.products[0].masterVariant.attributes.push unknownAttribute
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
expect(@import._summary.unknownAttributeNames).toEqual ['unknownAttribute']
done()
.catch done
, 10000
it 'should update/create product with a new enum key', (done) ->
@logger.info ':: should update/create product with a new enum key'
sampleImport = _.deepClone sampleImportJson
productClone = _.deepClone(sampleImport.products[0])
existingEnumKeyAttr =
name: 'sample_enum_attribute'
value: 'enum-1-key'
newEnumKeyAttr =
name: 'sample_enum_attribute'
value: 'enum-new-key'
expectedNewEnumKeyAttr =
name: 'sample_enum_attribute'
value:
key: 'enum-new-key'
label: 'enum-new-key'
sampleImport.products[0].masterVariant.attributes.push(existingEnumKeyAttr)
sampleImport.products[0].masterVariant.attributes.push(newEnumKeyAttr)
predicate = "masterVariant(sku=#{JSON.stringify("eqsmlg-9'2\"\"")})"
# create product
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) =>
product = result.body.results[0]
expect(product.masterVariant.attributes[0]).toEqual
name: 'sample_enum_attribute',
value:
label: 'Enum 1 Label',
key: 'enum-1-key'
productClone.masterVariant.attributes.push(newEnumKeyAttr)
# update product
@import._processBatches([productClone])
.then =>
expect(@import._summary.updated).toBe 1
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
product = result.body.results[0]
expect(product.masterVariant.attributes[0]).toEqual expectedNewEnumKeyAttr
done()
.catch done
it 'should create product with custom price attributes', (done) ->
@logger.info ':: should create product with custom price attributes'
product = _.deepClone sampleImportJson.products[0]
predicate = "masterVariant(sku=#{JSON.stringify(product.masterVariant.sku)})"
prices = [
value:
currencyCode: "EUR",
centAmount: 329,
country: "DE",
validFrom: "2016-10-08T00:00:00.000Z",
validUntil: "9999-12-31T00:00:00.000Z",
custom:
type:
typeId: "type",
id: sampleType.key,
fields:
custom1: 20161008063011,
custom2: "string",
]
# inject prices with custom fields
product.masterVariant.prices = prices
product.variants[0].prices = prices
product.variants[1].prices = prices
@import._processBatches([product])
.then =>
expect(@import._summary.created).toBe 1
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
expectedFields = prices[0].custom.fields
savedFields = result.body.results[0].masterVariant.prices[0].custom.fields
expect(savedFields).toEqual(expectedFields)
done()
.catch done
it 'should throw an error when importing non existing price reference', (done) ->
@logger.info ':: should throw an error when importing non existing price reference'
product = _.deepClone sampleImportJson.products[0]
errorLogger = null
errorCount = 0
error = null
prices = [
value:
currencyCode: "EUR",
centAmount: 329,
country: "DE",
validFrom: "2016-10-08T00:00:00.000Z",
validUntil: "9999-12-31T00:00:00.000Z",
custom:
type:
typeId: "type",
id: "wrong-price-reference",
fields:
custom1: 20161008063011,
custom2: "string",
]
# inject prices with custom fields
product.masterVariant.prices = prices
@import.errorCallback = (err, logger) ->
errorLogger = logger
errorCount += 1
error = err.toString()
@import._processBatches([product])
.then =>
expectedError = "Didn\'t find any match while resolving types (key=\"#{prices[0].custom.type.id}\")"
expect(@import._summary.failed).toBe 1
expect(errorCount).toBe 1
expect(error).toBe expectedError
expect(_.isObject(errorLogger)).toBe true
done()
.catch done
| 146143 | debug = require('debug')('spec:it:sphere-product-import')
_ = require 'underscore'
_.mixin require 'underscore-mixins'
{ProductImport} = require '../lib'
ClientConfig = require '../config'
Promise = require 'bluebird'
path = require 'path'
fs = require 'fs-extra'
jasmine = require 'jasmine-node'
{ deleteProducts } = require './integration/test-helper'
{ExtendedLogger} = require 'sphere-node-utils'
package_json = require '../package.json'
sampleImportJson = require '../samples/import.json'
sampleProductType = require '../samples/sample-product-type.json'
sampleType = require '../samples/sample-type.json'
sampleCategory = require '../samples/sample-category.json'
sampleTaxCategory = require '../samples/sample-tax-category.json'
frozenTimeStamp = new Date().getTime()
ensureResource = (service, predicate, sampleData) ->
debug 'Ensuring existence for: %s', predicate
service.where(predicate).fetch()
.then (result) ->
if result.statusCode is 200 and result.body.count is 0
service.create(sampleData)
.then (result) ->
debug "Sample #{JSON.stringify(result.body.name, null, 2)} created with id: #{result.body.id}"
Promise.resolve()
else
Promise.resolve()
describe 'Product import integration tests', ->
beforeEach (done) ->
@logger = new ExtendedLogger
additionalFields:
project_key: ClientConfig.config.project_key
logConfig:
name: "#{package_json.name}-#{package_json.version}"
streams: [
{ level: 'info', stream: process.stdout }
]
errorDir = path.join(__dirname, '../errors')
fs.emptyDirSync(errorDir)
Config =
clientConfig: ClientConfig
errorDir: errorDir
errorLimit: 30
ensureEnums: true
blackList: ['prices']
filterUnknownAttributes: true
@import = new ProductImport @logger, Config
@client = @import.client
@logger.info 'About to setup...'
deleteProducts(@logger, @client)
.then => ensureResource(@client.productTypes, 'name="Sample Product Type"', sampleProductType)
.then => ensureResource(@client.categories, 'name(en="Snowboard equipment")', sampleCategory)
.then => ensureResource(@client.taxCategories, 'name="Standard tax category"', sampleTaxCategory)
.then => ensureResource(@client.types, "key=\"#{sampleType.key}\"", sampleType)
.then ->
done()
.catch (err) -> done(_.prettify err)
, 10000 # 10sec
afterEach (done) ->
@logger.info 'About to cleanup...'
deleteProducts(@logger, @client)
.then -> done()
.catch (err) -> done(_.prettify err)
, 10000 # 10sec
it 'should import two new products', (done) ->
sampleImport = _.deepClone(sampleImportJson)
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
expect(@import._summary.updated).toBe 0
@client.productProjections.staged(true).fetch()
.then (result) =>
fetchedProducts = result.body.results
expect(_.size fetchedProducts).toBe 2
fetchedSkus = @import._extractUniqueSkus(fetchedProducts)
sampleSkus = @import._extractUniqueSkus(sampleImport.products)
commonSkus = _.intersection(sampleSkus,fetchedSkus)
expect(_.size commonSkus).toBe _.size sampleSkus
predicate = "masterVariant(sku=#{JSON.stringify(sampleImport.products[0].masterVariant.sku)})"
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
fetchedProduct = result.body.results
expect(_.size fetchedProduct[0].variants).toBe _.size sampleImport.products[0].variants
expect(fetchedProduct[0].name).toEqual sampleImport.products[0].name
expect(fetchedProduct[0].slug).toEqual sampleImport.products[0].slug
done()
.catch done
, 10000
it 'should do nothing for empty products list', (done) ->
@import._processBatches([])
.then =>
expect(@import._summary.created).toBe 0
expect(@import._summary.updated).toBe 0
done()
.catch done
, 10000
it 'should generate missing slug', (done) ->
sampleImport = _.deepClone(sampleImportJson)
delete sampleImport.products[0].slug
delete sampleImport.products[1].slug
spyOn(@import, "_generateUniqueToken").andReturn("#{frozenTimeStamp}")
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
expect(@import._summary.updated).toBe 0
predicate = "masterVariant(sku=#{JSON.stringify(sampleImport.products[0].masterVariant.sku)})"
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
fetchedProduct = result.body.results
expect(fetchedProduct[0].slug.en).toBe "product-sync-test-product-1-#{frozenTimeStamp}"
done()
.catch done
, 10000
it 'should update existing product', (done) ->
sampleImport = _.deepClone(sampleImportJson)
sampleUpdateRef = _.deepClone(sampleImportJson)
sampleUpdate = _.deepClone(sampleImportJson)
sampleUpdate.products = _.without(sampleUpdateRef.products,sampleUpdateRef.products[1])
sampleUpdate.products[0].variants = _.without(sampleUpdateRef.products[0].variants,sampleUpdateRef.products[0].variants[1])
sampleImport.products[0].name.de = 'Product_Sync_Test_Product_1_German'
sampleAttribute1 =
name: 'product_id'
value: 'sampe_product_id1'
sampleImport.products[0].masterVariant.attributes.push(sampleAttribute1)
sampleImport.products[0].variants[0].attributes.push(sampleAttribute1)
samplePrice =
value:
centAmount: 666
currencyCode: 'JPY'
country: 'JP'
sampleImport.products[0].variants[0].prices = [samplePrice]
spyOn(@import.sync, 'buildActions').andCallThrough()
@import._processBatches(sampleUpdate.products)
.then =>
expect(@import._summary.created).toBe 1
@import._resetSummary()
@import._resetCache()
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 1
expect(@import._summary.updated).toBe 1
expect(@import.sync.buildActions).toHaveBeenCalledWith(jasmine.any(Object), jasmine.any(Object), ['sample_attribute_1'], undefined)
predicate = "masterVariant(sku=#{JSON.stringify(sampleImport.products[0].masterVariant.sku)})"
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
expect(_.size result.body.results[0].variants).toBe 2
done()
.catch (err) -> done(_.prettify err.body)
, 10000
it 'should continue on error - duplicate slug', (done) ->
# FIXME: looks like the API doesn't correctly validate for duplicate slugs
# for 2 concurrent requests (this happens randomly).
# For now we have to test it as 2 separate imports.
sampleImport = _.deepClone sampleImportJson
@import._processBatches([sampleImport.products[0]])
.then =>
expect(@import._summary.created).toBe
sampleImport2 = _.deepClone sampleImportJson
sampleImport2.products[1].slug.en = 'product-sync-test-product-1'
@import._resetSummary()
@import._processBatches([sampleImport2.products[1]])
.then =>
# import should fail because product 1 has same slug
expect(@import._summary.failed).toBe 1
expect(@import._summary.created).toBe 0
errorJson = require path.join(@import.errorDir,'error-1.json')
expect(errorJson.body.errors.length).toBe 1
error = errorJson.body.errors[0]
expect(error.code).toEqual "DuplicateField"
expect(error.duplicateValue).toEqual "product-sync-test-product-1"
expect(error.field).toEqual "slug.en"
done()
.catch done
it 'should continue of error - missing product name', (done) ->
deleteProducts(@logger, @client)
.then => ensureResource(@client.productTypes, 'name="Sample Product Type"', sampleProductType)
.then =>
sampleImport = _.deepClone sampleImportJson
delete sampleImport.products[1].name
delete sampleImport.products[1].slug
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.failed).toBe 1
expect(@import._summary.created).toBe 1
done()
.catch done
, 10000
it 'should handle set type attributes correctly', (done) ->
sampleImport = _.deepClone sampleImportJson
setTextAttribute =
name: 'sample_set_text'
value: ['text_1', 'text_2']
setTextAttributeUpdated =
name: 'sample_set_text'
value: ['text_1', 'text_2', 'text_3']
sampleImport.products[0].masterVariant.attributes.push setTextAttribute
predicate = "masterVariant(sku=#{JSON.stringify("eqsmlg-9'2\"\"")})"
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) =>
expect(result.body.results[0].masterVariant.attributes[0].value).toEqual setTextAttribute.value
sampleUpdate = _.deepClone sampleImportJson
sampleUpdate.products[0].masterVariant.attributes.push setTextAttributeUpdated
@import._processBatches(sampleUpdate.products)
.then =>
expect(@import._summary.updated).toBe 1
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
expect(result.body.results[0].masterVariant.attributes[0].value).toEqual setTextAttributeUpdated.value
done()
.catch done
, 10000
it 'should filter unknown attributes and import product without errors', (done) ->
sampleImport = _.deepClone sampleImportJson
unknownAttribute =
name: 'unknownAttribute'
value: 'unknown value'
sampleImport.products[0].masterVariant.attributes.push unknownAttribute
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
expect(@import._summary.unknownAttributeNames).toEqual ['unknownAttribute']
done()
.catch done
, 10000
it 'should update/create product with a new enum key', (done) ->
@logger.info ':: should update/create product with a new enum key'
sampleImport = _.deepClone sampleImportJson
productClone = _.deepClone(sampleImport.products[0])
existingEnumKeyAttr =
name: 'sample_enum_attribute'
value: 'enum-1-key'
newEnumKeyAttr =
name: 'sample_enum_attribute'
value: 'enum-new-key'
expectedNewEnumKeyAttr =
name: 'sample_enum_attribute'
value:
key: 'enum-<KEY>'
label: 'enum-new-key'
sampleImport.products[0].masterVariant.attributes.push(existingEnumKeyAttr)
sampleImport.products[0].masterVariant.attributes.push(newEnumKeyAttr)
predicate = "masterVariant(sku=#{JSON.stringify("eqsmlg-9'2\"\"")})"
# create product
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) =>
product = result.body.results[0]
expect(product.masterVariant.attributes[0]).toEqual
name: 'sample_enum_attribute',
value:
label: 'Enum 1 Label',
key: 'enum<KEY>-1-key'
productClone.masterVariant.attributes.push(newEnumKeyAttr)
# update product
@import._processBatches([productClone])
.then =>
expect(@import._summary.updated).toBe 1
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
product = result.body.results[0]
expect(product.masterVariant.attributes[0]).toEqual expectedNewEnumKeyAttr
done()
.catch done
it 'should create product with custom price attributes', (done) ->
@logger.info ':: should create product with custom price attributes'
product = _.deepClone sampleImportJson.products[0]
predicate = "masterVariant(sku=#{JSON.stringify(product.masterVariant.sku)})"
prices = [
value:
currencyCode: "EUR",
centAmount: 329,
country: "DE",
validFrom: "2016-10-08T00:00:00.000Z",
validUntil: "9999-12-31T00:00:00.000Z",
custom:
type:
typeId: "type",
id: sampleType.key,
fields:
custom1: 20161008063011,
custom2: "string",
]
# inject prices with custom fields
product.masterVariant.prices = prices
product.variants[0].prices = prices
product.variants[1].prices = prices
@import._processBatches([product])
.then =>
expect(@import._summary.created).toBe 1
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
expectedFields = prices[0].custom.fields
savedFields = result.body.results[0].masterVariant.prices[0].custom.fields
expect(savedFields).toEqual(expectedFields)
done()
.catch done
it 'should throw an error when importing non existing price reference', (done) ->
@logger.info ':: should throw an error when importing non existing price reference'
product = _.deepClone sampleImportJson.products[0]
errorLogger = null
errorCount = 0
error = null
prices = [
value:
currencyCode: "EUR",
centAmount: 329,
country: "DE",
validFrom: "2016-10-08T00:00:00.000Z",
validUntil: "9999-12-31T00:00:00.000Z",
custom:
type:
typeId: "type",
id: "wrong-price-reference",
fields:
custom1: 20161008063011,
custom2: "string",
]
# inject prices with custom fields
product.masterVariant.prices = prices
@import.errorCallback = (err, logger) ->
errorLogger = logger
errorCount += 1
error = err.toString()
@import._processBatches([product])
.then =>
expectedError = "Didn\'t find any match while resolving types (key=\"#{prices[0].custom.type.id}\")"
expect(@import._summary.failed).toBe 1
expect(errorCount).toBe 1
expect(error).toBe expectedError
expect(_.isObject(errorLogger)).toBe true
done()
.catch done
| true | debug = require('debug')('spec:it:sphere-product-import')
_ = require 'underscore'
_.mixin require 'underscore-mixins'
{ProductImport} = require '../lib'
ClientConfig = require '../config'
Promise = require 'bluebird'
path = require 'path'
fs = require 'fs-extra'
jasmine = require 'jasmine-node'
{ deleteProducts } = require './integration/test-helper'
{ExtendedLogger} = require 'sphere-node-utils'
package_json = require '../package.json'
sampleImportJson = require '../samples/import.json'
sampleProductType = require '../samples/sample-product-type.json'
sampleType = require '../samples/sample-type.json'
sampleCategory = require '../samples/sample-category.json'
sampleTaxCategory = require '../samples/sample-tax-category.json'
frozenTimeStamp = new Date().getTime()
ensureResource = (service, predicate, sampleData) ->
debug 'Ensuring existence for: %s', predicate
service.where(predicate).fetch()
.then (result) ->
if result.statusCode is 200 and result.body.count is 0
service.create(sampleData)
.then (result) ->
debug "Sample #{JSON.stringify(result.body.name, null, 2)} created with id: #{result.body.id}"
Promise.resolve()
else
Promise.resolve()
describe 'Product import integration tests', ->
beforeEach (done) ->
@logger = new ExtendedLogger
additionalFields:
project_key: ClientConfig.config.project_key
logConfig:
name: "#{package_json.name}-#{package_json.version}"
streams: [
{ level: 'info', stream: process.stdout }
]
errorDir = path.join(__dirname, '../errors')
fs.emptyDirSync(errorDir)
Config =
clientConfig: ClientConfig
errorDir: errorDir
errorLimit: 30
ensureEnums: true
blackList: ['prices']
filterUnknownAttributes: true
@import = new ProductImport @logger, Config
@client = @import.client
@logger.info 'About to setup...'
deleteProducts(@logger, @client)
.then => ensureResource(@client.productTypes, 'name="Sample Product Type"', sampleProductType)
.then => ensureResource(@client.categories, 'name(en="Snowboard equipment")', sampleCategory)
.then => ensureResource(@client.taxCategories, 'name="Standard tax category"', sampleTaxCategory)
.then => ensureResource(@client.types, "key=\"#{sampleType.key}\"", sampleType)
.then ->
done()
.catch (err) -> done(_.prettify err)
, 10000 # 10sec
afterEach (done) ->
@logger.info 'About to cleanup...'
deleteProducts(@logger, @client)
.then -> done()
.catch (err) -> done(_.prettify err)
, 10000 # 10sec
it 'should import two new products', (done) ->
sampleImport = _.deepClone(sampleImportJson)
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
expect(@import._summary.updated).toBe 0
@client.productProjections.staged(true).fetch()
.then (result) =>
fetchedProducts = result.body.results
expect(_.size fetchedProducts).toBe 2
fetchedSkus = @import._extractUniqueSkus(fetchedProducts)
sampleSkus = @import._extractUniqueSkus(sampleImport.products)
commonSkus = _.intersection(sampleSkus,fetchedSkus)
expect(_.size commonSkus).toBe _.size sampleSkus
predicate = "masterVariant(sku=#{JSON.stringify(sampleImport.products[0].masterVariant.sku)})"
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
fetchedProduct = result.body.results
expect(_.size fetchedProduct[0].variants).toBe _.size sampleImport.products[0].variants
expect(fetchedProduct[0].name).toEqual sampleImport.products[0].name
expect(fetchedProduct[0].slug).toEqual sampleImport.products[0].slug
done()
.catch done
, 10000
it 'should do nothing for empty products list', (done) ->
@import._processBatches([])
.then =>
expect(@import._summary.created).toBe 0
expect(@import._summary.updated).toBe 0
done()
.catch done
, 10000
it 'should generate missing slug', (done) ->
sampleImport = _.deepClone(sampleImportJson)
delete sampleImport.products[0].slug
delete sampleImport.products[1].slug
spyOn(@import, "_generateUniqueToken").andReturn("#{frozenTimeStamp}")
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
expect(@import._summary.updated).toBe 0
predicate = "masterVariant(sku=#{JSON.stringify(sampleImport.products[0].masterVariant.sku)})"
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
fetchedProduct = result.body.results
expect(fetchedProduct[0].slug.en).toBe "product-sync-test-product-1-#{frozenTimeStamp}"
done()
.catch done
, 10000
it 'should update existing product', (done) ->
sampleImport = _.deepClone(sampleImportJson)
sampleUpdateRef = _.deepClone(sampleImportJson)
sampleUpdate = _.deepClone(sampleImportJson)
sampleUpdate.products = _.without(sampleUpdateRef.products,sampleUpdateRef.products[1])
sampleUpdate.products[0].variants = _.without(sampleUpdateRef.products[0].variants,sampleUpdateRef.products[0].variants[1])
sampleImport.products[0].name.de = 'Product_Sync_Test_Product_1_German'
sampleAttribute1 =
name: 'product_id'
value: 'sampe_product_id1'
sampleImport.products[0].masterVariant.attributes.push(sampleAttribute1)
sampleImport.products[0].variants[0].attributes.push(sampleAttribute1)
samplePrice =
value:
centAmount: 666
currencyCode: 'JPY'
country: 'JP'
sampleImport.products[0].variants[0].prices = [samplePrice]
spyOn(@import.sync, 'buildActions').andCallThrough()
@import._processBatches(sampleUpdate.products)
.then =>
expect(@import._summary.created).toBe 1
@import._resetSummary()
@import._resetCache()
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 1
expect(@import._summary.updated).toBe 1
expect(@import.sync.buildActions).toHaveBeenCalledWith(jasmine.any(Object), jasmine.any(Object), ['sample_attribute_1'], undefined)
predicate = "masterVariant(sku=#{JSON.stringify(sampleImport.products[0].masterVariant.sku)})"
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
expect(_.size result.body.results[0].variants).toBe 2
done()
.catch (err) -> done(_.prettify err.body)
, 10000
it 'should continue on error - duplicate slug', (done) ->
# FIXME: looks like the API doesn't correctly validate for duplicate slugs
# for 2 concurrent requests (this happens randomly).
# For now we have to test it as 2 separate imports.
sampleImport = _.deepClone sampleImportJson
@import._processBatches([sampleImport.products[0]])
.then =>
expect(@import._summary.created).toBe
sampleImport2 = _.deepClone sampleImportJson
sampleImport2.products[1].slug.en = 'product-sync-test-product-1'
@import._resetSummary()
@import._processBatches([sampleImport2.products[1]])
.then =>
# import should fail because product 1 has same slug
expect(@import._summary.failed).toBe 1
expect(@import._summary.created).toBe 0
errorJson = require path.join(@import.errorDir,'error-1.json')
expect(errorJson.body.errors.length).toBe 1
error = errorJson.body.errors[0]
expect(error.code).toEqual "DuplicateField"
expect(error.duplicateValue).toEqual "product-sync-test-product-1"
expect(error.field).toEqual "slug.en"
done()
.catch done
it 'should continue of error - missing product name', (done) ->
deleteProducts(@logger, @client)
.then => ensureResource(@client.productTypes, 'name="Sample Product Type"', sampleProductType)
.then =>
sampleImport = _.deepClone sampleImportJson
delete sampleImport.products[1].name
delete sampleImport.products[1].slug
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.failed).toBe 1
expect(@import._summary.created).toBe 1
done()
.catch done
, 10000
it 'should handle set type attributes correctly', (done) ->
sampleImport = _.deepClone sampleImportJson
setTextAttribute =
name: 'sample_set_text'
value: ['text_1', 'text_2']
setTextAttributeUpdated =
name: 'sample_set_text'
value: ['text_1', 'text_2', 'text_3']
sampleImport.products[0].masterVariant.attributes.push setTextAttribute
predicate = "masterVariant(sku=#{JSON.stringify("eqsmlg-9'2\"\"")})"
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) =>
expect(result.body.results[0].masterVariant.attributes[0].value).toEqual setTextAttribute.value
sampleUpdate = _.deepClone sampleImportJson
sampleUpdate.products[0].masterVariant.attributes.push setTextAttributeUpdated
@import._processBatches(sampleUpdate.products)
.then =>
expect(@import._summary.updated).toBe 1
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
expect(result.body.results[0].masterVariant.attributes[0].value).toEqual setTextAttributeUpdated.value
done()
.catch done
, 10000
it 'should filter unknown attributes and import product without errors', (done) ->
sampleImport = _.deepClone sampleImportJson
unknownAttribute =
name: 'unknownAttribute'
value: 'unknown value'
sampleImport.products[0].masterVariant.attributes.push unknownAttribute
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
expect(@import._summary.unknownAttributeNames).toEqual ['unknownAttribute']
done()
.catch done
, 10000
it 'should update/create product with a new enum key', (done) ->
@logger.info ':: should update/create product with a new enum key'
sampleImport = _.deepClone sampleImportJson
productClone = _.deepClone(sampleImport.products[0])
existingEnumKeyAttr =
name: 'sample_enum_attribute'
value: 'enum-1-key'
newEnumKeyAttr =
name: 'sample_enum_attribute'
value: 'enum-new-key'
expectedNewEnumKeyAttr =
name: 'sample_enum_attribute'
value:
key: 'enum-PI:KEY:<KEY>END_PI'
label: 'enum-new-key'
sampleImport.products[0].masterVariant.attributes.push(existingEnumKeyAttr)
sampleImport.products[0].masterVariant.attributes.push(newEnumKeyAttr)
predicate = "masterVariant(sku=#{JSON.stringify("eqsmlg-9'2\"\"")})"
# create product
@import._processBatches(sampleImport.products)
.then =>
expect(@import._summary.created).toBe 2
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) =>
product = result.body.results[0]
expect(product.masterVariant.attributes[0]).toEqual
name: 'sample_enum_attribute',
value:
label: 'Enum 1 Label',
key: 'enumPI:KEY:<KEY>END_PI-1-key'
productClone.masterVariant.attributes.push(newEnumKeyAttr)
# update product
@import._processBatches([productClone])
.then =>
expect(@import._summary.updated).toBe 1
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
product = result.body.results[0]
expect(product.masterVariant.attributes[0]).toEqual expectedNewEnumKeyAttr
done()
.catch done
it 'should create product with custom price attributes', (done) ->
@logger.info ':: should create product with custom price attributes'
product = _.deepClone sampleImportJson.products[0]
predicate = "masterVariant(sku=#{JSON.stringify(product.masterVariant.sku)})"
prices = [
value:
currencyCode: "EUR",
centAmount: 329,
country: "DE",
validFrom: "2016-10-08T00:00:00.000Z",
validUntil: "9999-12-31T00:00:00.000Z",
custom:
type:
typeId: "type",
id: sampleType.key,
fields:
custom1: 20161008063011,
custom2: "string",
]
# inject prices with custom fields
product.masterVariant.prices = prices
product.variants[0].prices = prices
product.variants[1].prices = prices
@import._processBatches([product])
.then =>
expect(@import._summary.created).toBe 1
@client.productProjections.where(predicate).staged(true).fetch()
.then (result) ->
expectedFields = prices[0].custom.fields
savedFields = result.body.results[0].masterVariant.prices[0].custom.fields
expect(savedFields).toEqual(expectedFields)
done()
.catch done
it 'should throw an error when importing non existing price reference', (done) ->
@logger.info ':: should throw an error when importing non existing price reference'
product = _.deepClone sampleImportJson.products[0]
errorLogger = null
errorCount = 0
error = null
prices = [
value:
currencyCode: "EUR",
centAmount: 329,
country: "DE",
validFrom: "2016-10-08T00:00:00.000Z",
validUntil: "9999-12-31T00:00:00.000Z",
custom:
type:
typeId: "type",
id: "wrong-price-reference",
fields:
custom1: 20161008063011,
custom2: "string",
]
# inject prices with custom fields
product.masterVariant.prices = prices
@import.errorCallback = (err, logger) ->
errorLogger = logger
errorCount += 1
error = err.toString()
@import._processBatches([product])
.then =>
expectedError = "Didn\'t find any match while resolving types (key=\"#{prices[0].custom.type.id}\")"
expect(@import._summary.failed).toBe 1
expect(errorCount).toBe 1
expect(error).toBe expectedError
expect(_.isObject(errorLogger)).toBe true
done()
.catch done
|
[
{
"context": "###\nGulp task build\n@create 2014-10-07\n@author KoutarouYabe <idolm@ster.pw>\n###\n\nmodule.exports = (gulp, plug",
"end": 59,
"score": 0.9998940825462341,
"start": 47,
"tag": "NAME",
"value": "KoutarouYabe"
},
{
"context": "sk build\n@create 2014-10-07\n@author Kouta... | tasks/register/build.coffee | moorvin/Sea-Fight | 1 | ###
Gulp task build
@create 2014-10-07
@author KoutarouYabe <idolm@ster.pw>
###
module.exports = (gulp, plugins)->
gulp.task 'build', (cb)->
plugins.sequence(
'compileAssets',
cb
)
| 96794 | ###
Gulp task build
@create 2014-10-07
@author <NAME> <<EMAIL>>
###
module.exports = (gulp, plugins)->
gulp.task 'build', (cb)->
plugins.sequence(
'compileAssets',
cb
)
| true | ###
Gulp task build
@create 2014-10-07
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
module.exports = (gulp, plugins)->
gulp.task 'build', (cb)->
plugins.sequence(
'compileAssets',
cb
)
|
[
{
"context": "e UI.Button unit tests\n#\n# Copyright (C) 2012-2013 Nikolay Nemshilov\n#\n{Test} = require('lovely')\n\ndescribe 'UI.Button",
"end": 74,
"score": 0.9998878240585327,
"start": 57,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | ui/core/test/button_test.coffee | lovely-io/lovely.io-stl | 2 | #
# The UI.Button unit tests
#
# Copyright (C) 2012-2013 Nikolay Nemshilov
#
{Test} = require('lovely')
describe 'UI.Button', ->
UI = Button = button = $ = null
before Test.load (build, win)->
UI = build
Button = UI.Button
button = new Button('some text')
$ = win.Lovely.module('dom')
it "should build buttons", ->
button.should.be.instanceOf Button
button._.tagName.should.equal 'BUTTON'
it "should inherit the Input class", ->
button.should.be.instanceOf $.Input
it "should have type of 'button'", ->
button._.type.should.equal 'button'
it "should assign the 'lui-button' class", ->
button._.className.should.equal 'lui-button'
it "should assign the button label as the HTML", ->
button._.innerHTML.should.equal 'some text'
it "should accept normal html options", ->
b = new Button('new text', id: 'my-id', class: 'my-class')
b._.id.should.equal 'my-id'
b._.className.should.equal 'my-class lui-button'
b._.innerHTML.should.equal 'new text'
| 132987 | #
# The UI.Button unit tests
#
# Copyright (C) 2012-2013 <NAME>
#
{Test} = require('lovely')
describe 'UI.Button', ->
UI = Button = button = $ = null
before Test.load (build, win)->
UI = build
Button = UI.Button
button = new Button('some text')
$ = win.Lovely.module('dom')
it "should build buttons", ->
button.should.be.instanceOf Button
button._.tagName.should.equal 'BUTTON'
it "should inherit the Input class", ->
button.should.be.instanceOf $.Input
it "should have type of 'button'", ->
button._.type.should.equal 'button'
it "should assign the 'lui-button' class", ->
button._.className.should.equal 'lui-button'
it "should assign the button label as the HTML", ->
button._.innerHTML.should.equal 'some text'
it "should accept normal html options", ->
b = new Button('new text', id: 'my-id', class: 'my-class')
b._.id.should.equal 'my-id'
b._.className.should.equal 'my-class lui-button'
b._.innerHTML.should.equal 'new text'
| true | #
# The UI.Button unit tests
#
# Copyright (C) 2012-2013 PI:NAME:<NAME>END_PI
#
{Test} = require('lovely')
describe 'UI.Button', ->
UI = Button = button = $ = null
before Test.load (build, win)->
UI = build
Button = UI.Button
button = new Button('some text')
$ = win.Lovely.module('dom')
it "should build buttons", ->
button.should.be.instanceOf Button
button._.tagName.should.equal 'BUTTON'
it "should inherit the Input class", ->
button.should.be.instanceOf $.Input
it "should have type of 'button'", ->
button._.type.should.equal 'button'
it "should assign the 'lui-button' class", ->
button._.className.should.equal 'lui-button'
it "should assign the button label as the HTML", ->
button._.innerHTML.should.equal 'some text'
it "should accept normal html options", ->
b = new Button('new text', id: 'my-id', class: 'my-class')
b._.id.should.equal 'my-id'
b._.className.should.equal 'my-class lui-button'
b._.innerHTML.should.equal 'new text'
|
[
{
"context": "Mapping\"\n \"Maps\"\n \"MargulisCode\"\n \"MarkGroebner\"\n \"Mass\"\n \"MasseyProduct\"\n \"MatRep",
"end": 107809,
"score": 0.9453768134117126,
"start": 107797,
"tag": "USERNAME",
"value": "MarkGroebner"
},
{
"context": "obian\"\n \... | settings/magma.cson | thomasgemunden/language-magma | 0 | ".source.Magma":
editor:
increaseIndentPattern: "^\\s*(function|procedure|if|for|while|elif|else|case|when|repeat|try|catch)[^;]*$|^\\s*\\b([A-Za-z_][A-Za-z0-9_]*)\\b\\s*:=\\s*\\b(function|procedure)\\b.*$"
decreaseIndentPattern: "^\\s*((end (for|if|procedure|function|case|while|try))|else|elif|until)\\b.*"
completions: [
"AFRNumber"
"AGCode"
"AGDecode"
"AGDualCode"
"AGL"
"AGM"
"AGammaL"
"AHom"
"AHomOverCentralizingField"
"AInfinityRecord"
"AModule"
"APNCompleteGeneration"
"APNGeneration"
"APNMatrix"
"APNRationalGeneration"
"AQInvariants"
"AQPrimes"
"ASL"
"ASigmaL"
"ATLASGroup"
"ATLASGroupNames"
"AbelianBasis"
"AbelianExtension"
"AbelianGroup"
"AbelianInvariants"
"AbelianLieAlgebra"
"AbelianNormalQuotient"
"AbelianNormalSubgroup"
"AbelianNormalSubgroupSSS"
"AbelianQuotient"
"AbelianQuotientInvariants"
"AbelianQuotientRewrite"
"AbelianSection"
"AbelianSubfield"
"AbelianSubgroups"
"AbelianpExtension"
"Abs"
"AbsDenominator"
"AbsEltseq"
"AbsIrrApplyConjugation"
"AbsIrrApplyGalois"
"AbsIrrFromMap"
"AbsIrrFromModul"
"AbsoluteAffineAlgebra"
"AbsoluteAlgebra"
"AbsoluteBasis"
"AbsoluteCartanMatrix"
"AbsoluteCharacteristicPolynomial"
"AbsoluteDecomposition"
"AbsoluteDegree"
"AbsoluteDiscriminant"
"AbsoluteField"
"AbsoluteFrobenius"
"AbsoluteFunctionField"
"AbsoluteGaloisGroup"
"AbsoluteInertiaDegree"
"AbsoluteInertiaIndex"
"AbsoluteInvariants"
"AbsoluteLogarithmicHeight"
"AbsoluteMinimalPolynomial"
"AbsoluteModuleOverMinimalField"
"AbsoluteModulesOverMinimalField"
"AbsoluteNorm"
"AbsoluteOrder"
"AbsolutePolynomial"
"AbsolutePrecision"
"AbsoluteQuotientRing"
"AbsoluteRamificationDegree"
"AbsoluteRamificationIndex"
"AbsoluteRank"
"AbsoluteRationalScroll"
"AbsoluteRepresentation"
"AbsoluteRepresentationMatrix"
"AbsoluteTotallyRamifiedExtension"
"AbsoluteTrace"
"AbsoluteValue"
"AbsoluteValues"
"AbsolutelyIrreducibleConstituents"
"AbsolutelyIrreducibleModule"
"AbsolutelyIrreducibleModules"
"AbsolutelyIrreducibleModulesBurnside"
"AbsolutelyIrreducibleModulesDelete"
"AbsolutelyIrreducibleModulesInit"
"AbsolutelyIrreducibleModulesSchur"
"AbsolutelyIrreducibleRepresentationProcessDelete"
"AbsolutelyIrreducibleRepresentationsApply"
"AbsolutelyIrreducibleRepresentationsDelete"
"AbsolutelyIrreducibleRepresentationsInit"
"AbsolutelyIrreducibleRepresentationsProcess"
"AbsolutelyIrreducibleRepresentationsProcessDegree"
"AbsolutelyIrreducibleRepresentationsProcessGroup"
"AbsolutelyIrreducibleRepresentationsSchur"
"Absolutize"
"Ac"
"ActingGroup"
"ActingWord"
"Action"
"ActionGenerator"
"ActionGenerators"
"ActionGroup"
"ActionImage"
"ActionKernel"
"ActionMatrix"
"ActionOnVector"
"AdamsOperator"
"AdaptedBasis"
"AdaptedBasisIndex"
"AdaptedBasisProcessAdd"
"AdaptedBasisProcessAddTest"
"AdaptedBasisProcessInit"
"Add"
"AddAttribute"
"AddAttributes"
"AddColumn"
"AddConstraints"
"AddCovers"
"AddCrossTerms"
"AddCubics"
"AddEdge"
"AddEdges"
"AddGenerator"
"AddMult"
"AddNormalizingGenerator"
"AddPrimes"
"AddRedundantGenerators"
"AddRelation"
"AddRelator"
"AddRepresentation"
"AddRow"
"AddScaledMatrix"
"AddSimplex"
"AddStrongGenerator"
"AddStrongGeneratorToLevel"
"AddSubgroupGenerator"
"AddVectorToLattice"
"AddVertex"
"AddVertices"
"AdditiveCode"
"AdditiveConstaCyclicCode"
"AdditiveCyclicCode"
"AdditiveGroup"
"AdditiveHilbert90"
"AdditiveMacWilliamsTransform"
"AdditiveOrder"
"AdditivePermutationCode"
"AdditivePolynomialFromRoots"
"AdditiveQuasiCyclicCode"
"AdditiveQuasiTwistedCyclicCode"
"AdditiveRepetitionCode"
"AdditiveUniverseCode"
"AdditiveZeroCode"
"AdditiveZeroSumCode"
"AdjacencyMatrix"
"Adjoin"
"Adjoint"
"AdjointAlgebra"
"AdjointGraph"
"AdjointIdeal"
"AdjointIdealForNodalCurve"
"AdjointLinearSystem"
"AdjointLinearSystemForNodalCurve"
"AdjointLinearSystemFromIdeal"
"AdjointMatrix"
"AdjointModule"
"AdjointPreimage"
"AdjointRepresentation"
"AdjointRepresentationDecomposition"
"AdjointVersion"
"Adjoints"
"AdmissableTriangleGroups"
"AdmissiblePair"
"Advance"
"Af"
"AffineAction"
"AffineAlgebra"
"AffineAlgebraMapKernel"
"AffineAmbient"
"AffineDecomposition"
"AffineGammaLinearGroup"
"AffineGeneralLinearGroup"
"AffineImage"
"AffineKernel"
"AffineLieAlgebra"
"AffineNormalForm"
"AffinePatch"
"AffinePlane"
"AffineRepresentative"
"AffineSigmaLinearGroup"
"AffineSpace"
"AffineSpecialLinearGroup"
"Agemo"
"Alarm"
"AlgComb"
"Algebra"
"AlgebraGenerators"
"AlgebraMap"
"AlgebraOverCenter"
"AlgebraOverFieldOfFractions"
"AlgebraStructure"
"AlgebraicClosure"
"AlgebraicGenerators"
"AlgebraicGeometricCode"
"AlgebraicGeometricDualCode"
"AlgebraicPowerSeries"
"AlgebraicToAnalytic"
"AlgorithmicFunctionField"
"AllCliques"
"AllCompactChainMaps"
"AllCones"
"AllDefiningPolynomials"
"AllExtensions"
"AllFaces"
"AllHomomorphisms"
"AllInformationSets"
"AllInverseDefiningPolynomials"
"AllIrreduciblePolynomials"
"AllLinearRelations"
"AllNilpotentLieAlgebras"
"AllPairsShortestPaths"
"AllParallelClasses"
"AllParallelisms"
"AllPartitions"
"AllPassants"
"AllRays"
"AllReductionMaps"
"AllReductionMaps_Factor"
"AllResolutions"
"AllRoots"
"AllSecants"
"AllSlopes"
"AllSolvableLieAlgebras"
"AllSqrts"
"AllSquareRoots"
"AllTangents"
"AllVertices"
"Alldeg"
"AllowableSubgroup"
"AlmostIntegralGModule"
"AlmostInvariantForm"
"AlmostSimpleGroupDatabase"
"Alphabet"
"AlphabetExtensionDegree"
"Alt"
"AlternantCode"
"AlternatingCharacter"
"AlternatingCharacterTable"
"AlternatingCharacterValue"
"AlternatingDominant"
"AlternatingElementToStandardWord"
"AlternatingElementToWord"
"AlternatingGroup"
"AlternatingPower"
"AlternatingSquarePreimage"
"AlternatingSum"
"AlternatingWeylSum"
"AlternativePatches"
"Ambient"
"AmbientLieAlgebra"
"AmbientMatrix"
"AmbientModule"
"AmbientSpace"
"AmbientVariety"
"AmbiguousForms"
"AnalyticDrinfeldModule"
"AnalyticHomomorphisms"
"AnalyticInformation"
"AnalyticJacobian"
"AnalyticModule"
"AnalyticRank"
"AnalyticRankNumberOfTerms"
"AnalyticRankQuadraticTwist"
"And"
"Angle"
"AnisotropicSubdatum"
"Annihilator"
"AntiAutomorphismTau"
"Antipode"
"AntisymmetricForms"
"AntisymmetricHermitianForms"
"AntisymmetricMatrix"
"AntisymmetricQuaternionicForms"
"AnyDescription"
"ApparentCodimension"
"ApparentEquationDegrees"
"ApparentSyzygyDegrees"
"Append"
"AppendBasePoint"
"AppendModule"
"Apply"
"ApplyAutomorphism"
"ApplyForAutgCoerce"
"ApplyTransformation"
"ApplyWeylElement"
"Approx"
"ApproximateByTorsionGroup"
"ApproximateByTorsionPoint"
"ApproximateOrder"
"ApproximateStabiliser"
"Arccos"
"Arccosec"
"Arccot"
"Arcsec"
"Arcsin"
"Arctan"
"Arctan2"
"AreCohomologous"
"AreCollinear"
"AreEqualMorphisms"
"AreEqualObjects"
"AreGenerators"
"AreIdentical"
"AreInvolutionsConjugate"
"AreLinearlyEquivalent"
"AreProportional"
"ArfInvariant"
"Arg"
"Argcosech"
"Argcosh"
"Argcoth"
"Argsech"
"Argsinh"
"Argtanh"
"Argument"
"ArithmeticGenus"
"ArithmeticGenusOfDesingularization"
"ArithmeticGeometricMean"
"ArithmeticLSeries"
"ArithmeticTriangleGroup"
"ArithmeticVolume"
"ArrowWeights"
"Arrows"
"ArtRepCreate"
"ArtinMap"
"ArtinRepresentation"
"ArtinRepresentations"
"ArtinSchreierExtension"
"ArtinSchreierImage"
"ArtinSchreierMap"
"ArtinSchreierSymbol"
"ArtinTateFormula"
"AsExtensionOf"
"AssertAttribute"
"AssertEmbedding"
"AssignBase"
"AssignCapacities"
"AssignCapacity"
"AssignEdgeLabels"
"AssignLDPCMatrix"
"AssignLabel"
"AssignLabels"
"AssignNamePrefix"
"AssignNames"
"AssignNamesBase"
"AssignVertexLabels"
"AssignWeight"
"AssignWeights"
"AssociatedEllipticCurve"
"AssociatedHyperellipticCurve"
"AssociatedNewSpace"
"AssociatedPrimitiveCharacter"
"AssociatedPrimitiveGrossencharacter"
"AssociativeAlgebra"
"AssociativeArray"
"AtEof"
"AteTPairing"
"AteqPairing"
"AtkinLehner"
"AtkinLehnerDecomposition"
"AtkinLehnerEigenvalue"
"AtkinLehnerInvolution"
"AtkinLehnerNumberOfFixedPoints"
"AtkinLehnerOperator"
"AtkinLehnerOperatorOverQ"
"AtkinLehnerPrimes"
"AtkinLehnerSubspace"
"AtkinModularEquation"
"AtkinModularPolynomial"
"AtlasGroup"
"AtlasGroupNames"
"AtlasVersionInfo"
"Attach"
"AttachSpec"
"AugmentCode"
"Augmentation"
"AugmentationIdeal"
"AugmentationMap"
"Aut"
"AutGpSG"
"AutPSp"
"AutoCorrelation"
"AutoDD"
"AutoDR"
"AutoDW"
"AutomaticGroup"
"Automorphism"
"AutomorphismGroup"
"AutomorphismGroupAsMatrixGroup"
"AutomorphismGroupFF"
"AutomorphismGroupOverCyclotomicExtension"
"AutomorphismGroupOverExtension"
"AutomorphismGroupOverQ"
"AutomorphismGroupPGroup"
"AutomorphismGroupPGroup2"
"AutomorphismGroupStabilizer"
"AutomorphismOmega"
"AutomorphismSubgroup"
"AutomorphismTalpha"
"AutomorphismWorld"
"Automorphisms"
"AutomorphousClasses"
"AuxiliaryLevel"
"AxisMultiplicities"
"BBSModulus"
"BCHBound"
"BCHCode"
"BDLC"
"BDLCLowerBound"
"BDLCUpperBound"
"BFSTree"
"BKLC"
"BKLCLowerBound"
"BKLCUpperBound"
"BKQC"
"BKZ"
"BLLC"
"BLLCLowerBound"
"BLLCUpperBound"
"BQPlotkinSum"
"BSGS"
"BSGSProcess"
"BString"
"BachBound"
"BacherPolynomialInternal"
"BacherPolynomialTestInternal"
"BadPlaces"
"BadPrimes"
"BaerDerivation"
"BaerSubplane"
"Ball"
"Bang"
"BarAutomorphism"
"Barvinok"
"BarycentricSubdivision"
"Base"
"Base64Decode"
"Base64DecodeFile"
"Base64Encode"
"Base64EncodeFile"
"BaseBlowupContribution"
"BaseCategory"
"BaseChange"
"BaseChangeMatrix"
"BaseChangedDefiningEquations"
"BaseComponent"
"BaseCurve"
"BaseElement"
"BaseExtend"
"BaseExtension"
"BaseExtensionMorphisms"
"BaseField"
"BaseGerm"
"BaseImage"
"BaseImageWordStrip"
"BaseLocus"
"BaseMPolynomial"
"BaseModule"
"BaseObject"
"BasePoint"
"BasePoints"
"BaseRing"
"BaseScheme"
"BaseSize"
"BaseSpace"
"BasicAlgebra"
"BasicAlgebraGrpPToBasicAlgebra"
"BasicAlgebraOfEndomorphismAlgebra"
"BasicAlgebraOfExtAlgebra"
"BasicAlgebraOfGroupAlgebra"
"BasicAlgebraOfHeckeAlgebra"
"BasicAlgebraOfMatrixAlgebra"
"BasicAlgebraOfSchurAlgebra"
"BasicAlgebraPGroup"
"BasicCodegrees"
"BasicDegrees"
"BasicOrbit"
"BasicOrbitLength"
"BasicOrbitLengths"
"BasicOrbits"
"BasicParameters"
"BasicRootMatrices"
"BasicStabiliser"
"BasicStabiliserChain"
"BasicStabilizer"
"BasicStabilizerChain"
"Basis"
"BasisChange"
"BasisDenominator"
"BasisElement"
"BasisMatrix"
"BasisMinus"
"BasisOfDegree0CoxMonomials"
"BasisOfDifferentialsFirstKind"
"BasisOfHolomorphicDifferentials"
"BasisOfRationalFunctionField"
"BasisPlus"
"BasisProduct"
"BasisProducts"
"BasisReduction"
"Basket"
"Bell"
"BerlekampMassey"
"Bernoulli"
"BernoulliApproximation"
"BernoulliNumber"
"BernoulliPolynomial"
"BesselFunction"
"BesselFunctionSecondKind"
"BestApproximation"
"BestDimensionLinearCode"
"BestKnownLinearCode"
"BestKnownQuantumCode"
"BestLengthLinearCode"
"BestTranslation"
"BetaFunction"
"BettiNumber"
"BettiNumbers"
"BettiTable"
"BianchiCuspForms"
"Bicomponents"
"BigO"
"BigPeriodMatrix"
"BigTorus"
"BilinearFormSign"
"BilinearFormType"
"BinaryCodedForm"
"BinaryForms"
"BinaryQuadraticForms"
"BinaryResidueCode"
"BinaryString"
"BinaryToBytes"
"BinaryTorsionCode"
"Binomial"
"BinomialPolynomial"
"BinomialToricEmbedding"
"BipartiteGraph"
"Bipartition"
"BiquadraticResidueSymbol"
"BitFlip"
"BitPrecision"
"BitwiseAnd"
"BitwiseNot"
"BitwiseOr"
"BitwiseXor"
"BlackboxGroup"
"Block"
"BlockDegree"
"BlockDegrees"
"BlockDiagMat"
"BlockDiagScalarMat"
"BlockGraph"
"BlockGroup"
"BlockMatrix"
"BlockSet"
"BlockSize"
"BlockSizes"
"BlockTranspose"
"Blocks"
"BlocksAction"
"BlocksImage"
"BlocksKernel"
"Blowup"
"BlumBlumShub"
"BlumBlumShubModulus"
"BogomolovNumber"
"BooleanPolynomial"
"BooleanPolynomialRing"
"Booleans"
"BorderedDoublyCirculantQRCode"
"Borel"
"BorelSubgroup"
"Bottom"
"Bound"
"Boundary"
"BoundaryIntersection"
"BoundaryMap"
"BoundaryMapGrpP"
"BoundaryMaps"
"BoundaryMatrix"
"BoundaryPoints"
"BoundedFSubspace"
"BoundingBox"
"BoxElements"
"BraidGroup"
"Branch"
"BranchVertexPath"
"BrandtModule"
"BrandtModuleDimension"
"BrauerCharacter"
"BrauerCharacterTable"
"BrauerClass"
"BravaisGroup"
"BreadthFirstSearchTree"
"Bruhat"
"BruhatDescendants"
"BruhatLessOrEqual"
"BuildHom"
"BurauRepresentation"
"BurnsideMatrix"
"BytesToString"
"C6Action"
"C6Basis"
"C6Image"
"C6Kernel"
"C6Parameters"
"C9AlternatingElementToStandardWord"
"C9AlternatingElementToWord"
"C9RecogniseAlternating"
"CFP"
"CGO"
"CGOMinus"
"CGOPlus"
"CGSp"
"CGU"
"CMPoints"
"CMTwists"
"CO"
"COMinus"
"COPlus"
"CRT"
"CSO"
"CSOMinus"
"CSOPlus"
"CSSCode"
"CSU"
"CSp"
"CU"
"CacheClearToricLattice"
"CacheClearToricVariety"
"CalabiYau"
"CalculateCanonicalClass"
"CalculateMultiplicities"
"CalculateRegularSpliceDiagram"
"CalculateTransverseIntersections"
"CalderbankShorSteaneCode"
"CambridgeMatrix"
"CanChangeRing"
"CanChangeUniverse"
"CanContinueEnumeration"
"CanDetermineIsomorphism"
"CanIdentifyGroup"
"CanMakeIntegral"
"CanMakeIntegralGModule"
"CanNormalize"
"CanReallyMakeIntegral"
"CanRedoEnumeration"
"CanSignNormalize"
"CanWriteOver"
"CanonicalBasis"
"CanonicalClass"
"CanonicalDegree"
"CanonicalDissidentPoints"
"CanonicalDivisor"
"CanonicalElements"
"CanonicalEmbedding"
"CanonicalFactorRepresentation"
"CanonicalGenerators"
"CanonicalGraph"
"CanonicalHeight"
"CanonicalImage"
"CanonicalInvolution"
"CanonicalLength"
"CanonicalLinearSystem"
"CanonicalLinearSystemFromIdeal"
"CanonicalMap"
"CanonicalModularEquation"
"CanonicalModularPolynomial"
"CanonicalModule"
"CanonicalMultiplicity"
"CanonicalRepresentation"
"CanonicalSheaf"
"Canonicalisation"
"CanteautChabaudsAttack"
"CantorComposition1"
"CantorComposition2"
"Capacities"
"Capacity"
"CarlitzModule"
"CarmichaelLambda"
"CartanInteger"
"CartanMatrix"
"CartanName"
"CartanSubalgebra"
"CarterSubgroup"
"CartesianPower"
"CartesianProduct"
"Cartier"
"CartierRepresentation"
"CartierToWeilMap"
"CasimirValue"
"CasselsMap"
"CasselsTatePairing"
"Catalan"
"Category"
"CayleyGraph"
"Ceiling"
"Cell"
"CellNumber"
"CellSize"
"CellSizeByPoint"
"Center"
"CenterDensity"
"CenterPolynomials"
"CentralCharacter"
"CentralCollineationGroup"
"CentralEndomorphisms"
"CentralExtension"
"CentralExtensionProcess"
"CentralExtensions"
"CentralIdempotents"
"CentralOrder"
"CentralProductDecomposition"
"CentralSumDecomposition"
"CentralValue"
"Centraliser"
"CentraliserOfInvolution"
"CentralisingMatrix"
"CentralisingRoots"
"Centralizer"
"CentralizerGLZ"
"CentralizerOfNormalSubgroup"
"Centre"
"CentreDensity"
"CentreOfEndomorphismAlgebra"
"CentreOfEndomorphismRing"
"CentrePolynomials"
"CentredAffinePatch"
"Chabauty"
"Chabauty0"
"ChabautyEquations"
"ChainComplex"
"ChainMap"
"ChainmapToCohomology"
"ChangGraphs"
"ChangeAmbient"
"ChangeBase"
"ChangeBasis"
"ChangeBasisCSAlgebra"
"ChangeDerivation"
"ChangeDifferential"
"ChangeDirectory"
"ChangeExponentDenominator"
"ChangeField"
"ChangeModel"
"ChangeN"
"ChangeOfBasisMatrix"
"ChangeOrder"
"ChangePrecision"
"ChangeRepresentationType"
"ChangeRing"
"ChangeRingAlgLie"
"ChangeSign"
"ChangeSupport"
"ChangeUniverse"
"Char"
"Character"
"CharacterDegrees"
"CharacterDegreesPGroup"
"CharacterField"
"CharacterFromTraces"
"CharacterMultiset"
"CharacterOfImage"
"CharacterRing"
"CharacterTable"
"CharacterTableConlon"
"CharacterTableDS"
"CharacterToModular"
"CharacterToRepresentation"
"Characteristic"
"CharacteristicPolynomial"
"CharacteristicPolynomialFromTraces"
"CharacteristicSeries"
"CharacteristicVector"
"Characters"
"CharpolyOfFrobenius"
"ChebyshevFirst"
"ChebyshevSecond"
"ChebyshevT"
"ChebyshevU"
"CheckBasket"
"CheckCharacterTable"
"CheckCodimension"
"CheckEmbed"
"CheckFunctionalEquation"
"CheckIdeal"
"CheckOrder"
"CheckPoint"
"CheckPolynomial"
"CheckSparseRootDatum"
"CheckWeilPolynomial"
"ChevalleyBasis"
"ChevalleyBasisOld"
"ChevalleyGroup"
"ChevalleyGroupOrder"
"ChevalleyOrderPolynomial"
"ChiefFactors"
"ChiefFactorsToString"
"ChiefSeries"
"ChienChoyCode"
"ChineseRemainderTheorem"
"Cholesky"
"ChromaticIndex"
"ChromaticNumber"
"ChromaticPolynomial"
"ChtrLiftInternal"
"Class"
"ClassAction"
"ClassCentraliser"
"ClassCentralizer"
"ClassField"
"ClassFunctionSpace"
"ClassGroup"
"ClassGroupAbelianInvariants"
"ClassGroupChecks"
"ClassGroupCyclicFactorGenerators"
"ClassGroupExactSequence"
"ClassGroupGenerationBound"
"ClassGroupGetUseMemory"
"ClassGroupPRank"
"ClassGroupPrimeRepresentatives"
"ClassGroupSetUseMemory"
"ClassGroupStructure"
"ClassImage"
"ClassMap"
"ClassMatrix"
"ClassNumber"
"ClassNumberApproximation"
"ClassNumberApproximationBound"
"ClassPowerCharacter"
"ClassPowerGroup"
"ClassRepresentative"
"ClassRepresentativeFromInvariants"
"ClassTwo"
"ClassUnion"
"Classes"
"ClassesAHInternal"
"ClassesAlmostSimpleInternal"
"ClassesData"
"ClassesInductive"
"ClassesInductiveSetup"
"ClassesLiftCentPMSetup"
"ClassesTF"
"ClassesTFOrbitReps"
"ClassicalConstructiveRecognition"
"ClassicalElementToWord"
"ClassicalForms"
"ClassicalFormsCS"
"ClassicalGroupOrder"
"ClassicalGroupQuotient"
"ClassicalIntersection"
"ClassicalMaximals"
"ClassicalModularEquation"
"ClassicalModularPolynomial"
"ClassicalMultiplication"
"ClassicalMultiplierMap"
"ClassicalPeriod"
"ClassicalStandardGenerators"
"ClassicalStandardPresentation"
"ClassicalSylow"
"ClassicalSylowConjugation"
"ClassicalSylowNormaliser"
"ClassicalSylowToPC"
"ClassicalType"
"ClassifyProjectiveSurface"
"Clean"
"CleanCompositionTree"
"ClearDenominator"
"ClearDenominators"
"ClearIdentificationTree"
"ClearPrevious"
"ClearRowDenominators"
"ClearVerbose"
"ClebschGraph"
"ClebschInvariants"
"ClebschToIgusaClebsch"
"CliffordAlgebra"
"CliqueComplex"
"CliqueNumber"
"ClockCycles"
"CloseSmallGroupDatabase"
"CloseVectors"
"CloseVectorsMatrix"
"CloseVectorsProcess"
"ClosestUnit"
"ClosestVectors"
"ClosestVectorsMatrix"
"ClosureGraph"
"ClosureLiE"
"Cluster"
"CoblesRadicand"
"CoboundaryMapImage"
"Cocycle"
"CocycleMap"
"CodeComplement"
"CodeEntry"
"CodeEntryQECC"
"CodePermutationToMatrix"
"CodeToString"
"Codegree"
"Codifferent"
"Codimension"
"Codomain"
"Coefficient"
"CoefficientField"
"CoefficientHeight"
"CoefficientIdeal"
"CoefficientIdeals"
"CoefficientLength"
"CoefficientMap"
"CoefficientMorphism"
"CoefficientRing"
"CoefficientSpace"
"Coefficients"
"CoefficientsAndMonomials"
"CoefficientsNonSpiral"
"CoefficientsToElementarySymmetric"
"CoerceByClassAction"
"CoerceGrpLie"
"Coercion"
"CoercionGrpLie"
"Coercions"
"Cofactor"
"Cofactors"
"CohenCoxeterName"
"CohomologicalDimension"
"CohomologicalDimensions"
"Cohomology"
"CohomologyClass"
"CohomologyDimension"
"CohomologyElementToChainMap"
"CohomologyElementToCompactChainMap"
"CohomologyGeneratorToChainMap"
"CohomologyGroup"
"CohomologyLeftModuleGenerators"
"CohomologyModule"
"CohomologyRelations"
"CohomologyRightModuleGenerators"
"CohomologyRing"
"CohomologyRingGenerators"
"CohomologyRingQuotient"
"CohomologyToChainmap"
"Coincidence"
"CoisogenyGroup"
"Cokernel"
"ColinearPointsOnPlane"
"CollateWhiteSpace"
"Collect"
"CollectRelations"
"CollineationGroup"
"CollineationGroupStabilizer"
"CollineationSubgroup"
"Colon"
"ColonIdeal"
"ColonIdealEquivalent"
"ColonModule"
"Column"
"ColumnLength"
"ColumnMatrix"
"ColumnSkewLength"
"ColumnSubmatrix"
"ColumnSubmatrixRange"
"ColumnWeight"
"ColumnWeights"
"ColumnWord"
"Columns"
"CombineIdealFactorisation"
"CombineInvariants"
"CommonComplement"
"CommonComponent"
"CommonDenominator"
"CommonEigenspaces"
"CommonModularStructure"
"CommonOverfield"
"CommonZeros"
"Commutator"
"CommutatorGraph"
"CommutatorGroup"
"CommutatorIdeal"
"CommutatorModule"
"CommutatorSubgroup"
"CompactDeletedProjectiveResolution"
"CompactInjectiveResolution"
"CompactPart"
"CompactPresentation"
"CompactProjectiveResolution"
"CompactProjectiveResolutionPGroup"
"CompactProjectiveResolutionsOfAllSimpleModules"
"CompactSystemOfEigenvalues"
"CompactSystemOfEigenvaluesOverQ"
"CompactSystemOfEigenvaluesVector"
"CompanionMatrix"
"Complement"
"ComplementBasis"
"ComplementDFA"
"ComplementEquationsMatrix"
"ComplementOfImage"
"ComplementVectors"
"ComplementaryDivisor"
"ComplementaryErrorFunction"
"Complements"
"Complete"
"CompleteClassGroup"
"CompleteDescription"
"CompleteDigraph"
"CompleteGraph"
"CompleteKArc"
"CompleteTheSquare"
"CompleteTupleList"
"CompleteUnion"
"CompleteWeightEnumerator"
"Completion"
"Complex"
"ComplexCartanMatrix"
"ComplexConjugate"
"ComplexEmbeddings"
"ComplexField"
"ComplexReflectionGroup"
"ComplexReflectionGroupOld"
"ComplexRootDatum"
"ComplexRootMatrices"
"ComplexToPolar"
"ComplexValue"
"Component"
"ComponentGroup"
"ComponentGroupOfIntersection"
"ComponentGroupOfKernel"
"ComponentGroupOrder"
"ComponentProduct"
"Components"
"ComposeQuotients"
"ComposeTransformations"
"Composite"
"CompositeFields"
"Composition"
"CompositionFactors"
"CompositionSequence"
"CompositionSeries"
"CompositionSeriesMatrix"
"CompositionTree"
"CompositionTreeCBM"
"CompositionTreeElementToWord"
"CompositionTreeFactorNumber"
"CompositionTreeFastVerification"
"CompositionTreeNiceGroup"
"CompositionTreeNiceToUser"
"CompositionTreeOrder"
"CompositionTreeReductionInfo"
"CompositionTreeSLPGroup"
"CompositionTreeSeries"
"CompositionTreeVerify"
"Compositum"
"ComputePreImageRule"
"ComputePrimeFactorisation"
"ComputeReducedFactorisation"
"ComputeSubgroupLattice"
"Comultiplication"
"ConcatenateProcesses"
"ConcatenatedCode"
"CondensationMatrices"
"CondensedAlgebra"
"CondensedAlgebraSimpleModules"
"CondensedModule"
"ConditionNumber"
"ConditionalClassGroup"
"ConditionedGroup"
"Conductor"
"ConductorOfCharacterField"
"ConductorRange"
"Cone"
"ConeInSublattice"
"ConeIndices"
"ConeIntersection"
"ConeQuotientByLinearSubspace"
"ConeToPolyhedron"
"ConeWithInequalities"
"Cones"
"ConesOfCodimension"
"ConformalClassicalGroup"
"ConformalHamiltonianLieAlgebra"
"ConformalOrthogonalGroup"
"ConformalOrthogonalGroupMinus"
"ConformalOrthogonalGroupPlus"
"ConformalSpecialLieAlgebra"
"ConformalSymplecticGroup"
"ConformalUnitaryGroup"
"CongruenceGroup"
"CongruenceGroupAnemic"
"CongruenceImage"
"CongruenceIndices"
"CongruenceModulus"
"CongruenceSubgroup"
"Conic"
"ConicOverSubfield"
"ConjecturalRegulator"
"ConjecturalSha"
"ConjugacyClasses"
"Conjugate"
"ConjugateComplementSubspace"
"ConjugateIntoBorel"
"ConjugateIntoTorus"
"ConjugatePartition"
"ConjugateTranspose"
"Conjugates"
"ConjugatesToPowerSums"
"ConjugatingElement"
"ConjugationClassLength"
"Connect"
"ConnectedCenter"
"ConnectedCentre"
"ConnectedComponents"
"ConnectedKernel"
"ConnectingHomomorphism"
"ConnectionNumber"
"ConnectionPolynomial"
"Conorm"
"Consistency"
"ConstaCyclicCode"
"ConstantCoefficient"
"ConstantField"
"ConstantFieldExtension"
"ConstantMap"
"ConstantRing"
"ConstantTerm"
"ConstantWords"
"Constituent"
"Constituents"
"ConstituentsWithMultiplicities"
"Constraint"
"ConstructBasicOrbit"
"ConstructBasicOrbits"
"ConstructOneOrbitInternal"
"ConstructOrbitsInternal"
"ConstructPermsInternal"
"ConstructTable"
"Construction"
"ConstructionX"
"ConstructionX3"
"ConstructionX3u"
"ConstructionXChain"
"ConstructionXX"
"ConstructionXXu"
"ConstructionY1"
"ContactLieAlgebra"
"ContainsQuadrangle"
"ContainsZero"
"Content"
"ContentAndPrimitivePart"
"Continuations"
"ContinueEnumeration"
"ContinuedFraction"
"Contpp"
"Contract"
"Contraction"
"Contravariants"
"ControlledNot"
"Convergents"
"ConvergentsSequence"
"Converse"
"ConvertFromManinSymbol"
"ConvertToCWIFormat"
"Convolution"
"ConwayPolynomial"
"Coordelt"
"Coordinate"
"CoordinateLattice"
"CoordinateMatrix"
"CoordinateRing"
"CoordinateSpace"
"CoordinateSubvariety"
"CoordinateVector"
"Coordinates"
"CoordinatesToElement"
"Coppersmith"
"CoprimeBasis"
"CoprimeBasisInsert"
"CoprimeRepresentative"
"Copy"
"CopyNames"
"CopyRepresentation"
"CordaroWagnerCode"
"Core"
"CoreflectionGroup"
"CoreflectionMatrices"
"CoreflectionMatrix"
"CorestrictCocycle"
"CorestrictionMapImage"
"Coroot"
"CorootAction"
"CorootGSet"
"CorootHeight"
"CorootLattice"
"CorootNorm"
"CorootNorms"
"CorootPosition"
"CorootSpace"
"Coroots"
"CorrectForm"
"Correlation"
"CorrelationGroup"
"CorrespondingResolutionGraph"
"CorrespondingVertices"
"Cos"
"Cosec"
"Cosech"
"CosetAction"
"CosetDistanceDistribution"
"CosetEnumerationProcess"
"CosetGeometry"
"CosetGraphIntersect"
"CosetImage"
"CosetIntersection"
"CosetKernel"
"CosetLeaders"
"CosetNumber"
"CosetRepresentatives"
"CosetSatisfying"
"CosetSpace"
"CosetTable"
"CosetTableToDFA"
"CosetTableToPermutationGroup"
"CosetTableToRepresentation"
"CosetsSatisfying"
"Cosh"
"Cot"
"Coth"
"Cotrace"
"Counit"
"CountEntriesEqual"
"CountPGroups"
"Covalence"
"Covariant"
"CoveringCovariants"
"CoveringMap"
"CoveringRadius"
"CoveringStructure"
"CoveringSubgroup"
"Coverlattice"
"CoweightLattice"
"CoxMonomialLattice"
"CoxRing"
"CoxeterDiagram"
"CoxeterElement"
"CoxeterForm"
"CoxeterGraph"
"CoxeterGroup"
"CoxeterGroupFactoredOrder"
"CoxeterGroupOrder"
"CoxeterLength"
"CoxeterMatrix"
"CoxeterNumber"
"Cputime"
"CreateCharacterFile"
"CreateCycleFile"
"CreateElement"
"CreateFanoData"
"CreateK3Data"
"CreateLieGroup"
"CreateLieGroupElement"
"CreateNilpOrbAlgLie"
"CreateRootVectorSpace"
"CreateVirtualRays"
"Create_SmallCrvMod_Structure"
"CremonaDatabase"
"CremonaReference"
"CremonaReferenceData"
"CriticalStrip"
"CrossCorrelation"
"CrossPolytope"
"CrvGenericGroup"
"CryptographicCurve"
"CrystalGraph"
"CubicFromPoint"
"CubicModel"
"CubicModelSearch"
"CubicSurfaceByHexahedralCoefficients"
"Cunningham"
"Current"
"CurrentLabel"
"Curve"
"CurveDifferential"
"CurveDivisor"
"CurvePlace"
"CurveQuotient"
"Curves"
"Cusp"
"CuspForms"
"CuspIsSingular"
"CuspPlaces"
"CuspWidth"
"CuspidalInducingDatum"
"CuspidalProjection"
"CuspidalSubgroup"
"CuspidalSubspace"
"Cusps"
"CutVertices"
"Cycle"
"CycleCount"
"CycleDecomposition"
"CycleIndexPolynomial"
"CycleStructure"
"CycleStructureToSeq"
"CyclicCode"
"CyclicGroup"
"CyclicPolytope"
"CyclicShiftsMatrix"
"CyclicSubgroups"
"CyclicToRadical"
"CyclotomicAutomorphismGroup"
"CyclotomicClassNumbers"
"CyclotomicEmbedding"
"CyclotomicFactors"
"CyclotomicField"
"CyclotomicOrder"
"CyclotomicPolynomial"
"CyclotomicQuadraticExtensions"
"CyclotomicRelativeField"
"CyclotomicUnitGroup"
"CyclotomicUnits"
"CyclotomicUnramifiedExtension"
"Cylinder"
"DFSTree"
"Darstellungsgruppe"
"Data"
"DataAutLie"
"DatabaseID"
"DatabaseType"
"DawsonIntegral"
"DecimalToBitPrecision"
"Decimation"
"Decode"
"DecodeML"
"DecodingAttack"
"DecomposeAutomorphism"
"DecomposeCharacter"
"DecomposeExteriorPower"
"DecomposeKronecker"
"DecomposeSymmetricPower"
"DecomposeTensorProduct"
"DecomposeUsing"
"DecomposeVector"
"Decomposition"
"DecompositionField"
"DecompositionGroup"
"DecompositionMatrix"
"DecompositionMultiset"
"DecompositionOldAndNew"
"DecompositionType"
"DecompositionTypeFrequency"
"Decycle"
"DedekindEta"
"DedekindTest"
"DeepHoles"
"DefRing"
"Defect"
"DefectGroup"
"DefinedInDegrees"
"DefinesAbelianSubvariety"
"DefinesHomomorphism"
"DefinesTableau"
"DefiningConstantField"
"DefiningEquation"
"DefiningEquations"
"DefiningIdeal"
"DefiningMap"
"DefiningMatrix"
"DefiningModularSymbolsSpace"
"DefiningModulusIsConductor"
"DefiningMonomial"
"DefiningPoints"
"DefiningPolynomial"
"DefiningPolynomials"
"DefiningSubschemePolynomial"
"DefiniteClassNumber"
"DefiniteGramMatrix"
"DefiniteNorm"
"DefinitionSets"
"DegeneracyCosetRepsInner"
"DegeneracyMap"
"DegeneracyMatrix"
"Degree"
"Degree2Subcovers"
"Degree3Subcovers"
"Degree6DelPezzoType2_1"
"Degree6DelPezzoType2_2"
"Degree6DelPezzoType2_3"
"Degree6DelPezzoType3"
"Degree6DelPezzoType4"
"Degree6DelPezzoType6"
"DegreeMap"
"DegreeOfCharacterField"
"DegreeOfExactConstantField"
"DegreeOfFieldExtension"
"DegreeOnePrimeIdeals"
"DegreeRange"
"DegreeReduction"
"DegreeSequence"
"Degrees"
"DegreesOfCohomologyGenerators"
"DegreesOfGenerators"
"DelPezzoSurface"
"Delaunay"
"DelaunayMesh"
"DeleteAllAssociatedData"
"DeleteAttributes"
"DeleteCapacities"
"DeleteCapacity"
"DeleteCollector"
"DeleteData"
"DeleteEdgeLabels"
"DeleteGenerator"
"DeleteGlobalModularFormsData"
"DeleteHeckePrecomputation"
"DeleteLabel"
"DeleteLabels"
"DeleteNonsplitCollector"
"DeleteNonsplitSolutionspace"
"DeleteProcess"
"DeleteProcessComplete"
"DeleteProcessDown"
"DeleteRelation"
"DeleteSplitCollector"
"DeleteSplitSolutionspace"
"DeleteStoredWords"
"DeleteVertexLabels"
"DeleteWeight"
"DeleteWeights"
"DeletedProjectiveResolution"
"DelsarteGoethalsCode"
"Delta"
"DeltaPreimage"
"Demazure"
"Denominator"
"Density"
"DensityEvolutionBinarySymmetric"
"DensityEvolutionGaussian"
"Depth"
"DepthFirstSearchTree"
"Derivation"
"Derivative"
"DerivedGroup"
"DerivedGroupMonteCarlo"
"DerivedLength"
"DerivedSeries"
"DerivedSubgroup"
"DerksenIdeal"
"Descendants"
"DescentInformation"
"DescentMaps"
"Design"
"Detach"
"DetachSpec"
"Determinant"
"Development"
"Diagonal"
"DiagonalAutomorphism"
"DiagonalBlockDecomposition"
"DiagonalBlockStructure"
"DiagonalBlocks"
"DiagonalBlocksStructure"
"DiagonalForm"
"DiagonalJoin"
"DiagonalMatrix"
"DiagonalModel"
"DiagonalSparseMatrix"
"DiagonalSum"
"Diagonalisation"
"DiagonalisingMatrix"
"Diagonalization"
"Diagram"
"DiagramAutomorphism"
"Diameter"
"DiameterPath"
"DickmanRho"
"DicksonFirst"
"DicksonInvariant"
"DicksonSecond"
"DicyclicGroup"
"Difference"
"DifferenceSet"
"Different"
"DifferentDivisor"
"Differential"
"DifferentialBasis"
"DifferentialField"
"DifferentialFieldExtension"
"DifferentialIdeal"
"DifferentialLaurentSeriesRing"
"DifferentialOperator"
"DifferentialOperatorRing"
"DifferentialRing"
"DifferentialRingExtension"
"DifferentialSpace"
"Differentiation"
"DifferentiationSequence"
"DihedralForms"
"DihedralGroup"
"Dilog"
"Dimension"
"DimensionBoundTest"
"DimensionByFormula"
"DimensionComplexTorus"
"DimensionCuspForms"
"DimensionCuspFormsGamma0"
"DimensionCuspFormsGamma1"
"DimensionNewCuspForms"
"DimensionNewCuspFormsGamma0"
"DimensionNewCuspFormsGamma1"
"DimensionOfAlgebra"
"DimensionOfCentreOfEndomorphismRing"
"DimensionOfEndomorphismRing"
"DimensionOfExactConstantField"
"DimensionOfFieldOfGeometricIrreducibility"
"DimensionOfHighestWeightModule"
"DimensionOfHom"
"DimensionOfHomology"
"DimensionOfKernelZ2"
"DimensionOfNonQFactorialLocus"
"DimensionOfSpanZ2"
"DimensionsEstimate"
"DimensionsOfHomology"
"DimensionsOfInjectiveModules"
"DimensionsOfProjectiveModules"
"DimensionsOfTerms"
"DirectProduct"
"DirectSum"
"DirectSumDecomposition"
"DirectSumRestrictionOfScalarsToQ"
"DirichletCharacter"
"DirichletCharacterFromValuesOnUnitGenerators"
"DirichletCharacters"
"DirichletGroup"
"DirichletGroupCopy"
"DirichletGroupFull"
"DirichletRestriction"
"DiscToPlane"
"Disconnect"
"DiscreteLogMapSmooth"
"Discriminant"
"DiscriminantDivisor"
"DiscriminantOfHeckeAlgebra"
"DiscriminantRange"
"DisownChildren"
"Display"
"DisplayBurnsideMatrix"
"DisplayCompTreeNodes"
"DisplayFareySymbolDomain"
"DisplayPolyMap"
"DisplayPolygons"
"Distance"
"DistanceMatrix"
"DistancePartition"
"Distances"
"DistinctDegreeFactorization"
"DistinctExtensions"
"DistinguishedOrbitsOnSimples"
"DistinguishedRoot"
"DivideOutIntegers"
"DivisionFunction"
"DivisionPoints"
"DivisionPolynomial"
"DivisionPsi"
"Divisor"
"DivisorBasis"
"DivisorClassGroup"
"DivisorClassLattice"
"DivisorGroup"
"DivisorIdeal"
"DivisorMap"
"DivisorOfDegreeOne"
"DivisorSigma"
"DivisorToPoint"
"DivisorToSheaf"
"Divisors"
"Dodecacode"
"DoesDefineFan"
"Domain"
"DominantCharacter"
"DominantDiagonalForm"
"DominantLSPath"
"DominantWeight"
"DotProduct"
"Double"
"DoubleCoset"
"DoubleCosetRepresentatives"
"DoubleCosets"
"DoubleDual"
"DoubleGenusOneModel"
"DoublePlotkinSum"
"DoubleSpaceQuartic"
"DoublyCirculantQRCode"
"DoublyCirculantQRCodeGF4"
"Dual"
"DualAtkinLehner"
"DualAtkinLehnerOperator"
"DualBasisLattice"
"DualCoxeterForm"
"DualEuclideanWeightDistribution"
"DualFaceInDualFan"
"DualFan"
"DualGraphCanonical"
"DualGraphMultiplicities"
"DualHeckeOperator"
"DualIsogeny"
"DualKroneckerZ4"
"DualLattice"
"DualLeeWeightDistribution"
"DualMatrixToPerm"
"DualMatrixToWord"
"DualModularSymbol"
"DualMorphism"
"DualPartition"
"DualPrintName"
"DualQuotient"
"DualRepresentation"
"DualStarInvolution"
"DualVectorSpace"
"DualWeightDistribution"
"DualityAutomorphism"
"DumpVerbose"
"DuvalPuiseuxExpansion"
"DynkinDiagram"
"DynkinDigraph"
"E2NForm"
"E4Form"
"E6Form"
"E8gens"
"EARNS"
"ECCanonicalLiftTraceGen"
"ECCanonicalLiftTraceGenus0"
"ECCanonicalLiftTraceHyp"
"ECDeformationTrace"
"ECM"
"ECMFactoredOrder"
"ECMOrder"
"ECMSteps"
"ECPCShanks"
"EFAModuleMaps"
"EFAModules"
"EFASeries"
"EIS"
"EISDatabase"
"EModule"
"EVALInternal"
"Ealpha"
"EasyBasis"
"EasyClean"
"EasyIdeal"
"EchRat"
"EchelonForm"
"EchelonNullspace"
"EcheloniseWord"
"Echelonize"
"EchelonizeWord"
"EckardtPoints"
"EdgeCapacities"
"EdgeConnectivity"
"EdgeDeterminant"
"EdgeGroup"
"EdgeIndices"
"EdgeLabel"
"EdgeLabels"
"EdgeMultiplicity"
"EdgeSeparator"
"EdgeSet"
"EdgeUnion"
"EdgeWeights"
"Edges"
"EffectivePossibilities"
"EffectiveSubcanonicalCurves"
"EhrhartCoefficient"
"EhrhartCoefficients"
"EhrhartDeltaVector"
"EhrhartPolynomial"
"EhrhartSeries"
"EichlerInvariant"
"Eigenform"
"Eigenforms"
"Eigenspace"
"Eigenvalues"
"EigenvectorInTermsOfExpansionBasis"
"EightCoverings"
"EightDescent"
"Eisenstein"
"EisensteinData"
"EisensteinIntegerRing"
"EisensteinIntegers"
"EisensteinProjection"
"EisensteinSeries"
"EisensteinSubspace"
"EisensteinTwo"
"Element"
"ElementOfNorm"
"ElementOfOrder"
"ElementOffset"
"ElementSequence"
"ElementSet"
"ElementToMonoidSequence"
"ElementToSequence"
"ElementToSequencePad"
"ElementToTuple"
"ElementType"
"ElementaryAbelianGroup"
"ElementaryAbelianNormalSubgroup"
"ElementaryAbelianQuotient"
"ElementaryAbelianSection"
"ElementaryAbelianSeries"
"ElementaryAbelianSeriesCanonical"
"ElementaryAbelianSubgroups"
"ElementaryDivisors"
"ElementaryDivisorsMultiset"
"ElementarySymmetricPolynomial"
"ElementarySymmetricToCoefficients"
"ElementarySymmetricToPowerSums"
"ElementaryToHomogeneousMatrix"
"ElementaryToMonomialMatrix"
"ElementaryToPowerSumMatrix"
"ElementaryToSchurMatrix"
"Elements"
"EliasAsymptoticBound"
"EliasBound"
"Eliminate"
"EliminateGenerators"
"EliminateRedundancy"
"EliminateRedundantBasePoints"
"Elimination"
"EliminationIdeal"
"EllipticCurve"
"EllipticCurveDatabase"
"EllipticCurveDatabaseLarge"
"EllipticCurveFromjInvariant"
"EllipticCurveSearch"
"EllipticCurveWithGoodReductionSearch"
"EllipticCurveWithjInvariant"
"EllipticCurves"
"EllipticExponential"
"EllipticFactors"
"EllipticInvariants"
"EllipticLogarithm"
"EllipticPeriods"
"EllipticPoints"
"EltTup"
"Eltlist"
"Eltnum"
"Eltseq"
"EltseqPad"
"Embed"
"EmbedIntoMinimalCyclotomicField"
"EmbedPlaneCurveInP3"
"Embedding"
"EmbeddingMap"
"EmbeddingMatrix"
"EmbeddingSpace"
"Embeddings"
"EmptyBasket"
"EmptyCohomologyModule"
"EmptyDigraph"
"EmptyGraph"
"EmptyMultiDigraph"
"EmptyMultiGraph"
"EmptyNetwork"
"EmptyPolyhedron"
"EmptyScheme"
"EmptySubscheme"
"End"
"EndVertices"
"EndomorphismAlgebra"
"EndomorphismRing"
"Endomorphisms"
"EndpointWeight"
"EnterStauduhar"
"EntriesInterpolation"
"EntriesInterpolationExpansion"
"Entropy"
"Entry"
"Enumerate"
"EnumerationCost"
"EnumerationCostArray"
"Eof"
"EqualDFA"
"EqualDegreeFactorization"
"Equality"
"EqualizeDegrees"
"Equation"
"EquationOrder"
"EquationOrderFinite"
"EquationOrderInfinite"
"Equations"
"EquiDecomposition"
"EquidimensionalDecomposition"
"EquidimensionalPart"
"EquidimensionalRadical"
"EquitablePartition"
"EquivalentPoint"
"EquivalentQuotients"
"Erf"
"Erfc"
"Error"
"ErrorFunction"
"EstimateOrbit"
"Eta"
"EtaTPairing"
"EtaqPairing"
"EuclideanLeftDivision"
"EuclideanNorm"
"EuclideanRightDivision"
"EuclideanWeight"
"EuclideanWeightDistribution"
"EuclideanWeightEnumerator"
"EulerCharacteristic"
"EulerFactor"
"EulerFactorModChar"
"EulerFactorsByDeformation"
"EulerGamma"
"EulerGraphDatabase"
"EulerPhi"
"EulerPhiInverse"
"EulerProduct"
"EulerianGraphDatabase"
"EulerianNumber"
"Evaluate"
"EvaluateAt"
"EvaluateByPowerSeries"
"EvaluateClassGroup"
"EvaluateDerivatives"
"EvaluatePolynomial"
"EvaluationPowerSeries"
"EvenOrderElement"
"EvenSublattice"
"EvenWeightCode"
"EvenWeightSubcode"
"ExactConstantField"
"ExactExtension"
"ExactLattice"
"ExactLength"
"ExactQuotient"
"ExactScalarProduct"
"ExactValue"
"ExceptionalCurveIntersection"
"ExceptionalSelfIntersection"
"ExceptionalUnitOrbit"
"ExceptionalUnits"
"ExchangeElement"
"Exclude"
"ExcludedConjugate"
"ExcludedConjugates"
"ExistsConwayPolynomial"
"ExistsCosetSatisfying"
"ExistsCoveringStructure"
"ExistsExcludedConjugate"
"ExistsGroupData"
"ExistsModularCurveDatabase"
"ExistsNormalisingCoset"
"ExistsNormalizingCoset"
"Exp"
"Expand"
"ExpandBasis"
"ExpandQuaternionicBasis"
"ExpandToPrecision"
"ExpandZ"
"Experimental_InnerTwistOperator"
"ExplicitCoset"
"Exponent"
"ExponentDenominator"
"ExponentLattice"
"ExponentLaw"
"ExponentSum"
"ExponentialFieldExtension"
"ExponentialIntegral"
"ExponentialIntegralE1"
"Exponents"
"ExpurgateCode"
"ExpurgateWeightCode"
"Ext"
"ExtAlgebra"
"ExtGenerators"
"Extcont"
"Extend"
"ExtendBasicOrbit"
"ExtendBasicOrbits"
"ExtendBasis"
"ExtendCode"
"ExtendDynkinDiagramPermutation"
"ExtendEchelonForm"
"ExtendField"
"ExtendFieldCode"
"ExtendGaloisCocycle"
"ExtendGeodesic"
"ExtendIsometry"
"ExtendMultiplicativeGroup"
"ExtendPrimaryInvariants"
"ExtendedCategory"
"ExtendedCohomologyClass"
"ExtendedGreatestCommonDivisor"
"ExtendedGreatestCommonLeftDivisor"
"ExtendedGreatestCommonRightDivisor"
"ExtendedLeastCommonLeftMultiple"
"ExtendedOneCocycle"
"ExtendedPerfectCodeZ4"
"ExtendedReals"
"ExtendedRing"
"ExtendedSL"
"ExtendedSp"
"ExtendedType"
"ExtendedValuationRing"
"Extends"
"Extension"
"ExtensionCategory"
"ExtensionClasses"
"ExtensionExponents"
"ExtensionMorphism"
"ExtensionNumbers"
"ExtensionPrimes"
"ExtensionProcess"
"ExtensionsOfElementaryAbelianGroup"
"ExtensionsOfSolubleGroup"
"Exterior"
"ExteriorAlgebra"
"ExteriorPower"
"ExteriorPowerNaturalModule"
"ExteriorSquare"
"ExternalLines"
"ExtraAutomorphism"
"ExtraSpecialAction"
"ExtraSpecialBasis"
"ExtraSpecialGroup"
"ExtraSpecialNormaliser"
"ExtraSpecialParameters"
"ExtractBlock"
"ExtractBlockRange"
"ExtractDiagonalBlocks"
"ExtractGenerators"
"ExtractGroup"
"ExtractRep"
"ExtraspecialPair"
"ExtraspecialPairs"
"ExtraspecialSigns"
"ExtremalLieAlgebra"
"ExtremalRayContraction"
"ExtremalRayContractionDivisor"
"ExtremalRayContractions"
"ExtremalRays"
"F4O"
"FFPatchIndex"
"FGIntersect"
"FPGroup"
"FPGroupColouring"
"FPGroupStrong"
"FPQuotient"
"Face"
"FaceFunction"
"FaceIndices"
"FaceSupportedBy"
"Faces"
"FacesContaining"
"FacetIndices"
"Facets"
"Facint"
"Facpol"
"Factor"
"FactorBasis"
"FactorBasisCreate"
"FactorBasisVerify"
"FactoredCarmichaelLambda"
"FactoredCharacteristicPolynomial"
"FactoredChevalleyGroupOrder"
"FactoredClassicalGroupOrder"
"FactoredDefiningPolynomials"
"FactoredDiscriminant"
"FactoredEulerPhi"
"FactoredEulerPhiInverse"
"FactoredHeckePolynomial"
"FactoredIndex"
"FactoredInverseDefiningPolynomials"
"FactoredMCPolynomials"
"FactoredMinimalAndCharacteristicPolynomials"
"FactoredMinimalPolynomial"
"FactoredModulus"
"FactoredOrder"
"FactoredOrderGL"
"FactoredProjectiveOrder"
"Factorial"
"FactorialValuation"
"Factorisation"
"FactorisationOverSplittingField"
"FactorisationToInteger"
"FactorisationToPolynomial"
"Factorization"
"FactorizationOfQuotient"
"FactorizationOverSplittingField"
"FactorizationToInteger"
"FaithfulModule"
"FakeIsogenySelmerSet"
"FakeProjectiveSpace"
"Falpha"
"FaltingsHeight"
"FamilyOfMultivaluedSections"
"Fan"
"FanOfAffineSpace"
"FanOfFakeProjectiveSpace"
"FanOfWPS"
"Fano"
"FanoBaseGenus"
"FanoBaskets"
"FanoDatabase"
"FanoGenus"
"FanoIndex"
"FanoIsolatedBaskets"
"FanoToRecord"
"FareySymbol"
"FastRoots"
"FewGenerators"
"Fibonacci"
"FibonacciGroup"
"Field"
"FieldAutomorphism"
"FieldCategory"
"FieldCharacteristic"
"FieldExponent"
"FieldMorphism"
"FieldOfDefinition"
"FieldOfFractions"
"FieldOfGeometricIrreducibility"
"FieldSize"
"FileProcess"
"FilterProcess"
"FilterVector"
"FindAsocAlgebraRep"
"FindChevalleyBasis"
"FindChevalleyBasisDiagonal"
"FindChevalleyBasisQuad"
"FindCommonEmbeddings"
"FindDependencies"
"FindEntries"
"FindFirstGenerators"
"FindGenerators"
"FindIndexes"
"FindLieAlgebra"
"FindN"
"FindPowerSeries"
"FindPowerSeriesForChabauty"
"FindRelations"
"FindRelationsInCWIFormat"
"FindSplitElement"
"FindWord"
"FindXYH"
"FineEquidimensionalDecomposition"
"FiniteAffinePlane"
"FiniteDivisor"
"FiniteField"
"FiniteLieAlgebra"
"FiniteProjectivePlane"
"FiniteSplit"
"FireCode"
"FirstCohomology"
"FirstIndexOfColumn"
"FirstIndexOfRow"
"FirstPoleElement"
"FirstWeights"
"FischerSubgroup"
"FittingGroup"
"FittingIdeal"
"FittingIdeals"
"FittingLength"
"FittingSeries"
"FittingSubgroup"
"Fix"
"FixedArc"
"FixedField"
"FixedGroup"
"FixedPoints"
"FixedSubspaceToPolyhedron"
"FlagComplex"
"Flat"
"FlatProduct"
"FlatsNullMatrix"
"Flexes"
"Flip"
"Floor"
"Flow"
"Flush"
"Form"
"FormType"
"FormalChain"
"FormalGroupHomomorphism"
"FormalGroupLaw"
"FormalLog"
"FormalPoint"
"FormalSet"
"Format"
"FourCoverPullback"
"FourDescent"
"FourToTwoCovering"
"FourierMotzkin"
"FractionalPart"
"FrattiniQuotientRank"
"FrattiniSubgroup"
"FreeAbelianGroup"
"FreeAbelianQuotient"
"FreeAlgebra"
"FreeGenerators"
"FreeGroup"
"FreeGroupIndex"
"FreeGroupIsIn"
"FreeLieAlgebra"
"FreeMonoid"
"FreeNilpotentGroup"
"FreeProduct"
"FreeResolution"
"FreeSemigroup"
"FreefValues"
"Frobenius"
"FrobeniusActionOnPoints"
"FrobeniusActionOnReducibleFiber"
"FrobeniusActionOnTrivialLattice"
"FrobeniusAutomorphism"
"FrobeniusAutomorphisms"
"FrobeniusElement"
"FrobeniusEndomorphism"
"FrobeniusForm"
"FrobeniusFormAlternating"
"FrobeniusImage"
"FrobeniusMap"
"FrobeniusPolynomial"
"FrobeniusTraceDirect"
"FrobeniusTracesToWeilPolynomials"
"FromAnalyticJacobian"
"FromLiE"
"FuchsianGroup"
"FuchsianMatrixRepresentation"
"FullCharacteristicPolynomial"
"FullCone"
"FullCorootLattice"
"FullDimension"
"FullDirichletGroup"
"FullMinimalPolynomialTest"
"FullModule"
"FullPrimaryInvariantSpaces"
"FullRootLattice"
"Function"
"FunctionDegree"
"FunctionField"
"FunctionFieldCategory"
"FunctionFieldDatabase"
"FunctionFieldDifferential"
"FunctionFieldDivisor"
"FunctionFieldPlace"
"FunctionFields"
"Functor"
"FundamentalClassGroup"
"FundamentalClassGroupStructure"
"FundamentalClassNumber"
"FundamentalClosure"
"FundamentalCoweights"
"FundamentalDiscriminant"
"FundamentalDomain"
"FundamentalElement"
"FundamentalGroup"
"FundamentalInvariants"
"FundamentalInvariantsKing"
"FundamentalKernel"
"FundamentalQuotient"
"FundamentalUnit"
"FundamentalUnits"
"FundamentalVolume"
"FundamentalWeights"
"G2"
"G2Invariants"
"G2Reduced"
"G2ToIgusaInvariants"
"GCD"
"GCDSup"
"GCLD"
"GCRD"
"GF"
"GHom"
"GHomOverCentralizingField"
"GL"
"GLB"
"GLNormaliser"
"GModule"
"GModuleAction"
"GModuleConductorOfCoefficientField"
"GModuleLinear"
"GModulePrimes"
"GO"
"GOMinus"
"GOPlus"
"GPCGroup"
"GR"
"GRBsktToRec"
"GRCrvSToRec"
"GRHBound"
"GRPtSToRec"
"GRSCode"
"GRSchToRec"
"GSShortOrbitSubset"
"GSShortSubset"
"GSet"
"GSetFromIndexed"
"GU"
"GabidulinCode"
"GallagerCode"
"GaloisActionOnLines"
"GaloisCohomology"
"GaloisConjugacyRepresentatives"
"GaloisConjugate"
"GaloisData"
"GaloisField"
"GaloisGroup"
"GaloisGroupInvariant"
"GaloisImage"
"GaloisMultiplicities"
"GaloisOrbit"
"GaloisProof"
"GaloisQuotient"
"GaloisRepresentation"
"GaloisRing"
"GaloisRoot"
"GaloisSplittingField"
"GaloisSubfieldTower"
"GaloisSubgroup"
"Gamma"
"Gamma0"
"Gamma1"
"GammaAction"
"GammaActionOnSimples"
"GammaCorootSpace"
"GammaD"
"GammaFactors"
"GammaGroup"
"GammaOrbitOnRoots"
"GammaOrbitsOnRoots"
"GammaOrbitsRepresentatives"
"GammaRootSpace"
"GammaUpper0"
"GammaUpper1"
"GapNumbers"
"GaussNumber"
"GaussReduce"
"GaussReduceGram"
"GaussSum"
"GaussianBinomial"
"GaussianFactorial"
"GaussianIntegerRing"
"GaussianIntegers"
"Gcd"
"GcdSup"
"GcdWithLoss"
"GegenbauerPolynomial"
"GenCrvGrpData"
"GenModuleProject"
"GeneralLinearGroup"
"GeneralOrthogonalGroup"
"GeneralOrthogonalGroupMinus"
"GeneralOrthogonalGroupPlus"
"GeneralReeTorusElement"
"GeneralUnitaryGroup"
"GeneralisedEquationOrder"
"GeneralisedNorm"
"GeneralisedRowReduction"
"GeneralisedWallForm"
"GeneralizedAGCode"
"GeneralizedAlgebraicGeometricCode"
"GeneralizedFibonacciNumber"
"GeneralizedNorm"
"GeneralizedSrivastavaCode"
"GenerateGraphs"
"GeneratepGroups"
"GeneratingPolynomial"
"GeneratingSet"
"GeneratingSubfields"
"GeneratingSubfieldsLattice"
"GeneratingWords"
"Generator"
"GeneratorMatrix"
"GeneratorNumber"
"GeneratorOrder"
"GeneratorPolynomial"
"GeneratorStructure"
"Generators"
"GeneratorsOverBaseRing"
"GeneratorsSequence"
"GeneratorsSequenceOverBaseRing"
"Generic"
"GenericAbelianGroup"
"GenericDatabase"
"GenericGenus"
"GenericGroup"
"GenericModel"
"GenericPoint"
"GenericPolynomial"
"Genus"
"GenusContribution"
"GenusDistribution"
"GenusField"
"GenusOneModel"
"GenusRepresentatives"
"GenusX0N"
"GenusX0NQuotient"
"GenusX1N"
"Geodesic"
"GeodesicExists"
"Geodesics"
"GeodesicsIntersection"
"GeometricAutomorphismGroup"
"GeometricAutomorphismGroupClassification"
"GeometricGenus"
"GeometricGenusOfDesingularization"
"GeometricGenusUsingToricGeometry"
"GeometricMordellWeilLattice"
"GeometricPicardGroup"
"GeometricSupport"
"GeometricTorsionBound"
"Germ"
"GetAssertions"
"GetAttributes"
"GetAutoColumns"
"GetAutoCompact"
"GetBeep"
"GetBraidRelations"
"GetCells"
"GetChild"
"GetChildren"
"GetClassGroupBoundFactorBasis"
"GetClassGroupBoundGenerators"
"GetColumns"
"GetConicSubfieldMethodDegreeBound"
"GetCurrentDirectory"
"GetDefaultRealField"
"GetEchoInput"
"GetElementPrintFormat"
"GetEnv"
"GetEnvironmentValue"
"GetEvaluationComparison"
"GetForceCFP"
"GetHelpExternalBrowser"
"GetHelpExternalSystem"
"GetHelpUseExternal"
"GetHistorySize"
"GetIgnoreEof"
"GetIgnorePrompt"
"GetIgnoreSpaces"
"GetIloadAllowEsc"
"GetIndent"
"GetIntegerNewtonPolygon"
"GetIntrinsicName"
"GetKantPrecision"
"GetKaratsubaThreshold"
"GetLibraries"
"GetLibraryRoot"
"GetLineEditor"
"GetMPCVersion"
"GetMPFRVersion"
"GetMS"
"GetMaximumMemoryUsage"
"GetMemoryExtensionSize"
"GetMemoryLimit"
"GetMemoryUsage"
"GetModule"
"GetModules"
"GetMonoidNewtonPolygon"
"GetNthreads"
"GetParent"
"GetPath"
"GetPrecision"
"GetPresentation"
"GetPreviousSize"
"GetPrimes"
"GetPrintLevel"
"GetPrompt"
"GetQuotient"
"GetRep"
"GetRows"
"GetSeed"
"GetShellCompletion"
"GetShowPromptAlways"
"GetStoredFactors"
"GetTempDir"
"GetTraceback"
"GetTransGroupIDMany"
"GetUserProcessData"
"GetVerbose"
"GetVersion"
"GetViMode"
"Getc"
"Getpid"
"Gets"
"Getuid"
"Getvecs"
"GewirtzGraph"
"GilbertVarshamovAsymptoticBound"
"GilbertVarshamovBound"
"GilbertVarshamovLinearBound"
"Girth"
"GirthCycle"
"GlobalSectionSubmodule"
"GlobalUnitGroup"
"Glue"
"GoethalsCode"
"GoethalsDelsarteCode"
"GolayCode"
"GolayCodeZ4"
"GoodBasePoints"
"GoodDescription"
"GoodLDPCEnsemble"
"GoppaCode"
"GoppaDesignedDistance"
"GorensteinClosure"
"GorensteinIndex"
"Graded"
"GradedBettiTable"
"GradedCokernel"
"GradedCommutativeRing"
"GradedCone"
"GradedDirectSum"
"GradedDual"
"GradedDualComplex"
"GradedDualWithHoms"
"GradedFreeModule"
"GradedHoms"
"GradedIdentityMap"
"GradedImage"
"GradedKernel"
"GradedMinimalFreeResolution"
"GradedModule"
"GradedRingData"
"GradedRingDatabase"
"GradedToricLattice"
"GradientVector"
"GradientVectors"
"Grading"
"Gradings"
"GramIsomorphismInvariants"
"GramLength"
"GramMatrix"
"GramReduction"
"GramSchmidtProcess"
"GramSchmidtReduce"
"GramSchmidtReduction"
"Graph"
"GraphAutomorphism"
"GraphInBytes"
"GraphSizeInBytes"
"Graphs"
"GrayMap"
"GrayMapImage"
"GreatestCommonDivisor"
"GreatestCommonLeftDivisor"
"GreatestCommonRightDivisor"
"GreatestLowerBound"
"GriesmerBound"
"GriesmerLengthBound"
"GriesmerMinimumWeightBound"
"Groebner"
"GroebnerBasis"
"GroebnerBasisUnreduced"
"GroebnerWalk"
"GrossenCheck"
"Grossencharacter"
"GroundField"
"Group"
"GroupAlgebra"
"GroupAlgebraAsStarAlgebra"
"GroupData"
"GroupGenerators"
"GroupIdeal"
"GroupOfLieType"
"GroupOfLieTypeFactoredOrder"
"GroupOfLieTypeHomomorphism"
"GroupOfLieTypeOrder"
"GroupType"
"Groupsp7"
"GrowthFunction"
"GrowthFunctionDFA"
"GrowthFunctionOld"
"GrpFPToCox"
"GrpPermToCox"
"GuessAltsymDegree"
"H2_G_A"
"H2_G_QmodZ"
"HBChevalleyGroupOrder"
"HBClassicalGroupOrder"
"HBinomial"
"HKZ"
"HKZGram"
"HadamardAutomorphismGroup"
"HadamardCanonicalForm"
"HadamardCodeZ4"
"HadamardColumnDesign"
"HadamardDatabase"
"HadamardDatabaseInformation"
"HadamardDatabaseInformationEmpty"
"HadamardEltseq"
"HadamardGraph"
"HadamardInvariant"
"HadamardMatrixFromInteger"
"HadamardMatrixToInteger"
"HadamardNormalize"
"HadamardRowDesign"
"HadamardTransformation"
"HalfIntegralWeightForms"
"HalfspaceToPolyhedron"
"HallSubgroup"
"HamiltonianLieAlgebra"
"HammingAsymptoticBound"
"HammingCode"
"HammingWeightEnumerator"
"HarmonicNumber"
"HasAdditionAlgorithm"
"HasAffinePatch"
"HasAllPQuotientsMetacyclic"
"HasAllRootsOnUnitCircle"
"HasAlmostUniqueLocalParametrization"
"HasAlmostUniqueLocalUniformizer"
"HasAssociatedNewSpace"
"HasAttribute"
"HasAutomorphisms"
"HasBSGS"
"HasBaseExtension"
"HasBaseExtensionMorphisms"
"HasBlockDiagMat"
"HasC6Decomposition"
"HasCM"
"HasClique"
"HasClosedCosetTable"
"HasCoercion"
"HasComplement"
"HasCompleteCosetTable"
"HasComplexConjugate"
"HasComplexMultiplication"
"HasComposition"
"HasCompositionSequence"
"HasCompositionTree"
"HasComputableAbelianQuotient"
"HasComputableLCS"
"HasComputableSubgroups"
"HasConic"
"HasCoordinates"
"HasDecomposition"
"HasDefinedModuleMap"
"HasDefinedTerm"
"HasDefiningMap"
"HasDenseAndSparseRep"
"HasDenseRep"
"HasDenseRepOnly"
"HasEasyIdeal"
"HasEchelonForm"
"HasElementaryBasis"
"HasEmbedding"
"HasExtension"
"HasFactorisation"
"HasFactorization"
"HasFiniteAQ"
"HasFiniteAbelianQuotient"
"HasFiniteDimension"
"HasFiniteKernel"
"HasFiniteOrder"
"HasFixedBaseObject"
"HasFrobeniusEndomorphism"
"HasFunctionField"
"HasGCD"
"HasGNB"
"HasGrevlexOrder"
"HasGroebnerBasis"
"HasHomogeneousBasis"
"HasIdentity"
"HasImage"
"HasInclusion"
"HasIndexOne"
"HasIndexOneEverywhereLocally"
"HasInfiniteComputableAbelianQuotient"
"HasInfinitePSL2Quotient"
"HasIntegralPoint"
"HasIntersectionProperty"
"HasIntersectionPropertyN"
"HasInverse"
"HasIrregularFibres"
"HasIsomorphismExtension"
"HasIsomorphismExtensions"
"HasIsomorphisms"
"HasIsotropicVector"
"HasKnownInverse"
"HasLeviSubalgebra"
"HasLine"
"HasLinearGrayMapImage"
"HasMatrix"
"HasMonomialBasis"
"HasMorphism"
"HasMorphismAutomorphism"
"HasMorphismAutomorphisms"
"HasMorphismFromImages"
"HasMorphismFromImagesAndBaseMorphism"
"HasMultiplicityOne"
"HasNegativeWeightCycle"
"HasNonSingularFibres"
"HasNonsingularPoint"
"HasOddDegreeModel"
"HasOne"
"HasOnlyOrdinarySingularities"
"HasOnlyOrdinarySingularitiesMonteCarlo"
"HasOrder"
"HasOutputFile"
"HasPRoot"
"HasParallelClass"
"HasParallelism"
"HasPlace"
"HasPoint"
"HasPointsEverywhereLocally"
"HasPointsOverExtension"
"HasPolynomial"
"HasPolynomialFactorization"
"HasPolynomialGroebnerBasis"
"HasPolynomialResultant"
"HasPowerSumBasis"
"HasPreimage"
"HasPreimageFunction"
"HasProjectiveDerivation"
"HasRandomPlace"
"HasRationalPoint"
"HasRationalPointUsingSubfield"
"HasRationalSolutions"
"HasReducedFibres"
"HasResolution"
"HasRestriction"
"HasResultant"
"HasRightCancellation"
"HasRoot"
"HasRootOfUnity"
"HasSchurBasis"
"HasSignature"
"HasSingularPointsOverExtension"
"HasSingularVector"
"HasSparseRep"
"HasSparseRepOnly"
"HasSquareSha"
"HasSupplement"
"HasTwistedHopfStructure"
"HasValidCosetTable"
"HasValidIndex"
"HasWeakIntersectionProperty"
"HasZeroDerivation"
"Hash"
"HasseMinkowskiInvariant"
"HasseMinkowskiInvariants"
"HasseWittInvariant"
"HeckeAlgebra"
"HeckeAlgebraFields"
"HeckeAlgebraZBasis"
"HeckeBound"
"HeckeCharacter"
"HeckeCharacterGroup"
"HeckeCharacteristicPolynomial"
"HeckeCorrespondence"
"HeckeEigenvalue"
"HeckeEigenvalueBound"
"HeckeEigenvalueField"
"HeckeEigenvalueRing"
"HeckeFieldSpan"
"HeckeImages"
"HeckeImagesAll"
"HeckeLift"
"HeckeMatrix"
"HeckeMatrixBianchi"
"HeckeOperator"
"HeckeOperatorModSym"
"HeckePolynomial"
"HeckeSpan"
"HeckeTrace"
"HeegnerDiscriminants"
"HeegnerForms"
"HeegnerIndex"
"HeegnerPoint"
"HeegnerPointNumberOfTerms"
"HeegnerPoints"
"HeegnerTorsionElement"
"Height"
"HeightConstant"
"HeightOnAmbient"
"HeightPairing"
"HeightPairingLattice"
"HeightPairingMatrix"
"HeightZeroSublattice"
"HeilbronnCremona"
"HeilbronnMerel"
"HenselLift"
"HenselProcess"
"HermiteConstant"
"HermiteForm"
"HermiteNormalFormProcess"
"HermiteNormalForms"
"HermiteNumber"
"HermitePolynomial"
"HermitianAutomorphismGroup"
"HermitianCartanMatrix"
"HermitianCode"
"HermitianCurve"
"HermitianDual"
"HermitianFunctionField"
"HermitianTranspose"
"HesseCovariants"
"HesseModel"
"HessePolynomials"
"HessenbergForm"
"Hessian"
"HessianMatrix"
"Hexacode"
"HighMap"
"HighProduct"
"HighRankExceptionalStdGens"
"HighestCoroot"
"HighestLongCoroot"
"HighestLongRoot"
"HighestRoot"
"HighestShortCoroot"
"HighestShortRoot"
"HighestWeightModule"
"HighestWeightRepresentation"
"HighestWeightSpace"
"HighestWeightVectors"
"HighestWeights"
"HighestWeightsAndVectors"
"Hilbert90"
"HilbertBasis"
"HilbertCharacterSubgroup"
"HilbertClassField"
"HilbertClassPolynomial"
"HilbertCoefficient"
"HilbertCoefficients"
"HilbertCuspForms"
"HilbertDeltaVector"
"HilbertDenominator"
"HilbertFunction"
"HilbertGroebnerBasis"
"HilbertIdeal"
"HilbertMatrix"
"HilbertNumerator"
"HilbertNumeratorBettiNumbers"
"HilbertPolynomial"
"HilbertPolynomialOfCurve"
"HilbertSeries"
"HilbertSeriesApproximation"
"HilbertSeriesMultipliedByMinimalDenominator"
"HilbertSpace"
"HilbertSymbol"
"HirschNumber"
"HirzebruchSurface"
"Holes"
"Holomorph"
"Hom"
"HomAdjoints"
"HomGenerators"
"HomogeneousBlock"
"HomogeneousComponent"
"HomogeneousComponents"
"HomogeneousModuleTest"
"HomogeneousModuleTestBasis"
"HomogeneousRadical"
"HomogeneousToElementaryMatrix"
"HomogeneousToMonomialMatrix"
"HomogeneousToPowerSumMatrix"
"HomogeneousToSchurMatrix"
"Homogenization"
"HomologicalDimension"
"Homology"
"HomologyBasis"
"HomologyData"
"HomologyGenerators"
"HomologyGroup"
"HomologyOfChainComplex"
"Homomorphism"
"Homomorphisms"
"HomomorphismsLM"
"HomomorphismsProcess"
"HookLength"
"HorizontalFunction"
"HorizontalJoin"
"HorizontalVertices"
"Hull"
"HyperbolicBasis"
"HyperbolicCoxeterGraph"
"HyperbolicCoxeterMatrix"
"HyperbolicPair"
"HyperbolicSplitting"
"Hypercenter"
"Hypercentre"
"HyperellipticCurve"
"HyperellipticCurveFromG2Invariants"
"HyperellipticCurveFromIgusaClebsch"
"HyperellipticCurveOfGenus"
"HyperellipticInfiniteIntegral0"
"HyperellipticIntegral"
"HyperellipticInvolution"
"HyperellipticPolynomial"
"HyperellipticPolynomials"
"HypergeometricSeries"
"HypergeometricSeries2F1"
"HypergeometricU"
"Hyperplane"
"HyperplaneAtInfinity"
"HyperplaneSectionDivisor"
"HyperplaneToPolyhedron"
"ISA"
"ISABaseField"
"Id"
"IdDataNLAC"
"IdDataSLAC"
"Ideal"
"IdealFactorisation"
"IdealOfSupport"
"IdealQuotient"
"IdealWithFixedBasis"
"Idealiser"
"Idealizer"
"Ideals"
"IdealsAreEqual"
"IdealsUpTo"
"Idempotent"
"IdempotentActionGenerators"
"IdempotentGenerators"
"IdempotentPositions"
"Idempotents"
"IdenticalAmbientSpace"
"IdentificationNumber"
"IdentifyAlmostSimpleGroup"
"IdentifyGroup"
"IdentifyOneCocycle"
"IdentifyTwoCocycle"
"IdentifyZeroCocycle"
"Identity"
"IdentityAutomorphism"
"IdentityFieldMorphism"
"IdentityHomomorphism"
"IdentityIsogeny"
"IdentityMap"
"IdentityMatrix"
"IdentityMorphism"
"IdentitySparseMatrix"
"IdentityTransformation"
"IgusaClebschInvariants"
"IgusaClebschToClebsch"
"IgusaInvariants"
"IgusaToG2Invariants"
"IharaBound"
"Ilog"
"Ilog2"
"Im"
"Image"
"ImageBasis"
"ImageFan"
"ImageFromMat"
"ImageFunction"
"ImageOfComponentGroupOfJ0N"
"ImageSystem"
"ImageWithBasis"
"Imaginary"
"ImplicitFunction"
"Implicitization"
"ImportExternalMorphism"
"ImprimitiveAction"
"ImprimitiveBasis"
"ImprimitiveReflectionGroup"
"ImprimitiveReflectionGroupOld"
"ImproveAutomorphismGroup"
"InDegree"
"InEdge"
"InNeighbors"
"InNeighbours"
"IncidenceDigraph"
"IncidenceGeometry"
"IncidenceGraph"
"IncidenceMatrix"
"IncidenceStructure"
"IncidentEdges"
"Include"
"IncludeAutomorphism"
"IncludeWeight"
"InclusionMap"
"Inclusions"
"IndCond"
"IndecomposableSummands"
"IndentPop"
"IndentPush"
"IndependenceNumber"
"IndependentGenerators"
"IndependentUnits"
"IndeterminacyLocus"
"Index"
"IndexCalculus"
"IndexCalculusMatrix"
"IndexFormEquation"
"IndexOfFirstWhiteSpace"
"IndexOfNonWhiteSpace"
"IndexOfPartition"
"IndexOfSpeciality"
"IndexToElement"
"IndexedCoset"
"IndexedSet"
"IndexedSetToSequence"
"IndexedSetToSet"
"Indicator"
"Indices"
"IndicialPolynomial"
"IndivisibleSubdatum"
"IndivisibleSubsystem"
"InduceWG"
"InduceWGtable"
"InducedAutomorphism"
"InducedDivisorMap"
"InducedDivisorMap_old"
"InducedGammaGroup"
"InducedMap"
"InducedMapOnHomology"
"InducedOneCocycle"
"InducedPermutation"
"Induction"
"InductionCondensation"
"InductionSpin"
"IneffectiveDivisorToSheaf"
"IneffectivePossibilities"
"IneffectiveRiemannRochBasis"
"IneffectiveSubcanonicalCurves"
"Inequalities"
"InertiaDegree"
"InertiaField"
"InertiaGroup"
"InertialElement"
"Infimum"
"InfiniteDivisor"
"InfiniteOrderTest"
"InfinitePart"
"InfinitePlaces"
"InfinitePolynomial"
"InfiniteSum"
"Infinity"
"InflationMap"
"InflationMapImage"
"InflectionPoints"
"InformationRate"
"InformationSet"
"InformationSpace"
"InitProspector"
"InitialCoefficients"
"InitialVertex"
"InitialiseProspector"
"Initialize"
"InitializeBase"
"InitializeEvaluation"
"InitializeGaussianQuadrature"
"Injection"
"Injections"
"InjectiveHull"
"InjectiveModule"
"InjectiveResolution"
"InjectiveSyzygyModule"
"InnerAutomorphism"
"InnerAutomorphismGroup"
"InnerFaces"
"InnerGenerators"
"InnerNormal"
"InnerNormals"
"InnerProduct"
"InnerProductMatrix"
"InnerShape"
"InnerSlopes"
"InnerTwistOperator"
"InnerTwists"
"InnerVertices"
"InseparableDegree"
"Insert"
"InsertBasePoint"
"InsertBlock"
"InsertVertex"
"InstallInverseConstructor"
"Instance"
"InstancesForDimensions"
"IntegerMatrixEntryBound"
"IntegerRelation"
"IntegerRing"
"IntegerSolutionVariables"
"IntegerToSequence"
"IntegerToString"
"Integers"
"Integral"
"IntegralBasis"
"IntegralBasisLattice"
"IntegralBasisMinus"
"IntegralBasisPlus"
"IntegralClosure"
"IntegralDecomposition"
"IntegralGramMatrix"
"IntegralGroup"
"IntegralHeckeOperator"
"IntegralHomology"
"IntegralMapping"
"IntegralMatrix"
"IntegralMatrixByRows"
"IntegralMatrixGroupDatabase"
"IntegralMatrixOverQ"
"IntegralModel"
"IntegralModule"
"IntegralMultiple"
"IntegralNormEquation"
"IntegralPart"
"IntegralPoints"
"IntegralQuarticPoints"
"IntegralRepresentation"
"IntegralSplit"
"IntegralUEA"
"IntegralUEAlgebra"
"IntegralUniversalEnvelopingAlgebra"
"IntegralVector"
"Interior"
"InteriorPoints"
"Interpolation"
"IntersectKernels"
"Intersection"
"IntersectionArray"
"IntersectionCardinality"
"IntersectionForm"
"IntersectionForms"
"IntersectionGroup"
"IntersectionMatrix"
"IntersectionNumber"
"IntersectionOfImages"
"IntersectionPairing"
"IntersectionPairingIntegral"
"IntersectionPoints"
"IntersectionWithNormalSubgroup"
"IntersectionZBasis"
"Intseq"
"InvHom"
"InvariantBasis"
"InvariantFactors"
"InvariantField"
"InvariantForm"
"InvariantForms"
"InvariantHermitianForms"
"InvariantModule"
"InvariantQuaternionicForms"
"InvariantRepresentation"
"InvariantRing"
"Invariants"
"InvariantsMetacyclicPGroup"
"InvariantsOfDegree"
"Inverse"
"InverseDefiningPolynomials"
"InverseErf"
"InverseJeuDeTaquin"
"InverseKrawchouk"
"InverseMattsonSolomonTransform"
"InverseMod"
"InverseRSKCorrespondenceDoubleWord"
"InverseRSKCorrespondenceMatrix"
"InverseRSKCorrespondenceSingleWord"
"InverseRoot"
"InverseRowInsert"
"InverseSqrt"
"InverseSquareRoot"
"InverseTransformation"
"InverseWordMap"
"Involution"
"InvolutionClassicalGroupEven"
"Iroot"
"IrrationalPart"
"IrreducibleCartanMatrix"
"IrreducibleComponents"
"IrreducibleCoxeterGraph"
"IrreducibleCoxeterGroup"
"IrreducibleCoxeterMatrix"
"IrreducibleDynkinDigraph"
"IrreducibleFiniteStandardParabolicSubgroups"
"IrreducibleLowTermGF2Polynomial"
"IrreducibleMatrix"
"IrreducibleMatrixGroup"
"IrreducibleModule"
"IrreducibleModules"
"IrreducibleModulesBurnside"
"IrreducibleModulesInit"
"IrreducibleModulesSchur"
"IrreduciblePolynomial"
"IrreducibleReflectionGroup"
"IrreducibleRepresentationsInit"
"IrreducibleRepresentationsSchur"
"IrreducibleRootDatum"
"IrreducibleRootSystem"
"IrreducibleSecondaryInvariants"
"IrreducibleSimpleSubalgebraTreeSU"
"IrreducibleSimpleSubalgebrasOfSU"
"IrreducibleSolubleSubgroups"
"IrreducibleSparseGF2Polynomial"
"IrreducibleSubgroups"
"IrreducibleTrinomialsDatabase"
"IrreducibleWord"
"IrregularLDPCEnsemble"
"IrregularValues"
"IrregularVertices"
"Irregularity"
"IrrelevantComponents"
"IrrelevantGenerators"
"IrrelevantIdeal"
"Is2T1"
"IsAModule"
"IsAPN"
"IsAbelian"
"IsAbelianByFinite"
"IsAbelianVariety"
"IsAbsoluteField"
"IsAbsoluteOrder"
"IsAbsolutelyIrreducible"
"IsAbstractCartanMatrix"
"IsAcceptedWordDFA"
"IsAdditive"
"IsAdditiveOrder"
"IsAdditiveProjective"
"IsAdjoint"
"IsAffine"
"IsAffineLinear"
"IsAlgebraic"
"IsAlgebraicDifferentialField"
"IsAlgebraicField"
"IsAlgebraicGeometric"
"IsAlgebraicallyDependent"
"IsAlgebraicallyIsomorphic"
"IsAlmostIntegral"
"IsAlternating"
"IsAltsym"
"IsAmbient"
"IsAmbientSpace"
"IsAmple"
"IsAnalyticallyIrreducible"
"IsAnisotropic"
"IsAnticanonical"
"IsAntisymmetric"
"IsArc"
"IsArithmeticWeight"
"IsArithmeticallyCohenMacaulay"
"IsAssociative"
"IsAttachedToModularSymbols"
"IsAttachedToNewform"
"IsAutomatic"
"IsAutomaticGroup"
"IsAutomorphism"
"IsBalanced"
"IsBase64Encoded"
"IsBasePointFree"
"IsBiconnected"
"IsBig"
"IsBijective"
"IsBipartite"
"IsBlock"
"IsBlockTransitive"
"IsBogomolovUnstable"
"IsBoundary"
"IsBravaisEquivalent"
"IsCM"
"IsCalabiYauNumericalSeries"
"IsCanonical"
"IsCanonicalWithTwist"
"IsCapacitated"
"IsCartanEquivalent"
"IsCartanMatrix"
"IsCartanSubalgebra"
"IsCartier"
"IsCategory"
"IsCentral"
"IsCentralByFinite"
"IsCentralCollineation"
"IsChainMap"
"IsCharacter"
"IsChevalleyBasis"
"IsClassicalType"
"IsCluster"
"IsCoercible"
"IsCoercibleGrpLie"
"IsCohenMacaulay"
"IsCokernelTorsionFree"
"IsCollinear"
"IsCommutative"
"IsCompactHyperbolic"
"IsCompatible"
"IsComplete"
"IsCompletelyReducible"
"IsComplex"
"IsComponent"
"IsConcurrent"
"IsConditioned"
"IsConfluent"
"IsCongruence"
"IsCongruent"
"IsConic"
"IsConjugate"
"IsConjugateSubgroup"
"IsConnected"
"IsConnectedFibre"
"IsConsistent"
"IsConstaCyclic"
"IsConstant"
"IsConstantCurve"
"IsConway"
"IsCoprime"
"IsCorootSpace"
"IsCoxeterAffine"
"IsCoxeterCompactHyperbolic"
"IsCoxeterFinite"
"IsCoxeterGraph"
"IsCoxeterHyperbolic"
"IsCoxeterIrreducible"
"IsCoxeterIsomorphic"
"IsCoxeterMatrix"
"IsCrystallographic"
"IsCubeHeuristically"
"IsCubicModel"
"IsCurve"
"IsCusp"
"IsCuspidal"
"IsCuspidalNewform"
"IsCyclic"
"IsCyclotomic"
"IsCyclotomicPolynomial"
"IsDecomposable"
"IsDefault"
"IsDeficient"
"IsDefined"
"IsDefinedByQuadric"
"IsDefinedByQuadrics"
"IsDefinite"
"IsDegenerate"
"IsDelPezzo"
"IsDenselyRepresented"
"IsDesarguesian"
"IsDesign"
"IsDiagonal"
"IsDifferenceSet"
"IsDifferentialField"
"IsDifferentialIdeal"
"IsDifferentialLaurentSeriesRing"
"IsDifferentialOperatorRing"
"IsDifferentialRing"
"IsDifferentialRingElement"
"IsDifferentialSeriesRing"
"IsDirectSum"
"IsDirectSummand"
"IsDirected"
"IsDiscriminant"
"IsDisjoint"
"IsDistanceRegular"
"IsDistanceTransitive"
"IsDivisible"
"IsDivisibleBy"
"IsDivisionAlgebra"
"IsDivisionRing"
"IsDivisorialContraction"
"IsDomain"
"IsDominant"
"IsDoublePoint"
"IsDoublyEven"
"IsDualComputable"
"IsDynkinDigraph"
"IsEdgeCapacitated"
"IsEdgeLabelled"
"IsEdgeTransitive"
"IsEdgeWeighted"
"IsEffective"
"IsEichler"
"IsEigenform"
"IsEisenstein"
"IsEisensteinSeries"
"IsElementaryAbelian"
"IsEllipticCurve"
"IsEllipticWeierstrass"
"IsEmbedded"
"IsEmpty"
"IsEmptySimpleQuotientProcess"
"IsEmptyWord"
"IsEndomorphism"
"IsEof"
"IsEqual"
"IsEquationOrder"
"IsEquidistant"
"IsEquitable"
"IsEquivalent"
"IsEuclideanDomain"
"IsEuclideanRing"
"IsEulerian"
"IsEven"
"IsExact"
"IsExactlyDivisible"
"IsExceptionalUnit"
"IsExport"
"IsExtensionCategory"
"IsExtensionOf"
"IsExtraSpecial"
"IsExtraSpecialNormaliser"
"IsFTGeometry"
"IsFace"
"IsFactorial"
"IsFactorisationPrime"
"IsFaithful"
"IsFakeWeightedProjectiveSpace"
"IsFanMap"
"IsFano"
"IsField"
"IsFieldCategory"
"IsFinite"
"IsFiniteOrder"
"IsFirm"
"IsFixedAtLevel"
"IsFlag"
"IsFlex"
"IsFlexFast"
"IsFlipping"
"IsForest"
"IsFree"
"IsFrobenius"
"IsFuchsianOperator"
"IsFull"
"IsFunctionFieldCategory"
"IsFunctor"
"IsFundamental"
"IsFundamentalDiscriminant"
"IsGE"
"IsGHom"
"IsGL2Equivalent"
"IsGLConjugate"
"IsGLConjugateBigClassical"
"IsGLConjugateClassical"
"IsGLConjugateExtraspecial"
"IsGLConjugateImprimitive"
"IsGLConjugateReducible"
"IsGLConjugateSemilinear"
"IsGLConjugateSubfield"
"IsGLConjugateTensor"
"IsGLConjugateTensorInduced"
"IsGLQConjugate"
"IsGLZConjugate"
"IsGLattice"
"IsGamma"
"IsGamma0"
"IsGamma1"
"IsGammaUpper0"
"IsGammaUpper1"
"IsGe"
"IsGeneralizedCartanMatrix"
"IsGeneralizedCharacter"
"IsGenuineWeightedDynkinDiagram"
"IsGenus"
"IsGenusComputable"
"IsGenusOneModel"
"IsGeometricallyHyperelliptic"
"IsGerm"
"IsGlobal"
"IsGlobalUnit"
"IsGlobalUnitWithPreimage"
"IsGloballySplit"
"IsGorenstein"
"IsGorensteinSurface"
"IsGraded"
"IsGraph"
"IsGroebner"
"IsHadamard"
"IsHadamardEquivalent"
"IsHadamardEquivalentLeon"
"IsHeckeAlgebra"
"IsHeckeOperator"
"IsHereditary"
"IsHilbertNumerator"
"IsHolzerReduced"
"IsHomeomorphic"
"IsHomogeneous"
"IsHomomorphism"
"IsHyperbolic"
"IsHyperelliptic"
"IsHyperellipticCurve"
"IsHyperellipticCurveOfGenus"
"IsHyperellipticWeierstrass"
"IsHyperplane"
"IsHypersurface"
"IsHypersurfaceDivisor"
"IsId"
"IsIdeal"
"IsIdempotent"
"IsIdentical"
"IsIdenticalPresentation"
"IsIdentity"
"IsIdentityProduct"
"IsInArtinSchreierRepresentation"
"IsInBasicOrbit"
"IsInCorootSpace"
"IsInDual"
"IsInImage"
"IsInInterior"
"IsInKummerRepresentation"
"IsInRadical"
"IsInRootSpace"
"IsInSecantVariety"
"IsInSmallGroupDatabase"
"IsInSmallModularCurveDatabase"
"IsInSupport"
"IsInTangentVariety"
"IsInTwistedForm"
"IsIndecomposable"
"IsIndefinite"
"IsIndependent"
"IsIndivisibleRoot"
"IsInduced"
"IsInert"
"IsInertial"
"IsInfinite"
"IsInflectionPoint"
"IsInjective"
"IsInner"
"IsInnerAutomorphism"
"IsInt"
"IsInteger"
"IsIntegral"
"IsIntegralDomain"
"IsIntegralModel"
"IsIntegrallyClosed"
"IsInterior"
"IsIntrinsic"
"IsInvariant"
"IsInvertible"
"IsIrreducible"
"IsIrreducibleFiniteNilpotent"
"IsIrregularSingularPlace"
"IsIsogenous"
"IsIsogenousPeriodMatrices"
"IsIsogeny"
"IsIsolated"
"IsIsometric"
"IsIsometry"
"IsIsomorphic"
"IsIsomorphicBigPeriodMatrices"
"IsIsomorphicCubicSurface"
"IsIsomorphicFF"
"IsIsomorphicOverBase"
"IsIsomorphicOverQt"
"IsIsomorphicPGroups"
"IsIsomorphicSmallPeriodMatrices"
"IsIsomorphicWithTwist"
"IsIsomorphism"
"IsJacobianPencil"
"IsKEdgeConnected"
"IsKVertexConnected"
"IsKnownIsomorphic"
"IsKnuthEquivalent"
"IsLDPC"
"IsLE"
"IsLabelled"
"IsLabelledEdge"
"IsLabelledVertex"
"IsLargeReeGroup"
"IsLaurent"
"IsLe"
"IsLeaf"
"IsLeftIdeal"
"IsLeftIsomorphic"
"IsLeftModule"
"IsLehmerCode"
"IsLexicographicallyOrdered"
"IsLie"
"IsLineRegular"
"IsLineTransitive"
"IsLinear"
"IsLinearGroup"
"IsLinearScheme"
"IsLinearSpace"
"IsLinearSystemNonEmpty"
"IsLinearlyDependent"
"IsLinearlyEquivalent"
"IsLinearlyEquivalentToCartier"
"IsLinearlyIndependent"
"IsLittleWoodRichardson"
"IsLocal"
"IsLocalNorm"
"IsLocallyFree"
"IsLocallySoluble"
"IsLocallySolvable"
"IsLocallyTwoTransitive"
"IsLongRoot"
"IsLowerTriangular"
"IsMDS"
"IsMagmaEuclideanRing"
"IsMatrixRing"
"IsMaximal"
"IsMaximalAtRamifiedPrimes"
"IsMaximisingFunction"
"IsMaximumDimensional"
"IsMaximumDistanceSeparable"
"IsMemberBasicOrbit"
"IsMetacyclicPGroup"
"IsMinimal"
"IsMinimalModel"
"IsMinimalTwist"
"IsMinusOne"
"IsMinusQuotient"
"IsMixed"
"IsMobile"
"IsModular"
"IsModularCurve"
"IsModuleHomomorphism"
"IsMonic"
"IsMonomial"
"IsMonomialIsomorphic"
"IsMonomialRepresentation"
"IsMoriFibreSpace"
"IsMorphism"
"IsMorphismCategory"
"IsMultiChar"
"IsNarrowlyPrincipal"
"IsNearLinearSpace"
"IsNearlyPerfect"
"IsNeat"
"IsNef"
"IsNefAndBig"
"IsNegative"
"IsNegativeDefinite"
"IsNegativeSemiDefinite"
"IsNew"
"IsNewform"
"IsNewtonPolygonOf"
"IsNilpotent"
"IsNilpotentByFinite"
"IsNodalCurve"
"IsNode"
"IsNonSingular"
"IsNonsingular"
"IsNorm"
"IsNormal"
"IsNormalised"
"IsNormalising"
"IsNormalized"
"IsNull"
"IsNullHomotopy"
"IsNumberField"
"IsObject"
"IsOdd"
"IsOddDegree"
"IsOnBoundary"
"IsOne"
"IsOneCoboundary"
"IsOneCocycle"
"IsOnlyMotivic"
"IsOptimal"
"IsOrbit"
"IsOrder"
"IsOrderTerm"
"IsOrdered"
"IsOrdinary"
"IsOrdinaryProjective"
"IsOrdinarySingularity"
"IsOrthogonalGroup"
"IsOuter"
"IsOverQ"
"IsOverSmallerField"
"IsPID"
"IsPIR"
"IsPRI"
"IsPSaturated"
"IsParabolic"
"IsParallel"
"IsParallelClass"
"IsParallelWeight"
"IsParallelism"
"IsPartialRoot"
"IsPartition"
"IsPartitionRefined"
"IsPath"
"IsPerfect"
"IsPerfectlyCentered"
"IsPermutationModule"
"IsPlanar"
"IsPlaneCurve"
"IsPlusQuotient"
"IsPoint"
"IsPointRegular"
"IsPointTransitive"
"IsPointed"
"IsPolycyclic"
"IsPolycyclicByFinite"
"IsPolygon"
"IsPolynomial"
"IsPolytope"
"IsPositive"
"IsPositiveDefinite"
"IsPositiveSemiDefinite"
"IsPower"
"IsPowerOf"
"IsPrimary"
"IsPrime"
"IsPrimeCertificate"
"IsPrimeField"
"IsPrimePower"
"IsPrimitive"
"IsPrimitiveFiniteNilpotent"
"IsPrincipal"
"IsPrincipalIdealDomain"
"IsPrincipalIdealRing"
"IsPrincipalSeries"
"IsProbablePrime"
"IsProbablyMaximal"
"IsProbablyPerfect"
"IsProbablyPermutationPolynomial"
"IsProbablyPrime"
"IsProbablySupersingular"
"IsProductOfParallelDescendingCycles"
"IsProjective"
"IsProjectivelyIrreducible"
"IsProper"
"IsProperChainMap"
"IsProportional"
"IsPseudoReflection"
"IsPure"
"IsPureOrder"
"IsPureQuantumCode"
"IsPyramid"
"IsQCartier"
"IsQFactorial"
"IsQGorenstein"
"IsQPrincipal"
"IsQuadratic"
"IsQuadraticSpace"
"IsQuadraticTwist"
"IsQuadricIntersection"
"IsQuasiCyclic"
"IsQuasiSimpleTwistedCyclic"
"IsQuasiTwistedCyclic"
"IsQuasisplit"
"IsQuaternionAlgebra"
"IsQuaternionic"
"IsQuotient"
"IsRC"
"IsRPRI"
"IsRWP"
"IsRWPRI"
"IsRadical"
"IsRamified"
"IsRational"
"IsRationalCurve"
"IsRationalFunctionField"
"IsRationalPoint"
"IsRawCurve"
"IsReal"
"IsRealReflectionGroup"
"IsRealisableOverSmallerField"
"IsRealisableOverSubfield"
"IsReduced"
"IsReductive"
"IsReeGroup"
"IsReflection"
"IsReflectionGroup"
"IsReflectionSubgroup"
"IsReflexive"
"IsRegular"
"IsRegularLDPC"
"IsRegularPlace"
"IsRegularSingularOperator"
"IsRegularSingularPlace"
"IsRepresentation"
"IsResiduallyConnected"
"IsResiduallyPrimitive"
"IsResiduallyWeaklyPrimitive"
"IsResolution"
"IsRestrictable"
"IsRestricted"
"IsRestrictedLieAlgebra"
"IsRestrictedSubalgebra"
"IsReverseLatticeWord"
"IsRightIdeal"
"IsRightIsomorphic"
"IsRightModule"
"IsRing"
"IsRingHomomorphism"
"IsRingOfAllModularForms"
"IsRoot"
"IsRootOfUnity"
"IsRootSpace"
"IsRootedTree"
"IsSIntegral"
"IsSPrincipal"
"IsSUnit"
"IsSUnitWithPreimage"
"IsSatisfied"
"IsSaturated"
"IsScalar"
"IsScalarGroup"
"IsSelfDual"
"IsSelfNormalising"
"IsSelfNormalizing"
"IsSelfOrthogonal"
"IsSemiLinear"
"IsSemiregular"
"IsSemisimple"
"IsSeparable"
"IsSeparating"
"IsServerSocket"
"IsSharplyTransitive"
"IsShortExactSequence"
"IsShortRoot"
"IsSimilar"
"IsSimple"
"IsSimpleOrder"
"IsSimpleStarAlgebra"
"IsSimplex"
"IsSimplicial"
"IsSimplifiedModel"
"IsSimplyConnected"
"IsSimplyLaced"
"IsSinglePrecision"
"IsSingular"
"IsSkew"
"IsSmooth"
"IsSmoothHyperSurface"
"IsSoluble"
"IsSolubleByFinite"
"IsSolvable"
"IsSpecial"
"IsSpinorGenus"
"IsSpinorNorm"
"IsSplit"
"IsSplitAsIdealAt"
"IsSplitToralSubalgebra"
"IsSplittingCartanSubalgebra"
"IsSplittingField"
"IsSquare"
"IsSquarefree"
"IsStandard"
"IsStandardAffinePatch"
"IsStandardGF"
"IsStandardParabolicSubgroup"
"IsStarAlgebra"
"IsSteiner"
"IsStratum"
"IsStrictlyConvex"
"IsStrictlyNef"
"IsStronglyAG"
"IsStronglyConnected"
"IsSubcanonicalCurve"
"IsSubfield"
"IsSubgraph"
"IsSubgroup"
"IsSublattice"
"IsSubmodule"
"IsSubnormal"
"IsSubscheme"
"IsSubsequence"
"IsSuitableQuaternionOrder"
"IsSuperSummitRepresentative"
"IsSupercuspidal"
"IsSuperlattice"
"IsSupersingular"
"IsSupersoluble"
"IsSupportingHyperplane"
"IsSurjective"
"IsSuzukiGroup"
"IsSymmetric"
"IsSymplecticGroup"
"IsSymplecticMatrix"
"IsSymplecticSelfDual"
"IsSymplecticSelfOrthogonal"
"IsSymplecticSpace"
"IsTIrreducible"
"IsTSelfdual"
"IsTamelyRamified"
"IsTangent"
"IsTensor"
"IsTensorInduced"
"IsTerminal"
"IsTerminalThreefold"
"IsThick"
"IsThin"
"IsToralSubalgebra"
"IsTorsionUnit"
"IsTorusInvariant"
"IsTotallyComplex"
"IsTotallyEven"
"IsTotallyPositive"
"IsTotallyRamified"
"IsTotallyReal"
"IsTotallySingular"
"IsTotallySplit"
"IsTransformation"
"IsTransitive"
"IsTransvection"
"IsTransverse"
"IsTree"
"IsTriangleGroup"
"IsTriangulable"
"IsTriconnected"
"IsTrivial"
"IsTrivialOnUnits"
"IsTwist"
"IsTwisted"
"IsTwoCoboundary"
"IsTwoSidedIdeal"
"IsUFD"
"IsUltraSummitRepresentative"
"IsUndirected"
"IsUniform"
"IsUnipotent"
"IsUniqueFactorisationDomain"
"IsUniqueFactorizationDomain"
"IsUniquePartialRoot"
"IsUnit"
"IsUnitWithPreimage"
"IsUnital"
"IsUnitary"
"IsUnitaryGroup"
"IsUnitaryRepresentation"
"IsUnitarySpace"
"IsUnivariate"
"IsUnramified"
"IsUpperTriangular"
"IsValid"
"IsValidLargeReeOrder"
"IsVerbose"
"IsVertex"
"IsVertexLabelled"
"IsVertexTransitive"
"IsWGsymmetric"
"IsWP"
"IsWPRI"
"IsWeaklyAG"
"IsWeaklyAGDual"
"IsWeaklyAdjoint"
"IsWeaklyConnected"
"IsWeaklyEqual"
"IsWeaklyMonic"
"IsWeaklyPrimitive"
"IsWeaklySimplyConnected"
"IsWeaklyZero"
"IsWeierstrassModel"
"IsWeierstrassPlace"
"IsWeight"
"IsWeightVector"
"IsWeighted"
"IsWeightedProjectiveSpace"
"IsWeil"
"IsWildlyRamified"
"IsWindows"
"IsWreathProduct"
"IsZero"
"IsZeroAt"
"IsZeroComplex"
"IsZeroDimensional"
"IsZeroDivisor"
"IsZeroMap"
"IsZeroTerm"
"Isetseq"
"Isetset"
"Iso"
"IsogeniesAreEqual"
"IsogenousCurves"
"Isogeny"
"IsogenyFromKernel"
"IsogenyFromKernelFactored"
"IsogenyGroup"
"IsogenyMapOmega"
"IsogenyMapPhi"
"IsogenyMapPhiMulti"
"IsogenyMapPsi"
"IsogenyMapPsiMulti"
"IsogenyMapPsiSquared"
"IsogenyMu"
"IsolGroup"
"IsolGroupDatabase"
"IsolGroupOfDegreeFieldSatisfying"
"IsolGroupOfDegreeSatisfying"
"IsolGroupSatisfying"
"IsolGroupsOfDegreeFieldSatisfying"
"IsolGroupsOfDegreeSatisfying"
"IsolGroupsSatisfying"
"IsolGuardian"
"IsolInfo"
"IsolIsPrimitive"
"IsolMinBlockSize"
"IsolNumberOfDegreeField"
"IsolOrder"
"IsolProcess"
"IsolProcessGroup"
"IsolProcessInfo"
"IsolProcessIsEmpty"
"IsolProcessLabel"
"IsolProcessNext"
"IsolProcessOfDegree"
"IsolProcessOfDegreeField"
"IsolProcessOfField"
"IsolateRoots"
"IsolatedGorensteinSingularitiesOfIndex"
"IsolatedPointsFinder"
"IsolatedPointsLiftToMinimalPolynomials"
"IsolatedPointsLifter"
"IsometricCircle"
"IsometryGroup"
"IsomorphicCopy"
"IsomorphicMatrixLieAlgebra"
"IsomorphicProjectionToSubspace"
"IsomorphicSubmodules"
"Isomorphism"
"IsomorphismData"
"IsomorphismExtension"
"IsomorphismExtensions"
"IsomorphismToIsogeny"
"IsomorphismToStandardCopy"
"IsomorphismToStandardSCDtm"
"Isomorphisms"
"IsomorphismsOverBase"
"IsotropicSubspace"
"IsotropicVector"
"IspGroup"
"IspIntegral"
"IspLieAlgebra"
"IspMaximal"
"IspMinimal"
"IspNormal"
"IspSubalgebra"
"Isqrt"
"Itest"
"JBessel"
"JH"
"JInvariants"
"JOne"
"JZero"
"Jacobi"
"JacobiSymbol"
"JacobiTheta"
"JacobiThetaNullK"
"Jacobian"
"JacobianIdeal"
"JacobianMatrix"
"JacobianOrdersByDeformation"
"JacobianPoint"
"JacobianSequence"
"JacobianSubrankScheme"
"JacobsonRadical"
"JacobsonRadicalAlgBas"
"JacobsonRadicalOverFiniteField"
"JellyfishConstruction"
"JellyfishImage"
"JellyfishPreimage"
"JenningsLieAlgebra"
"JenningsSeries"
"JeuDeTaquin"
"JohnsonBound"
"Join"
"JoinDFA"
"JordanBlock"
"JordanDecomposition"
"JordanForm"
"Js"
"JustesenCode"
"Juxtaposition"
"K3Baskets"
"K3Copy"
"K3Database"
"K3Surface"
"K3SurfaceFromRawData"
"K3SurfaceRawData"
"K3SurfaceToRecord"
"K3SurfaceWithCodimension"
"KBessel"
"KBessel2"
"KBinomial"
"KCubeGraph"
"KDegree"
"KLPolynomial"
"KMatrixSpace"
"KMatrixSpaceWithBasis"
"KModule"
"KModuleWithBasis"
"KSpace"
"KSpaceWithBasis"
"KacMoodyClass"
"KacMoodyClasses"
"KappaLattice"
"KaratsubaMultiplication"
"KeepAbelian"
"KeepElementary"
"KeepElementaryAbelian"
"KeepGeneratorAction"
"KeepGeneratorOrder"
"KeepGroupAction"
"KeepPGroupWeights"
"KeepPrimePower"
"KeepSplit"
"KeepSplitAbelian"
"KeepSplitElementaryAbelian"
"KerdockCode"
"Kernel"
"KernelBasis"
"KernelEmbedding"
"KernelMatrix"
"KernelOrder"
"KernelZ2CodeZ4"
"Kernels"
"Keys"
"KillingDifferentialModp"
"KillingForm"
"KillingMatrix"
"KissingNumber"
"KleinBottle"
"KleinQuartic"
"KnapsackSolutions"
"Knot"
"KnownAutomorphismSubgroup"
"KnownCoefficient"
"KnownFactors"
"KnownFactorsAndCoefficient"
"KnownIrreducibles"
"KodairaSymbol"
"KodairaSymbols"
"KostkaNumber"
"KrawchoukPolynomial"
"KrawchoukTransform"
"KroneckerCharacter"
"KroneckerDelta"
"KroneckerProduct"
"KroneckerSymbol"
"KummerSurface"
"KummerSurfacePointRaw"
"KummerSurfaceRaw"
"LCLM"
"LCM"
"LCT"
"LCfRequired"
"LDPCBinarySymmetricThreshold"
"LDPCCode"
"LDPCDecode"
"LDPCDensity"
"LDPCEnsembleRate"
"LDPCGaussianThreshold"
"LDPCGirth"
"LDPCMatrix"
"LDPCSimulate"
"LFSRSequence"
"LFSRStep"
"LFunction"
"LGetCoefficients"
"LHS"
"LLL"
"LLLBasis"
"LLLBasisMatrix"
"LLLBlock"
"LLLGram"
"LLLGramMatrix"
"LLLReducedModel"
"LMGCenter"
"LMGCentre"
"LMGChiefFactors"
"LMGChiefSeries"
"LMGCommutatorSubgroup"
"LMGCompositionFactors"
"LMGCompositionSeries"
"LMGDerivedGroup"
"LMGEqual"
"LMGFactoredOrder"
"LMGFittingSubgroup"
"LMGIndex"
"LMGIsIn"
"LMGIsNilpotent"
"LMGIsNormal"
"LMGIsSoluble"
"LMGIsSolvable"
"LMGIsSubgroup"
"LMGNormalClosure"
"LMGOrder"
"LMGSocleStar"
"LMGSocleStarAction"
"LMGSocleStarActionKernel"
"LMGSocleStarFactors"
"LMGSocleStarQuotient"
"LMGSolubleRadical"
"LMGSolvableRadical"
"LMGSylow"
"LMGUnipotentRadical"
"LPProcess"
"LPolynomial"
"LProduct"
"LRatio"
"LRatioOddPart"
"LSeries"
"LSeriesData"
"LSeriesLeadingCoefficient"
"LSetCoefficients"
"LSetPrecision"
"LStar"
"LTaylor"
"LUB"
"Label"
"LabelToMatrixInternal"
"Labelling"
"Labels"
"LaguerrePolynomial"
"LaminatedLattice"
"Lang"
"LanguageCountInternal"
"LanguageDFA"
"Laplace"
"LargeRee"
"LargeReeBNpair"
"LargeReeConjugacy"
"LargeReeDiagonalisation"
"LargeReeElementToWord"
"LargeReeGeneralRecogniser"
"LargeReeGroup"
"LargeReeInvolutionCentraliser"
"LargeReeInvolutionClass"
"LargeReeIrreducibleRepresentation"
"LargeReeMaximalSubgroups"
"LargeReeRecognition"
"LargeReeReduction"
"LargeReeRedundantSLPGenerators"
"LargeReeResetRandomProcess"
"LargeReeSLPCoercion"
"LargeReeStandardConstructiveMembership"
"LargeReeStandardMaximalSubgroups"
"LargeReeStandardMembership"
"LargeReeStandardRecogniser"
"LargeReeSylow"
"LargeReeSzInvolution"
"LargestConductor"
"LargestDimension"
"LargestOrder"
"LastColumnEntry"
"LastIndexOfRow"
"Lattice"
"LatticeBasisInCone"
"LatticeBasisMatrix"
"LatticeCoordinates"
"LatticeData"
"LatticeDatabase"
"LatticeElementToMonomial"
"LatticeMap"
"LatticeMinkowskiDecomposition"
"LatticeName"
"LatticeToZGram"
"LatticeVector"
"LatticeVectorsInBox"
"LatticeWithBasis"
"LatticeWithGram"
"LaurentSeriesAlgebra"
"LaurentSeriesRing"
"LayerBoundary"
"LayerLength"
"LazyPowerSeriesRing"
"LazySeries"
"Lcm"
"LeadingCoefficient"
"LeadingExponent"
"LeadingGenerator"
"LeadingMonomial"
"LeadingMonomialIdeal"
"LeadingTerm"
"LeadingTotalDegree"
"LeadingWeightedDegree"
"LeastCommonLeftMultiple"
"LeastCommonMultiple"
"LeastUpperBound"
"LeeBrickellsAttack"
"LeeDistance"
"LeeDistance1"
"LeeWeight"
"LeeWeight1"
"LeeWeightDistribution"
"LeeWeightEnumerator"
"LeftAnnihilator"
"LeftComplex"
"LeftConjugate"
"LeftCosetSpace"
"LeftDescentSet"
"LeftDiv"
"LeftExactExtension"
"LeftGCD"
"LeftGcd"
"LeftGreatestCommonDivisor"
"LeftIdeal"
"LeftIdealClasses"
"LeftInverse"
"LeftInverseMorphism"
"LeftIsomorphism"
"LeftLCM"
"LeftLcm"
"LeftLeastCommonMultiple"
"LeftMixedCanonicalForm"
"LeftNormalForm"
"LeftOrder"
"LeftRepresentationMatrix"
"LeftString"
"LeftStringLength"
"LeftZeroExtension"
"LegendreEquation"
"LegendreModel"
"LegendrePolynomial"
"LegendreSymbol"
"LehmerCode"
"LehmerCodeToPerm"
"Length"
"LengthenCode"
"Lengths"
"LensSpace"
"LeonsAttack"
"LetterCreate"
"LetterDelete"
"LetterPreImage"
"LetterPrint"
"LetterVarAlgebra"
"LetterVarCalc"
"LetterVarCheck"
"LetterVarCocycles"
"LetterVarConsistency"
"LetterVarConsistencyProc"
"LetterVarCreate"
"LetterVarDelete"
"LetterVarEquations"
"LetterVarFpRelsProc"
"LetterVarGroup"
"LetterVarPreImage"
"LetterVarPrint"
"Level"
"Levels"
"LevenshteinBound"
"LexProduct"
"LexicographicalOrdering"
"LiEMaximalSubgroups"
"LiERootDatum"
"LiESymmetricCharacterValue"
"LibFileOpen"
"LieAlgebra"
"LieAlgebraHomomorphism"
"LieAlgebraOfDerivations"
"LieBracket"
"LieCharacteristic"
"LieConstant_C"
"LieConstant_M"
"LieConstant_N"
"LieConstant_epsilon"
"LieConstant_eta"
"LieConstant_p"
"LieConstant_q"
"LieRepresentationDecomposition"
"LieType"
"Lift"
"LiftCharacter"
"LiftCharacters"
"LiftCocycle"
"LiftDescendant"
"LiftHomomorphism"
"LiftHomomorphismGroupP"
"LiftIsogeny"
"LiftIsomorphism"
"LiftMap"
"LiftModule"
"LiftModules"
"LiftNonsplitExtension"
"LiftNonsplitExtensionRow"
"LiftPoint"
"LiftSplitExtension"
"LiftSplitExtensionRow"
"LiftToChainmap"
"Line"
"LineAtInfinity"
"LineGraph"
"LineGroup"
"LineOrbits"
"LineSet"
"LinearCharacters"
"LinearCode"
"LinearCombinationOfEigenformsOverC"
"LinearConeGenerators"
"LinearElimination"
"LinearGraph"
"LinearRelation"
"LinearRelations"
"LinearRepresentationSetup"
"LinearRepresentations"
"LinearShift"
"LinearSpace"
"LinearSpanEquations"
"LinearSpanGenerators"
"LinearSubspaceGenerators"
"LinearSystem"
"LinearSystemAtPhi"
"LinearSystemTrace"
"LinearlyEquivalentDivisorWithNoSupportOn"
"Lines"
"LinesInScheme"
"Linking"
"LinkingNumbers"
"ListAttributes"
"ListCategories"
"ListEntriesEqual"
"ListSignatures"
"ListTypes"
"ListVerbose"
"LittlewoodRichardsonTensor"
"LocalComponent"
"LocalCoxeterGroup"
"LocalDegree"
"LocalFactorization"
"LocalField"
"LocalGenera"
"LocalGlobalSelmerDiagram"
"LocalHeight"
"LocalInformation"
"LocalIntersectionData"
"LocalPolynomialRing"
"LocalRing"
"LocalRootNumber"
"LocalTwoSelmerMap"
"LocalUniformizer"
"Localisation"
"Localization"
"Log"
"LogCanonicalThreshold"
"LogCanonicalThresholdAtOrigin"
"LogCanonicalThresholdOverExtension"
"LogDerivative"
"LogGamma"
"LogIntegral"
"LogNorms"
"LogarithmicFieldExtension"
"Logs"
"LongBits"
"LongDivision"
"LongExactSequenceOnHomology"
"LongWords"
"LongestElement"
"LongestWeylWord"
"Lookup"
"LookupPrime"
"LowDimSubmodules"
"LowIndexNormalSubgroups"
"LowIndexProcess"
"LowIndexSubgroups"
"LowIndexSubgroupsSn"
"LowIndexSubmodules"
"LowerCentralSeries"
"LowerFaces"
"LowerSlopes"
"LowerTriangularMatrix"
"LowerVertices"
"Lucas"
"MAXSGPInternal"
"MCPolynomials"
"MCSplit"
"MDSCode"
"MEANS"
"MGCD"
"MMP"
"MPQS"
"MSQLetternonsplit"
"MSQLettersplit"
"MSQnonsplit"
"MSQnonsplitBase"
"MSQsplit"
"MSQsplitBase"
"MSetPolynomial"
"MSumPolynomial"
"MacWilliamsTransform"
"MagicNumber"
"Main"
"MainInvolution"
"MakeBasket"
"MakeCoprime"
"MakeCyclotomic"
"MakeDirected"
"MakeHomWithPreimageHandler"
"MakeIsSquare"
"MakeMapWithPreimageHandler"
"MakeModCubes"
"MakePCMap"
"MakeProjectiveClosureMap"
"MakeRepsDB"
"MakeRepsSmall"
"MakeResiduesSEA"
"MakeResolutionGraph"
"MakeSpliceDiagram"
"MakeType"
"Manifold"
"ManifoldDatabase"
"ManinConstant"
"ManinSymbol"
"MantissaExponent"
"MapToMatrix"
"Mapping"
"Maps"
"MargulisCode"
"MarkGroebner"
"Mass"
"MasseyProduct"
"MatRep"
"MatRepCharacteristics"
"MatRepDegrees"
"MatRepFieldSizes"
"MatRepKeys"
"Match"
"Matrices"
"Matrix"
"MatrixAlgebra"
"MatrixGroup"
"MatrixLieAlgebra"
"MatrixOfElement"
"MatrixOfInequalities"
"MatrixOfIsomorphism"
"MatrixQuotient"
"MatrixRepresentation"
"MatrixRing"
"MatrixToLabelInternal"
"MatrixToPerm"
"MatrixToWord"
"MatrixUnit"
"MatrixWithGivenCharacteristicPolynomial"
"MattsonSolomonTransform"
"Max"
"MaxCones"
"MaxNorm"
"MaxOrthPCheck"
"MaxParabolics"
"MaxSub"
"MaxSubKeys"
"MaxSubsTF2"
"MaxSubsTF4"
"Maxdeg"
"MaximalAbelianSubfield"
"MaximalCoefficientCode"
"MaximalExtension"
"MaximalIdeals"
"MaximalIncreasingSequence"
"MaximalIncreasingSequences"
"MaximalIntegerSolution"
"MaximalLeftIdeals"
"MaximalNormalSubgroup"
"MaximalNumberOfCosets"
"MaximalOddOrderNormalSubgroup"
"MaximalOrder"
"MaximalOrderBasis"
"MaximalOrderFinite"
"MaximalOrderInfinite"
"MaximalOvergroup"
"MaximalParabolics"
"MaximalPartition"
"MaximalRightIdeals"
"MaximalSingularSubspace"
"MaximalSolution"
"MaximalSubfields"
"MaximalSubgroups"
"MaximalSubgroupsAlt"
"MaximalSubgroupsAltSym"
"MaximalSubgroupsData"
"MaximalSubgroupsH"
"MaximalSubgroupsSym"
"MaximalSubgroupsTF"
"MaximalSublattices"
"MaximalSubmodules"
"MaximalTotallyIsotropicSubspace"
"MaximalVertexFacetHeightMatrix"
"MaximalZeroOneSolution"
"Maximum"
"MaximumBettiDegree"
"MaximumClique"
"MaximumDegree"
"MaximumFlow"
"MaximumInDegree"
"MaximumIndependentSet"
"MaximumMatching"
"MaximumNorm"
"MaximumOutDegree"
"MaximumStoredIrreducibleDegree"
"Maxindeg"
"Maxoutdeg"
"McElieceEtAlAsymptoticBound"
"McEliecesAttack"
"Meataxe"
"MeetDFA"
"MelikianLieAlgebra"
"MemCompact"
"MemProfile"
"Memory"
"MergeFields"
"MergeFiles"
"MergeUnits"
"MetacyclicPGroups"
"Mij2EltRootTable"
"Mike1"
"MilnorNumber"
"Min"
"MinParabolics"
"MinRowsGeneratorMatrix"
"Mindeg"
"MinimalAlgebraGenerators"
"MinimalAndCharacteristicPolynomials"
"MinimalBaseRingCharacter"
"MinimalBasis"
"MinimalBlocks"
"MinimalCoefficientDegree"
"MinimalCyclotomicField"
"MinimalDecomposition"
"MinimalDegreeModel"
"MinimalElementConjugatingToPositive"
"MinimalElementConjugatingToSuperSummit"
"MinimalElementConjugatingToUltraSummit"
"MinimalExtensionBasis"
"MinimalField"
"MinimalFreeResolution"
"MinimalHeckePolynomial"
"MinimalIdeals"
"MinimalInequalities"
"MinimalInteger"
"MinimalIntegerSolution"
"MinimalIsogeny"
"MinimalLeeWords"
"MinimalLeftIdeals"
"MinimalModel"
"MinimalNormalSubgroup"
"MinimalNormalSubgroups"
"MinimalOverfields"
"MinimalOvergroup"
"MinimalOvergroups"
"MinimalParabolics"
"MinimalPartition"
"MinimalPartitions"
"MinimalPolynomial"
"MinimalPolynomialFrobenius"
"MinimalPositiveGenerators"
"MinimalPrimeComponents"
"MinimalQuadraticTwist"
"MinimalRGenerators"
"MinimalRelations"
"MinimalRightIdeals"
"MinimalSolution"
"MinimalSubmodule"
"MinimalSubmodules"
"MinimalSuperlattices"
"MinimalSupermodules"
"MinimalSyzygyModule"
"MinimalTwist"
"MinimalVectorSequence"
"MinimalWeierstrassModel"
"MinimalWords"
"MinimalZeroOneSolution"
"MinimisationMatrix"
"Minimise"
"MinimiseConicToMatrix"
"MinimiseReduce"
"MinimiseWeights"
"Minimize"
"MinimizeCubicSurface"
"MinimizeDFA"
"MinimizeDeg4delPezzo"
"MinimizeGenerators"
"MinimizePlaneQuartic"
"MinimizeReduce"
"MinimizeReduceCubicSurface"
"MinimizeReduceDeg4delPezzo"
"MinimizeReducePlaneQuartic"
"Minimum"
"MinimumCut"
"MinimumDegree"
"MinimumDistance"
"MinimumDominatingSet"
"MinimumEuclideanDistance"
"MinimumEuclideanWeight"
"MinimumInDegree"
"MinimumLeeDistance"
"MinimumLeeWeight"
"MinimumLeeWeightBounds"
"MinimumLeeWords"
"MinimumOutDegree"
"MinimumWeight"
"MinimumWeightBounds"
"MinimumWeightTree"
"MinimumWord"
"MinimumWords"
"Minindeg"
"MinkowskiBound"
"MinkowskiDecomposition"
"MinkowskiGramReduction"
"MinkowskiLattice"
"MinkowskiReduction"
"MinkowskiSpace"
"Minor"
"MinorBoundary"
"MinorLength"
"Minors"
"Minoutdeg"
"Minus"
"MinusInfinity"
"MinusTamagawaNumber"
"MinusVolume"
"MixedCanonicalForm"
"ModByPowerOf2"
"ModelParent"
"ModelToSequence"
"ModelToString"
"ModelType"
"Modexp"
"ModifyProcess"
"ModifySelfintersection"
"ModifyTransverseIntersection"
"Modinv"
"Modorder"
"Modsqrt"
"ModularAbelianVariety"
"ModularComposition"
"ModularCompositionApply"
"ModularCompositionSetup"
"ModularCompositions"
"ModularCurve"
"ModularCurveDatabase"
"ModularCurveQuotient"
"ModularCurves"
"ModularDegree"
"ModularEmbedding"
"ModularEquation"
"ModularForm"
"ModularForms"
"ModularHyperellipticCurve"
"ModularKernel"
"ModularNonHyperellipticCurveGenus3"
"ModularParameterization"
"ModularParametrisation"
"ModularParametrization"
"ModularPolarization"
"ModularSolution"
"ModularSymbol"
"ModularSymbolApply"
"ModularSymbolEven"
"ModularSymbolOdd"
"ModularSymbolRepresentation"
"ModularSymbolToIntegralHomology"
"ModularSymbolToRationalHomology"
"ModularSymbols"
"ModularSymbolsH"
"ModularSymbolsModSmallPrime"
"Module"
"ModuleExtension"
"ModuleExtensionComplement"
"ModuleHomomorphism"
"ModuleMap"
"ModuleMaps"
"ModuleOverSmallerField"
"ModuleProject"
"ModuleProjectM"
"ModuleSaturation"
"ModuleToZModule"
"ModuleWithBasis"
"Modules"
"ModulesOverCommonField"
"ModulesOverSmallerField"
"Moduli"
"ModuliPoints"
"Modulus"
"ModulusIsFinite"
"MoebiusMu"
"MoebiusStrip"
"MolienSeries"
"MolienSeriesApproximation"
"MonicDifferentialOperator"
"MonicModel"
"MonodromyPairing"
"MonodromyWeights"
"Monoid"
"Monomial"
"MonomialAutomorphismGroup"
"MonomialBasis"
"MonomialCoefficient"
"MonomialDivisionList"
"MonomialGroup"
"MonomialGroupStabilizer"
"MonomialLattice"
"MonomialMatrix"
"MonomialOrder"
"MonomialOrderWeightVectors"
"MonomialSubgroup"
"MonomialToCoxMonomialsLattice"
"MonomialToElementaryMatrix"
"MonomialToHomogeneousMatrix"
"MonomialToPowerSumMatrix"
"MonomialToSchurMatrix"
"Monomials"
"MonomialsOfDegree"
"MonomialsOfDegreeZero"
"MonomialsOfWeightedDegree"
"MooreDeterminant"
"MordellWeilGroup"
"MordellWeilLattice"
"MordellWeilRank"
"MordellWeilRankBounds"
"MordellWeilShaInformation"
"MordellWeilSubgroup"
"MoriCone"
"Morphism"
"MorphismAutomorphism"
"MorphismAutomorphisms"
"MorphismCategory"
"MorphismFromImages"
"MorphismFromImagesAndBaseMorphism"
"MorphismMap"
"MorphismMapHasPreimage"
"MotivicWeight"
"MovablePart"
"Mult"
"MultiKnapsackSolutions"
"MultiQuotientMaps"
"MultiRank"
"MultiSpaces"
"MultiTuple"
"Multidegree"
"Multinomial"
"MultipartiteGraph"
"MultiplicationByMMap"
"MultiplicationTable"
"MultiplicativeGroup"
"MultiplicativeJordanDecomposition"
"MultiplicativeOrder"
"MultiplicatorRing"
"Multiplicities"
"Multiplicity"
"Multiplier"
"MultiplyByTranspose"
"MultiplyColumn"
"MultiplyDivisor"
"MultiplyFrobenius"
"MultiplyRow"
"MultiplyTransformations"
"Multiset"
"MultisetToSet"
"Multisets"
"MultivaluedSection"
"MultivariatePolynomial"
"MurphyAlphaApproximation"
"MyAbelianGroup"
"MyBasis"
"MyCompletion"
"MyDumbExpand"
"MyEval"
"MyExpand"
"MyExtOrder"
"MyFPGroup"
"MyGCD"
"MyGetLowPrecisionExpand"
"MyGetLowPrecisionExpandAS"
"MyGradedMap"
"MyInvars"
"MyIsConjugate"
"MyIsConjugateQuotient"
"MyIsConjugateSubgroup"
"MyIsMaximal"
"MyIsSquare"
"MyMaximalOrder"
"MyPrimitivePart"
"MyRationalPoints"
"MyRelativeInvariant"
"NFS"
"NFSProcess"
"NFaces"
"NGrad"
"NMS"
"NMatReps"
"NMaxSubs"
"NNZEntries"
"NP"
"NPCGenerators"
"NPCgens"
"NPermReps"
"NSpin"
"NagataAutomorphism"
"Nagens"
"NaiveHeight"
"Nalggens"
"Name"
"Name2Mij"
"NameSimple"
"Names"
"NarrowClassGroup"
"NarrowClassNumber"
"NaturalActionGenerator"
"NaturalBlackBoxGroup"
"NaturalFreeAlgebraCover"
"NaturalGroup"
"NaturalMap"
"NaturalMaps"
"Nclasses"
"Ncols"
"NearLinearSpace"
"NefCone"
"NegationMap"
"Negative"
"NegativeGammaOrbitsOnRoots"
"NegativePrimeDivisors"
"NegativeRelativeRoots"
"Neighbor"
"NeighborClosure"
"Neighbors"
"Neighbour"
"NeighbourClosure"
"NeighbouringGerms"
"Neighbours"
"NewAndOldSubspacesUsingHeckeAction"
"NewEnv"
"NewLLLBasis"
"NewLevel"
"NewModularHyperellipticCurve"
"NewModularHyperellipticCurves"
"NewModularNonHyperellipticCurveGenus3"
"NewModularNonHyperellipticCurvesGenus3"
"NewQuotient"
"NewSaturation"
"NewStore"
"NewSubspace"
"NewSubvariety"
"Newform"
"NewformDecomposition"
"Newforms"
"NewtonPolygon"
"NewtonPolynomial"
"NewtonPolynomials"
"NewtonPolytope"
"NextClass"
"NextElement"
"NextExtension"
"NextFactor"
"NextGraph"
"NextModule"
"NextPermutation"
"NextPrime"
"NextRepresentation"
"NextSimpleQuotient"
"NextSubgroup"
"NextVector"
"Ngens"
"Nice"
"NiceRepresentativeModuloPowers"
"NiceRepresentativesModuloPowers"
"NicerQuaternionAlgebra"
"NilRadical"
"NilpotencyClass"
"NilpotentBoundary"
"NilpotentLength"
"NilpotentLieAlgebra"
"NilpotentOrbit"
"NilpotentOrbits"
"NilpotentPresentation"
"NilpotentQuotient"
"NilpotentSection"
"NilpotentSubgroups"
"Nilradical"
"NineDescent"
"NineSelmerSet"
"NoCommonComponent"
"NoetherNormalisation"
"NoetherNormalization"
"NoetherNumerator"
"NoetherWeights"
"NonCuspidalQRationalPoints"
"NonIdempotentActionGenerators"
"NonIdempotentGenerators"
"NonNilpotentElement"
"NonNormalizedLcm"
"NonPrimitiveAlternant"
"NonPrincipalPrimesUpTo"
"NonQFactorialLocus"
"NonReducedFibres"
"NonSimplicialCones"
"NonSpecialDivisor"
"NonZeroCoordinates"
"NonsolvableSubgroups"
"NonsplitAbelianSection"
"NonsplitCollector"
"NonsplitElementaryAbelianSection"
"NonsplitExtensionSpace"
"NonsplitSection"
"Nonsquare"
"NonvanishingForm"
"Norm"
"NormAbs"
"NormEquation"
"NormGroup"
"NormGroupDiscriminant"
"NormKernel"
"NormModule"
"NormOneGroup"
"NormOneSubgroup"
"NormResidueSymbol"
"NormSpace"
"NormalBasisGenerator"
"NormalClosure"
"NormalClosureMonteCarlo"
"NormalComplements"
"NormalCone"
"NormalElement"
"NormalFan"
"NormalForm"
"NormalLattice"
"NormalNumber"
"NormalSubfields"
"NormalSubgroup"
"NormalSubgroupRandomElement"
"NormalSubgroups"
"Normalisation"
"NormalisationCoefficient"
"Normalise"
"NormalisedCone"
"Normaliser"
"NormaliserCode"
"NormaliserMatrix"
"Normalization"
"NormalizationCoefficient"
"Normalize"
"NormalizeIdeals"
"Normalizer"
"NormalizerCode"
"NormalizerGLZ"
"NormalizerMatrix"
"Norms"
"Not"
"Nqubits"
"Nrels"
"Nrows"
"Nsgens"
"NthPrime"
"NuclearRank"
"NullGraph"
"NullHomotopy"
"NullSpace"
"Nullity"
"Nullspace"
"NullspaceMatrix"
"NullspaceOfTranspose"
"NullspaceOfTransposeMatrix"
"NumExtraspecialPairs"
"NumPosRoots"
"Number"
"NumberField"
"NumberFieldDatabase"
"NumberFieldSieve"
"NumberFields"
"NumberOfActionGenerators"
"NumberOfAffinePatches"
"NumberOfAlgebraicGenerators"
"NumberOfAntisymmetricForms"
"NumberOfBlocks"
"NumberOfBoundaryPoints"
"NumberOfCells"
"NumberOfClasses"
"NumberOfColumns"
"NumberOfComponents"
"NumberOfConstantWords"
"NumberOfConstraints"
"NumberOfCoordinates"
"NumberOfCurves"
"NumberOfDivisors"
"NumberOfEGenerators"
"NumberOfEdges"
"NumberOfElements"
"NumberOfExtensions"
"NumberOfFGenerators"
"NumberOfFaces"
"NumberOfFacets"
"NumberOfFields"
"NumberOfFixedSpaces"
"NumberOfGenerators"
"NumberOfGradings"
"NumberOfGraphs"
"NumberOfGroups"
"NumberOfGroupsSF"
"NumberOfGroupsp7"
"NumberOfInclusions"
"NumberOfInteriorPoints"
"NumberOfInvariantForms"
"NumberOfIrreducibleMatrixGroups"
"NumberOfIsogenyClasses"
"NumberOfK3Surfaces"
"NumberOfKGenerators"
"NumberOfLabels"
"NumberOfLattices"
"NumberOfLevels"
"NumberOfLines"
"NumberOfMatrices"
"NumberOfMetacyclicPGroups"
"NumberOfNames"
"NumberOfNewformClasses"
"NumberOfNonZeroEntries"
"NumberOfOperations"
"NumberOfPCGenerators"
"NumberOfPartitions"
"NumberOfPermutations"
"NumberOfPlacesDegECF"
"NumberOfPlacesOfDegreeOne"
"NumberOfPlacesOfDegreeOneECF"
"NumberOfPlacesOfDegreeOneECFBound"
"NumberOfPlacesOfDegreeOneOverExactConstantField"
"NumberOfPlacesOfDegreeOneOverExactConstantFieldBound"
"NumberOfPlacesOfDegreeOverExactConstantField"
"NumberOfPoints"
"NumberOfPointsAtInfinity"
"NumberOfPointsOnCubicSurface"
"NumberOfPointsOnSurface"
"NumberOfPositiveRoots"
"NumberOfPrimePolynomials"
"NumberOfPrimitiveAffineGroups"
"NumberOfPrimitiveAlmostSimpleGroups"
"NumberOfPrimitiveDiagonalGroups"
"NumberOfPrimitiveGroups"
"NumberOfPrimitiveProductGroups"
"NumberOfPrimitiveSolubleGroups"
"NumberOfProjectives"
"NumberOfPunctures"
"NumberOfQubits"
"NumberOfQuotientGradings"
"NumberOfRationalPoints"
"NumberOfRelations"
"NumberOfRelationsRequired"
"NumberOfRepresentations"
"NumberOfRows"
"NumberOfSkewRows"
"NumberOfSmallGroups"
"NumberOfSmoothDivisors"
"NumberOfSolubleIrreducibleMatrixGroups"
"NumberOfStandardTableaux"
"NumberOfStandardTableauxOnWeight"
"NumberOfStrings"
"NumberOfStrongGenerators"
"NumberOfSubgroupsAbelianPGroup"
"NumberOfSymmetricForms"
"NumberOfTableauxOnAlphabet"
"NumberOfTerms"
"NumberOfTransitiveGroups"
"NumberOfTransverseIntersections"
"NumberOfVariables"
"NumberOfVertices"
"NumberOfWords"
"NumberOfhGenerators"
"NumberOfxGenerators"
"NumberOfyGenerators"
"NumberingMap"
"NumbersOfPointsOnSurface"
"Numelt"
"Numeration"
"Numerator"
"NumeratorData"
"NumeratorSequence"
"NumericalDerivative"
"NumericalEigenvectors"
"O"
"OECM"
"OEIS"
"OEISDatabase"
"ObjectMap"
"ObjectMapHasPreimage"
"ObjectiveFunction"
"Obstruction"
"ObstructionDescentBuildingBlock"
"OddGraph"
"Oddity"
"OldClassInvariants"
"OldDerksenIdeal"
"OldGOMinus"
"OldGeneralOrthogonalGroupMinus"
"OldIrreducibleModules"
"OldOmegaMinus"
"OldQuadraticSpace"
"OldQuotient"
"OldSOMinus"
"OldSpecialOrthogonalGroupMinus"
"OldSubspace"
"OldSubvariety"
"Omega"
"OmegaMinus"
"OmegaPlus"
"One"
"OneCocycle"
"OneCohomology"
"OneCohomologyAb"
"OneCohomologyFP"
"OneCohomologyFP_"
"OneParameterSubgroupsLattice"
"OneSkeleton"
"OnlyUpToIsogeny"
"Open"
"OpenGraphFile"
"OpenSmallGroupDatabase"
"OpenTest"
"Operands"
"Operation"
"Operator"
"OperatorNorm"
"OppositeAlgebra"
"OptimalEdgeColouring"
"OptimalSkewness"
"OptimalVertexColouring"
"OptimisedRepresentation"
"OptimizedRepresentation"
"Or"
"Orbit"
"OrbitAction"
"OrbitActionBounded"
"OrbitBounded"
"OrbitClosure"
"OrbitImage"
"OrbitImageBounded"
"OrbitKernel"
"OrbitKernelBounded"
"OrbitLensInternal"
"OrbitMinsInternal"
"OrbitNumbersInternal"
"OrbitPartitionIsConjugate"
"OrbitPartitionStabilizer"
"OrbitRepresentatives"
"OrbitStabilizer"
"OrbitSum"
"OrbitalGraph"
"Orbits"
"OrbitsOfSpaces"
"OrbitsOnSimples"
"OrbitsPartition"
"Order"
"OrderAutomorphismGroupAbelianPGroup"
"OrderGL"
"OrderOfImageOfComponentGroupOfJ0N"
"OrderOfRootOfUnity"
"OrderedGenerators"
"OrderedIntegerMonoid"
"OrderedMonoid"
"OrderedPartitionStack"
"OrderedPartitionStackZero"
"Ordering"
"OreConditions"
"OrientatedGraph"
"Origin"
"OriginalRing"
"OrthogonalComplement"
"OrthogonalComponent"
"OrthogonalComponents"
"OrthogonalDecomposition"
"OrthogonalDirectSum"
"OrthogonalForm"
"OrthogonalFormCS"
"OrthogonalFormMinus"
"OrthogonalFormPlus"
"OrthogonalReflection"
"OrthogonalSum"
"OrthogonalTensorProduct"
"Orthogonalize"
"OrthogonalizeGram"
"Orthonormalize"
"OutDegree"
"OutEdges"
"OutNeighbors"
"OutNeighbours"
"OuterFPGroup"
"OuterFaces"
"OuterNormal"
"OuterNormals"
"OuterOrder"
"OuterShape"
"OuterVertices"
"OvalDerivation"
"OverDimension"
"OverconvergentHeckeSeries"
"OverconvergentHeckeSeriesDegreeBound"
"Overdatum"
"Overgroup"
"P1"
"P1Action"
"P1Classes"
"P1Normalize"
"P1P2toA3Ac2over12"
"P1Reduce"
"P2"
"PALPNormalForm"
"PCAut"
"PCAutAction"
"PCAutDeriv"
"PCAutIsSol"
"PCAutPrint"
"PCBFConjByWord"
"PCBFEltNew"
"PCBFElteq"
"PCBFEltne"
"PCBFMult"
"PCBFNew"
"PCBFNormalForm"
"PCBFRevert"
"PCClass"
"PCExponents"
"PCGO"
"PCGOMinus"
"PCGOPlus"
"PCGSp"
"PCGU"
"PCGenerators"
"PCGroup"
"PCMap"
"PCPresentation"
"PCPrimes"
"PCSO"
"PCSOMinus"
"PCSOPlus"
"PCSU"
"PGL"
"PGO"
"PGOMinus"
"PGOPlus"
"PGU"
"PGammaL"
"PGammaU"
"PGroupSection"
"PGroupToForms"
"PHom"
"POmega"
"POmegaMinus"
"POmegaPlus"
"POpen"
"PSL"
"PSL2"
"PSO"
"PSOMinus"
"PSOPlus"
"PSU"
"PSigmaL"
"PSigmaSp"
"PSigmaSz"
"PSigmaU"
"PSp"
"PSz"
"PackingRadius"
"PadCode"
"PadeHermiteApproximant"
"PairReduce"
"PairReduceGram"
"PaleyGraph"
"PaleyTournament"
"ParallelClass"
"ParallelClasses"
"ParallelSort"
"ParamDeg4DPSingLie"
"Parameters"
"Parametrization"
"ParametrizationMatrix"
"ParametrizationToPuiseux"
"ParametrizeAnticanonicalP1xP1"
"ParametrizeAnticanonicalSphere"
"ParametrizeBlowup"
"ParametrizeDegree5DelPezzo"
"ParametrizeDegree6DelPezzo"
"ParametrizeDegree7DelPezzo"
"ParametrizeDegree8DelPezzo"
"ParametrizeDegree9DelPezzo"
"ParametrizeDelPezzo"
"ParametrizeDelPezzoDeg6"
"ParametrizeDelPezzoDeg9"
"ParametrizeOrdinaryCurve"
"ParametrizePencil"
"ParametrizeProjectiveHypersurface"
"ParametrizeProjectiveSurface"
"ParametrizeQuadric"
"ParametrizeRNC"
"ParametrizeRationalNormalCurve"
"ParametrizeScroll"
"ParametrizeSingularDegree3DelPezzo"
"ParametrizeSingularDegree4DelPezzo"
"Parent"
"ParentCategory"
"ParentCell"
"ParentGraph"
"ParentPlane"
"ParentRing"
"ParityCheckMatrix"
"PartialDual"
"PartialFactorization"
"PartialFractionDecomposition"
"PartialLeeWeightDistribution"
"PartialPrimaryInvariantSpaces"
"PartialWeightDistribution"
"Partition"
"Partition2WGtable"
"PartitionAction"
"PartitionCovers"
"PartitionToWeight"
"Partitions"
"PascalTriangle"
"PatchGerms"
"Path"
"PathExists"
"PathGraph"
"PathTree"
"PathTreeCyclicModule"
"Paths"
"Peakwords"
"PellEquation"
"Pencil"
"PerfectForms"
"PerfectGroupDatabase"
"PerfectSubgroups"
"PeriodMapping"
"Periods"
"PermCond"
"PermRep"
"PermRepDegrees"
"PermRepKeys"
"PermRestrict"
"PermToDualMatrix"
"PermToMatrix"
"PermToWord"
"Permutation"
"PermutationAutomorphism"
"PermutationCharacter"
"PermutationCode"
"PermutationCondensation"
"PermutationGroup"
"PermutationMatrix"
"PermutationModule"
"PermutationRepresentation"
"PermutationSupport"
"Permutations"
"PermuteSequence"
"PermuteWeights"
"Pfaffian"
"Pfaffians"
"PhaseFlip"
"Phi"
"PhiInverse"
"Pi"
"PicardClass"
"PicardGaloisModule"
"PicardGroup"
"PicardGroupGeometric"
"PicardIntersectionPairing"
"PicardLattice"
"PicardNumber"
"PicardToClassGroupsMap"
"PicardToClassLatticesMap"
"PicnDescent"
"Pipe"
"Place"
"PlaceEnumCopy"
"PlaceEnumCurrent"
"PlaceEnumInit"
"PlaceEnumNext"
"PlaceEnumPosition"
"Places"
"PlacticIntegerMonoid"
"PlacticMonoid"
"PlanarDual"
"PlanarGraphDatabase"
"PlaneCurve"
"PlaneToDisc"
"Plethysm"
"PlotkinAsymptoticBound"
"PlotkinBound"
"PlotkinSum"
"Plurigenus"
"Point"
"PointDegree"
"PointDegrees"
"PointGraph"
"PointGroup"
"PointInInterior"
"PointIndexes"
"PointOnRegularModel"
"PointSearch"
"PointSet"
"PointToBlowUp"
"Points"
"PointsAtInfinity"
"PointsCubicModel"
"PointsFiniteField"
"PointsInGeneralPosition"
"PointsKnown"
"PointsOverSplittingField"
"PointsQI"
"PointsToLaurent"
"Polar"
"PolarToComplex"
"Polarisation"
"PolarisedVariety"
"PoleDivisor"
"Poles"
"PollardRho"
"PolyMapKernel"
"PolyToSeries"
"PolycyclicByFiniteGroup"
"PolycyclicGenerators"
"PolygonGraph"
"Polyhedron"
"PolyhedronInSublattice"
"PolyhedronWithInequalities"
"Polylog"
"PolylogD"
"PolylogDold"
"PolylogP"
"Polynomial"
"PolynomialAlgebra"
"PolynomialCoefficient"
"PolynomialMap"
"PolynomialPair"
"PolynomialRing"
"PolynomialSieve"
"PolynomialToElementarySymmetric"
"PolynomialToPowerSums"
"Polynomials"
"Polytope"
"PolytopeCanonicalFanoDim2"
"PolytopeCanonicalFanoDim3"
"PolytopeLDP"
"PolytopeOfProjectiveSpace"
"PolytopeOfWPS"
"PolytopeReflexiveFanoDim2"
"PolytopeReflexiveFanoDim3"
"PolytopeSmoothFano"
"PolytopeSmoothFanoDim2"
"PolytopeSmoothFanoDim3"
"PolytopeSmoothFanoDim4"
"PolytopeSmoothFanoDim5"
"PolytopeSmoothFanoDim6"
"PolytopeSmoothFanoDim7"
"PolytopeSmoothFanoDim8"
"PolytopeTerminalFanoDim2"
"PolytopeTerminalFanoDim3"
"PolytopeToLaurent"
"PolytopelReflexiveDim2"
"Pop"
"PosRootsWeightBasis"
"Position"
"PositiveConjugates"
"PositiveConjugatesProcess"
"PositiveCoroots"
"PositiveDefiniteForm"
"PositiveGammaOrbitsOnRoots"
"PositiveQuadrant"
"PositiveRelativeRoots"
"PositiveRoots"
"PositiveRootsPerm"
"PositiveSum"
"PossibleCanonicalDissidentPoints"
"PossibleDiscriminants"
"PossibleSimpleCanonicalDissidentPoints"
"PowHom"
"Power"
"PowerFormalSet"
"PowerFreePart"
"PowerGroup"
"PowerIdeal"
"PowerIndexedSet"
"PowerMap"
"PowerMultiset"
"PowerPolynomial"
"PowerProduct"
"PowerProductSimplify"
"PowerRSpace"
"PowerRelation"
"PowerResidueCode"
"PowerSequence"
"PowerSeries"
"PowerSeriesAlgebra"
"PowerSeriesRing"
"PowerSet"
"PowerStructure"
"PowerSumToCoefficients"
"PowerSumToElementaryMatrix"
"PowerSumToElementarySymmetric"
"PowerSumToHomogeneousMatrix"
"PowerSumToMonomialMatrix"
"PowerSumToSchurMatrix"
"PrePatchMaps"
"Precision"
"PrecisionBound"
"Preimage"
"PreimageConstructorViaInverse"
"PreimageIdeal"
"PreimageRing"
"PreparataCode"
"Preprune"
"Presentation"
"PresentationIsSmall"
"PresentationLength"
"PresentationMatrix"
"PreviousPrime"
"PrimDecomp"
"PrimalityCertificate"
"Primary"
"PrimaryAbelianBasis"
"PrimaryAbelianInvariants"
"PrimaryAlgebra"
"PrimaryBasis"
"PrimaryComponents"
"PrimaryDecomposition"
"PrimaryIdeal"
"PrimaryInvariantFactors"
"PrimaryInvariants"
"PrimaryRationalForm"
"PrimaryRepresentation"
"Prime"
"PrimeBasis"
"PrimeComponents"
"PrimeDivisors"
"PrimeFactorisation"
"PrimeField"
"PrimeForm"
"PrimeIdeal"
"PrimeOrderElement"
"PrimePolynomials"
"PrimePowerKernelMatrix"
"PrimePowerNullspaceMatrix"
"PrimePowerOrderElement"
"PrimePowerRepresentation"
"PrimeRing"
"Primes"
"PrimesInInterval"
"PrimesUpTo"
"PrimitiveEisensteinSeries"
"PrimitiveElement"
"PrimitiveGroup"
"PrimitiveGroupDatabaseLimit"
"PrimitiveGroupDescription"
"PrimitiveGroupIdentification"
"PrimitiveGroupLabelFromSims"
"PrimitiveGroupLabelToSims"
"PrimitiveGroupProcess"
"PrimitiveGroupSims"
"PrimitiveGroups"
"PrimitiveIdempotentData"
"PrimitiveIdempotents"
"PrimitiveLatticeVector"
"PrimitivePart"
"PrimitivePolynomial"
"PrimitiveQuotient"
"PrimitiveRoot"
"PrimitiveWreathProduct"
"PrincipalCharacter"
"PrincipalDivisor"
"PrincipalDivisorMap"
"PrincipalIdealMap"
"PrincipalPolarisation"
"PrincipalPrimesUpTo"
"PrincipalSeriesParameters"
"PrincipalUnitGroup"
"PrincipalUnitGroupGenerators"
"PrintBase"
"PrintCategory"
"PrintCoding"
"PrintCollector"
"PrintExtensions"
"PrintFile"
"PrintFileMagma"
"PrintGenerators"
"PrintGrpLie"
"PrintGrpLieElt"
"PrintMagma"
"PrintMapping"
"PrintMatgMagma"
"PrintModuleMagma"
"PrintModules"
"PrintName"
"PrintPairs"
"PrintPrimes"
"PrintProbabilityDistribution"
"PrintProcess"
"PrintQuotient"
"PrintRelat"
"PrintRelatorLengths"
"PrintRelators"
"PrintSeries"
"PrintSortedProbabilityDistribution"
"PrintStatus"
"PrintSylowSubgroupStructure"
"PrintSymbols"
"PrintTermsOfDegree"
"PrintToPrecision"
"PrintTreesSU"
"PrintoutData"
"Probability"
"ProbabilityDistribution"
"ProbableAutomorphismGroup"
"ProbableRadicalDecomposition"
"Probit"
"ProcessLadder"
"Product"
"ProductCode"
"ProductProjectiveSpace"
"ProductRepresentation"
"ProfileGraph"
"ProfileHTMLOutput"
"ProfilePrintByTotalCount"
"ProfilePrintByTotalTime"
"ProfilePrintChildrenByCount"
"ProfilePrintChildrenByTime"
"ProfilePrintDescendantsByCount"
"ProfilePrintDescendantsByTime"
"ProfilePrintGraphByCount"
"ProfilePrintGraphByTime"
"ProfilePruneGraphByCount"
"ProfilePruneGraphByTime"
"ProfileReset"
"Proj"
"ProjKilling"
"Projection"
"ProjectionCentres"
"ProjectionCodimensions"
"ProjectionFromNonsingularPoint"
"ProjectionIndices"
"ProjectionMap"
"ProjectionMatrix"
"ProjectionOnto"
"ProjectionOntoImage"
"ProjectionSubtypes"
"ProjectionTypes"
"Projections"
"ProjectiveClosure"
"ProjectiveClosureMap"
"ProjectiveCover"
"ProjectiveDimension"
"ProjectiveEmbedding"
"ProjectiveFunction"
"ProjectiveGammaLinearGroup"
"ProjectiveGammaUnitaryGroup"
"ProjectiveGeneralLinearGroup"
"ProjectiveGeneralOrthogonalGroup"
"ProjectiveGeneralOrthogonalGroupMinus"
"ProjectiveGeneralOrthogonalGroupPlus"
"ProjectiveGeneralUnitaryGroup"
"ProjectiveIndecomposable"
"ProjectiveIndecomposableDimensions"
"ProjectiveIndecomposableModules"
"ProjectiveIndecomposables"
"ProjectiveLine"
"ProjectiveMap"
"ProjectiveModule"
"ProjectiveOmega"
"ProjectiveOmegaMinus"
"ProjectiveOmegaPlus"
"ProjectiveOrder"
"ProjectivePatchMap"
"ProjectivePlane"
"ProjectivePolynomial"
"ProjectiveRationalFunction"
"ProjectiveRepresentative"
"ProjectiveResolution"
"ProjectiveResolutionPGroup"
"ProjectiveSigmaLinearGroup"
"ProjectiveSigmaSuzukiGroup"
"ProjectiveSigmaSymplecticGroup"
"ProjectiveSigmaUnitaryGroup"
"ProjectiveSpace"
"ProjectiveSpaceAsToricVariety"
"ProjectiveSpecialLinearGroup"
"ProjectiveSpecialOrthogonalGroup"
"ProjectiveSpecialOrthogonalGroupMinus"
"ProjectiveSpecialOrthogonalGroupPlus"
"ProjectiveSpecialUnitaryGroup"
"ProjectiveSuzukiGroup"
"ProjectiveSymplecticGroup"
"Projectivity"
"Prospector"
"Prune"
"PseudoAdd"
"PseudoAddMultiple"
"PseudoBasis"
"PseudoCholeskyForm"
"PseudoCholeskyFormToCholesky"
"PseudoDimension"
"PseudoGenerators"
"PseudoInverse"
"PseudoMatrix"
"PseudoMordellWeilGroup"
"PseudoRandom"
"PseudoReflection"
"PseudoReflectionGroup"
"PseudoRemainder"
"Pseudoreflection"
"Psi"
"PthPowerMapping"
"PuiseuxExpansion"
"PuiseuxExponents"
"PuiseuxExponentsCommon"
"PuiseuxSeriesRing"
"PuiseuxToParametrization"
"Pullback"
"PunctureCode"
"PureBraidGroup"
"PureLattice"
"PureRayIndices"
"PureRays"
"PurelyRamifiedExtension"
"PushThroughIsogeny"
"Pushforward"
"Pushout"
"Put"
"PutInZ"
"Puts"
"Pyramid"
"QECC"
"QECCLowerBound"
"QECCUpperBound"
"QFactorialisation"
"QMatrix"
"QNF"
"QRCode"
"QRCodeZ4"
"QSpace"
"QUAToIntegralUEAMap"
"Qround"
"QuadeIdeal"
"QuadraticClassGroupTwoPart"
"QuadraticField"
"QuadraticForm"
"QuadraticFormCS"
"QuadraticFormMatrix"
"QuadraticFormMinus"
"QuadraticFormPlus"
"QuadraticFormPolynomial"
"QuadraticFormType"
"QuadraticForms"
"QuadraticNorm"
"QuadraticNormForm"
"QuadraticOrder"
"QuadraticSpace"
"QuadraticTransformation"
"QuadraticTwist"
"QuadraticTwists"
"QuadricIntersection"
"QuantizedUEA"
"QuantizedUEAlgebra"
"QuantizedUniversalEnvelopingAlgebra"
"QuantumBasisElement"
"QuantumBinaryErrorGroup"
"QuantumCode"
"QuantumCompactFormat"
"QuantumCyclicCode"
"QuantumDimension"
"QuantumErrorGroup"
"QuantumExtendedFormat"
"QuantumQuasiCyclicCode"
"QuantumState"
"QuantumTwistedCode"
"Quartic"
"QuarticG4Covariant"
"QuarticG6Covariant"
"QuarticHSeminvariant"
"QuarticIInvariant"
"QuarticJInvariant"
"QuarticMinimise"
"QuarticNumberOfRealRoots"
"QuarticPSeminvariant"
"QuarticQSeminvariant"
"QuarticRSeminvariant"
"QuarticReduce"
"QuasiCyclicCode"
"QuasiTwistedCyclicCode"
"QuaternaryPlotkinSum"
"Quaternion"
"QuaternionAlgebra"
"QuaternionOrder"
"QuaternionicAutomorphismGroup"
"QuaternionicComplement"
"QuaternionicDual"
"QuaternionicGModule"
"QuaternionicMatrixGroupDatabase"
"QuaternionicTranspose"
"QuickLLL"
"QuickLLLGram"
"Quotient"
"QuotientComplex"
"QuotientDimension"
"QuotientFactorization"
"QuotientGenerators"
"QuotientGradings"
"QuotientGroup"
"QuotientMap"
"QuotientModule"
"QuotientModuleAction"
"QuotientModuleImage"
"QuotientRepresentation"
"QuotientRing"
"QuotientWithPullback"
"Quotrem"
"RCLazySeries"
"RF"
"RGenerators"
"RHS"
"RMatrixSpace"
"RMatrixSpaceWithBasis"
"RModule"
"RModuleWithAction"
"RModuleWithBasis"
"RPolynomial"
"RSAModulus"
"RSKCorrespondence"
"RSpace"
"RSpaceWithBasis"
"RSpaceWithModuli"
"RWSGroup"
"RWSMonoid"
"Radical"
"RadicalDecomposition"
"RadicalExtension"
"RadicalQuotient"
"RaisePrecision"
"RamificationDegree"
"RamificationDivisor"
"RamificationField"
"RamificationGroup"
"RamificationIndex"
"RamificationPoints"
"RamifiedPlaces"
"RamifiedPrimes"
"RamifiedRepresentation"
"Ranbig"
"Random"
"RandomAdditiveCode"
"RandomAutomorphism"
"RandomBaseChange"
"RandomBits"
"RandomCFP"
"RandomCone"
"RandomConjugate"
"RandomConsecutiveBits"
"RandomCurveByGenus"
"RandomDigraph"
"RandomElementOfOrder"
"RandomExtension"
"RandomGLnZ"
"RandomGenusOneModel"
"RandomGraph"
"RandomHookWalk"
"RandomIrreduciblePolynomial"
"RandomLinearCode"
"RandomLowerTriangularMatrix"
"RandomMatrix"
"RandomModel"
"RandomNodalCurve"
"RandomPartition"
"RandomPlace"
"RandomPlaneCurve"
"RandomPlanePoints"
"RandomPolytope"
"RandomPositiveCone"
"RandomPrime"
"RandomPrimePolynomial"
"RandomProcess"
"RandomProcessWithValues"
"RandomProcessWithWords"
"RandomProcessWithWordsAndValues"
"RandomQuantumCode"
"RandomRightIdeal"
"RandomSLnZ"
"RandomSchreier"
"RandomSchreierBounded"
"RandomSchreierCoding"
"RandomSequence"
"RandomSequenceBlumBlumShub"
"RandomSequenceRSA"
"RandomSparseMatrix"
"RandomSubcomplex"
"RandomSubset"
"RandomSymplecticMatrix"
"RandomTableau"
"RandomTransformation"
"RandomTree"
"RandomUnit"
"RandomUpperTriangularMatrix"
"RandomWord"
"Rank"
"RankBound"
"RankBounds"
"RankZ2"
"RanksOfPrimitiveIdempotents"
"RationalCharacterDecomposition"
"RationalCharacterSchurIndex"
"RationalCharacterTable"
"RationalCharacterTableRSpace"
"RationalCurve"
"RationalCuspidalSubgroup"
"RationalCusps"
"RationalDifferentialField"
"RationalExtensionRepresentation"
"RationalField"
"RationalForm"
"RationalFunction"
"RationalFunctionField"
"RationalFunctions"
"RationalGCD"
"RationalHomology"
"RationalMap"
"RationalMapping"
"RationalMatrixGroupDatabase"
"RationalPart"
"RationalPoint"
"RationalPoints"
"RationalPointsByFibration"
"RationalPointsGeneric"
"RationalPuiseux"
"RationalReconstruction"
"RationalRootDecomposition"
"RationalRoundUp"
"RationalScroll"
"RationalSequence"
"RationalSolutions"
"RationalTensorSearch"
"Rationals"
"RationalsAsNumberField"
"Ratpoints"
"RawBasket"
"RawCurve"
"RawEval"
"Ray"
"RayClassField"
"RayClassGroup"
"RayClassGroupDiscLog"
"RayLattice"
"RayLatticeMap"
"RayResidueRing"
"Rays"
"Re"
"Reachable"
"Read"
"ReadAtlasMatrix"
"ReadBinary"
"ReadBytes"
"ReadEntry1"
"ReadEntryQECC"
"ReadIntegralMatrix"
"ReadTest"
"Real"
"RealEmbeddings"
"RealExtensions"
"RealField"
"RealHomology"
"RealInjection"
"RealMatrix"
"RealPeriod"
"RealPlaces"
"RealRoots"
"RealSigns"
"RealTamagawaNumber"
"RealToIntegerExponent"
"RealVectorSpace"
"RealVolume"
"RealWeakApproximation"
"Realtime"
"RecToGRBskt"
"RecToGRCrvS"
"RecToGRPtS"
"RecToGRSch"
"ReciprocalPolynomial"
"Recognise3D4"
"RecogniseAdjoint"
"RecogniseAlternating"
"RecogniseAlternatingOrSymmetric"
"RecogniseAlternatingSquare"
"RecogniseClassical"
"RecogniseClassicalSSA"
"RecogniseDelta"
"RecogniseExchangeSSA"
"RecogniseExtendedSL"
"RecogniseExtendedSp"
"RecogniseG2"
"RecogniseLargeRee"
"RecogniseRee"
"RecogniseSL"
"RecogniseSL2"
"RecogniseSL3"
"RecogniseSU3"
"RecogniseSU4"
"RecogniseSp4Even"
"RecogniseSpOdd"
"RecogniseStarAlgebra"
"RecogniseSymmetric"
"RecogniseSymmetricSquare"
"RecogniseSz"
"RecognizeClassical"
"RecognizeExtendedSL"
"RecognizeExtendedSp"
"RecognizeLargeRee"
"RecognizeRee"
"RecognizeSL"
"RecognizeSL2"
"RecognizeSpOdd"
"RecognizeStarAlgebra"
"RecognizeSz"
"Reconstruct"
"ReconstructBasis"
"ReconstructLatticeBasis"
"ReconstructionEnvironment"
"Rectify"
"RecursiveCoefficientLazySeries"
"RecursiveGrphRes"
"RedoEnumeration"
"Reduce"
"ReduceBasis"
"ReduceCharacters"
"ReduceCluster"
"ReduceCubicSurface"
"ReduceDefiningGenerators"
"ReduceGenerators"
"ReduceGroebnerBasis"
"ReducePlaneCurve"
"ReduceQuadrics"
"ReduceToTriangleVertices"
"ReduceVector"
"ReducedAteTPairing"
"ReducedBasis"
"ReducedDecomposition"
"ReducedDiscriminant"
"ReducedEtaTPairing"
"ReducedFactorisation"
"ReducedForm"
"ReducedForms"
"ReducedGramMatrix"
"ReducedLegendreEquation"
"ReducedLegendreModel"
"ReducedLegendrePolynomial"
"ReducedMinimalWeierstrassModel"
"ReducedModel"
"ReducedOrbits"
"ReducedPoint"
"ReducedSubscheme"
"ReducedTatePairing"
"ReducedWamelenModel"
"Reduction"
"ReductionOrbit"
"ReductionStep"
"ReductionType"
"Reductions"
"Reductions_Factor"
"ReductiveLieAlgebraOld"
"ReductiveMatrixLieAlgebraOld"
"ReductiveRank"
"ReductiveType"
"Reductum"
"Ree"
"ReeBNpair"
"ReeConjugacy"
"ReeConjugacyClasses"
"ReeConstructiveMembership"
"ReeCrossCharacteristicReduction"
"ReeDiagonalisation"
"ReeElementToWord"
"ReeFindOrbitPoint"
"ReeFixedPoints"
"ReeGeneralRecogniser"
"ReeGroup"
"ReeInvolutionCentraliser"
"ReeIrreducibleRepresentation"
"ReeMaximalSubgroups"
"ReeMaximalSubgroupsConjugacy"
"ReePermutationRepresentation"
"ReePointStabiliser"
"ReeRecognition"
"ReeReduction"
"ReeRedundantSLPGenerators"
"ReeResetRandomProcess"
"ReeSLPCoercion"
"ReeStabiliser"
"ReeStandardConstructiveMembership"
"ReeStandardCopy"
"ReeStandardGenerators"
"ReeStandardMaximalSubgroups"
"ReeStandardMembership"
"ReeStandardRecogniser"
"ReeSylow"
"ReeSylowConjugacy"
"ReeSymmetricSquareDecompose"
"ReeTensorDecompose"
"ReedMullerCode"
"ReedMullerCodeQRMZ4"
"ReedMullerCodeRMZ4"
"ReedMullerCodeZ4"
"ReedMullerCodesLRMZ4"
"ReedMullerCodesRMZ4"
"ReedSolomonCode"
"ReesIdeal"
"RefineSection"
"Reflection"
"ReflectionFactors"
"ReflectionGroup"
"ReflectionMatrices"
"ReflectionMatrix"
"ReflectionPermutation"
"ReflectionPermutations"
"ReflectionSubgroup"
"ReflectionTable"
"ReflectionWord"
"ReflectionWords"
"Reflections"
"Regexp"
"RegularLDPCEnsemble"
"RegularModel"
"RegularRepresentation"
"RegularSequence"
"RegularSpliceDiagram"
"RegularSubgroups"
"Regularity"
"Regulator"
"RegulatorLowerBound"
"RelationFromUnit"
"RelationIdeal"
"RelationMatrix"
"RelationModule"
"Relations"
"RelativeBasis"
"RelativeField"
"RelativeInvariant"
"RelativePrecision"
"RelativePrecisionOfDerivation"
"RelativeProj"
"RelativeRank"
"RelativeRootDatum"
"RelativeRootElement"
"RelativeRootSpace"
"RelativeRoots"
"RelativeSelmerElement"
"RelevantCosets"
"Remove"
"RemoveBasisElt"
"RemoveColumn"
"RemoveConstraint"
"RemoveCrossTerms"
"RemoveEdge"
"RemoveEdges"
"RemoveFactor"
"RemoveFiles"
"RemoveHypersurface"
"RemoveIrreducibles"
"RemovePowersInPlace"
"RemoveRow"
"RemoveRowColumn"
"RemoveRowContents"
"RemoveVertex"
"RemoveVertices"
"RemoveWeight"
"RemoveZeroRows"
"Rep"
"RepChevalleyBasis"
"RepetitionCode"
"ReplacePrimes"
"ReplaceRelation"
"ReplicationNumber"
"Representation"
"RepresentationDegree"
"RepresentationDimension"
"RepresentationMatrix"
"RepresentationMatrixOfMatrix"
"RepresentationNumber"
"RepresentationSum"
"RepresentationType"
"Representations"
"Representative"
"RepresentativeCocycles"
"RepresentativePoint"
"RepresentativePoints"
"Representatives"
"RepresentsFreeModule"
"RepsDBGet"
"RepsSmallGet"
"Res_H2_G_QmodZ"
"RescaledDual"
"ResetMaximumMemoryUsage"
"ResetMinimumWeightBounds"
"Residual"
"Residue"
"ResidueClassDegree"
"ResidueClassField"
"ResidueClassRing"
"ResidueCode"
"ResidueField"
"ResidueMatrixRing"
"ResidueSystem"
"Resolution"
"ResolutionData"
"ResolutionGraph"
"ResolutionGraphVertex"
"ResolutionSpine"
"ResolveAffineCurve"
"ResolveAffineMonicSurface"
"ResolveFanMap"
"ResolveLinearSystem"
"ResolveProjectiveCurve"
"ResolveProjectiveSurface"
"ResolvedDualFan"
"Restrict"
"RestrictDegree"
"RestrictEndomorphism"
"RestrictField"
"RestrictPartitionLength"
"RestrictParts"
"RestrictResolution"
"RestrictedPartitions"
"RestrictedSubalgebra"
"Restriction"
"RestrictionChainMap"
"RestrictionData"
"RestrictionMap"
"RestrictionMatrix"
"RestrictionOfGenerators"
"RestrictionOfScalars"
"RestrictionOfScalarsToQ"
"RestrictionToImage"
"RestrictionToPatch"
"RestrictionToSubtorus"
"Resultant"
"ResumeEnumeration"
"Retrieve"
"Reverse"
"ReverseColumns"
"ReverseRows"
"Reversion"
"RevertClass"
"Rewind"
"Rewrite"
"ReynoldsOperator"
"Rho"
"RichelotIsogenousSurface"
"RichelotIsogenousSurfaces"
"RiemannRochBasis"
"RiemannRochCoordinates"
"RiemannRochDimension"
"RiemannRochPolytope"
"RiemannRochSpace"
"RiemannZeta"
"RightAction"
"RightAdjointMatrix"
"RightAnnihilator"
"RightCancellation"
"RightCosetSpace"
"RightDescentSet"
"RightExactExtension"
"RightGCD"
"RightGcd"
"RightGreatestCommonDivisor"
"RightHandFactors"
"RightIdeal"
"RightIdealClasses"
"RightInverse"
"RightInverseMorphism"
"RightIsomorphism"
"RightLCM"
"RightLcm"
"RightLeastCommonMultiple"
"RightMixedCanonicalForm"
"RightNormalForm"
"RightOrder"
"RightRegularModule"
"RightRepresentationMatrix"
"RightRing"
"RightString"
"RightStringLength"
"RightTransversal"
"RightZeroExtension"
"Ring"
"RingClassField"
"RingClassGroup"
"RingGeneratedBy"
"RingMap"
"RingOfFractions"
"RingOfIntegers"
"RombergQuadrature"
"Root"
"RootAction"
"RootAutomorphism"
"RootClosure"
"RootDatum"
"RootDecomposition"
"RootGSet"
"RootHeight"
"RootImages"
"RootLattice"
"RootNorm"
"RootNorms"
"RootNumber"
"RootOfUnity"
"RootPermutation"
"RootPosition"
"RootSequence"
"RootSide"
"RootSpace"
"RootSystem"
"RootSystemMatrix"
"RootVertex"
"Roots"
"RootsAndCoroots"
"RootsInSplittingField"
"RootsNonExact"
"RosenhainInvariants"
"Rotate"
"RotateRows"
"RotateWord"
"Rotation"
"Round"
"RoundDownDivisor"
"RoundReal"
"RoundUpDivisor"
"Row"
"RowInsert"
"RowLength"
"RowNullSpace"
"RowReductionHomomorphism"
"RowSequence"
"RowSkewLength"
"RowSpace"
"RowSubmatrix"
"RowSubmatrixRange"
"RowWeight"
"RowWeights"
"RowWord"
"Rows"
"Rowspace"
"RowvColSplit"
"Rtest"
"RubinSilverbergPolynomials"
"RuledSurface"
"S1"
"S2"
"SAT"
"SClassGroup"
"SClassGroupAbelianInvariants"
"SClassGroupExactSequence"
"SClassNumber"
"SEA"
"SFA"
"SFAElementary"
"SFAHomogeneous"
"SFAMonomial"
"SFAPower"
"SFASchur"
"SHA1"
"SIntegralDesbovesPoints"
"SIntegralLjunggrenPoints"
"SIntegralPoints"
"SIntegralQuarticPoints"
"SL"
"SL2Characteristic"
"SL2ElementToWord"
"SL2Presentation"
"SL2Triple"
"SL3ElementToWord"
"SL4Covariants"
"SL4Invariants"
"SLPGroup"
"SLPolynomialRing"
"SO"
"SOMinus"
"SOPlus"
"SPolynomial"
"SPrimesUpTo"
"SPrincipalDivisorMap"
"SPrintCategory"
"SQUFOF"
"SQ_check"
"SQextSetup"
"SQsplitSetup"
"SRegulator"
"SU"
"SU3ElementToWord"
"SUnitAction"
"SUnitCohomologyProcess"
"SUnitDiscLog"
"SUnitGroup"
"SUnitSubGroup"
"SVMForLattAuto"
"SVMForLattIso"
"SVPermutation"
"SVWord"
"SafeInverseUniformiser"
"SafeUniformiser"
"SafeUniformizer"
"SatisfiesSL2Presentation"
"SatisfiesSzPresentation"
"Saturate"
"SaturateSheaf"
"Saturation"
"ScalarField"
"ScalarLattice"
"ScalarMatrix"
"ScalarProduct"
"ScalarSparseMatrix"
"ScaleGenerators"
"ScaleMatrix"
"ScaledIgusaInvariants"
"ScaledLattice"
"ScalingFactor"
"Scheme"
"SchemeGraphMap"
"SchemeGraphMapToSchemeMap"
"SchemeMap"
"SchemeThrough"
"SchreierGenerators"
"SchreierGraph"
"SchreierSystem"
"SchreierVector"
"SchreierVectors"
"Schur"
"SchurIndex"
"SchurIndexGroup"
"SchurIndices"
"SchurNorm"
"SchurToElementaryMatrix"
"SchurToHomogeneousMatrix"
"SchurToMonomialMatrix"
"SchurToPowerSumMatrix"
"Search"
"SearchEqual"
"SearchForDecomposition"
"SearchForIsomorphism"
"SearchPGroups"
"Sec"
"SecantVariety"
"Sech"
"SecondaryInvariants"
"SecondaryInvariantsNonModular"
"SectionCentraliser"
"SectionCentralizer"
"Sections"
"Seek"
"Self"
"SelfComplementaryGraphDatabase"
"SelfIntersection"
"Selfintersection"
"Selfintersections"
"SelmerGroup"
"SemiInvariantsOfDegree"
"SemiLinearGroup"
"SemiOrthogonalBasis"
"SemiOrthogonalBasis2"
"SemiSimpleCohomologyProcess"
"SemiSimpleType"
"Semidir"
"SemidirectProduct"
"SemisimpleEFAModuleMaps"
"SemisimpleEFAModules"
"SemisimpleEFASeries"
"SemisimpleGeneratorData"
"SemisimpleLieAlgebraOld"
"SemisimpleMatrixLieAlgebraOld"
"SemisimpleRank"
"SemisimpleSubLie"
"SemisimpleSubLieDatabase"
"SemisimpleType"
"SeparatingElement"
"SeparationVertices"
"Seq"
"SeqFact"
"Seqelt"
"Seqint"
"Seqlist"
"Seqset"
"SequenceOfRadicalGenerators"
"SequenceToCompositionFactors"
"SequenceToConjugacyClasses"
"SequenceToElement"
"SequenceToFactorization"
"SequenceToInteger"
"SequenceToList"
"SequenceToMultiset"
"SequenceToProcess"
"SequenceToSet"
"SequenceToSubgroups"
"SeriesFactors"
"SeriesProcess"
"SerreBound"
"Set"
"SetAlgorithm"
"SetAllInvariantsOfDegree"
"SetArrows"
"SetAssertions"
"SetAutoColumns"
"SetAutoCompact"
"SetAxisMultiplicities"
"SetBaseGerm"
"SetBeep"
"SetBufferSize"
"SetCanonicalClass"
"SetClassGroupBoundFactorBasis"
"SetClassGroupBoundGenerators"
"SetClassGroupBoundMaps"
"SetClassGroupBounds"
"SetColumns"
"SetConicSubfieldMethodDegreeBound"
"SetDebugOnError"
"SetDefaultRealField"
"SetDefaultRealFieldPrecision"
"SetDefining"
"SetDisplayLevel"
"SetEchoInput"
"SetElementPrintFormat"
"SetEntry"
"SetEvaluationComparison"
"SetForceCFP"
"SetFreezeAll"
"SetGaloisMultiplicities"
"SetGlobalTCParameters"
"SetHeckeBound"
"SetHelpExternalBrowser"
"SetHelpExternalSystem"
"SetHelpUseExternalBrowser"
"SetHelpUseExternalSystem"
"SetHistorySize"
"SetIgnoreEof"
"SetIgnorePrompt"
"SetIgnoreSpaces"
"SetIloadAllowEsc"
"SetIndent"
"SetIntegerSolutionVariables"
"SetKantLevel"
"SetKantPrecision"
"SetKantPrinting"
"SetKaratsubaThreshold"
"SetLMGSchreierBound"
"SetLibraries"
"SetLibraryRoot"
"SetLineEditor"
"SetLogFile"
"SetLowerBound"
"SetMS"
"SetMark"
"SetMaximiseFunction"
"SetMemoryExtensionSize"
"SetMemoryLimit"
"SetMultiplicities"
"SetNeighbouringGerms"
"SetNthreads"
"SetObjectiveFunction"
"SetOptions"
"SetOrderMaximal"
"SetOrderTorsionUnit"
"SetOrderUnitsAreFundamental"
"SetOutputFile"
"SetPath"
"SetPowerPrinting"
"SetPrePatchMaps"
"SetPrecision"
"SetPresentation"
"SetPreviousSize"
"SetPrimalityProof"
"SetPrimaryInvariants"
"SetPrimitiveElement"
"SetPrintKetsInteger"
"SetPrintLevel"
"SetProcessParameters"
"SetProfile"
"SetProjectivePatchMaps"
"SetPrompt"
"SetQuaternionOrder"
"SetQuitOnError"
"SetRationalBasis"
"SetRows"
"SetSeed"
"SetSelfintersections"
"SetShellCompletion"
"SetShowPromptAlways"
"SetSparseGCD"
"SetTargetRing"
"SetToIndexedSet"
"SetToMultiset"
"SetToSequence"
"SetTraceback"
"SetTransGroupIDMany"
"SetTransverseIntersections"
"SetUpperBound"
"SetUserProcessData"
"SetVerbose"
"SetVerboseMS"
"SetViMode"
"SetsOfSingularPlaces"
"Setseq"
"Seysen"
"SeysenGram"
"Shape"
"Sheaf"
"SheafHomomorphism"
"SheafHoms"
"SheafOfDifferentials"
"ShephardTodd"
"ShephardToddNumber"
"ShephardToddOld"
"Shift"
"ShiftLeft"
"ShiftRight"
"ShiftToDegreeZero"
"ShiftValuation"
"ShimuraConjugates"
"ShimuraReduceUnit"
"ShortBasis"
"ShortCosets"
"ShortLift"
"ShortSchreierVectorCoding"
"ShortSubset"
"ShortVectors"
"ShortVectorsMatrix"
"ShortVectorsProcess"
"ShortenCode"
"ShortenStabilizerCode"
"ShortestPath"
"ShortestPaths"
"ShortestVectors"
"ShortestVectorsMatrix"
"ShowDL"
"ShowIdentifiers"
"ShowMemoryUsage"
"ShowOptions"
"ShowPrevious"
"ShowValues"
"ShrikhandeGraph"
"ShrinkingGenerator"
"SiegelTransformation"
"Sieve"
"SieveFactorBaseBound"
"SigTable"
"Sign"
"SignDecomposition"
"Signature"
"Signatures relevant to Any:"
"SiksekBound"
"SilvermanBound"
"SimNEQ"
"SimilarityGroup"
"SimpleCanonicalDissidentPoints"
"SimpleCohomologyDimensions"
"SimpleCohomologyProcess"
"SimpleCoreflectionMatrices"
"SimpleCoroots"
"SimpleEpimorphisms"
"SimpleExtension"
"SimpleGraphDatabase"
"SimpleGroupName"
"SimpleGroupOfLieType"
"SimpleGroupOrder"
"SimpleGroupsWithOrder"
"SimpleGroupsWithOrderDividing"
"SimpleHomologyDimensions"
"SimpleLieAlgebraOld"
"SimpleMatrixLieAlgebraOld"
"SimpleModule"
"SimpleOrders"
"SimpleParameters"
"SimpleQuotientAlgebras"
"SimpleQuotientProcess"
"SimpleQuotients"
"SimpleReflectionMatrices"
"SimpleReflectionPermutations"
"SimpleReflections"
"SimpleRelativeRoots"
"SimpleRoots"
"SimpleStarAlgebra"
"SimpleSubgroups"
"Simplex"
"SimplexAlphaCodeZ4"
"SimplexBetaCodeZ4"
"SimplexCode"
"SimplicialComplex"
"SimplicialProjectivePlane"
"SimplicialSubcone"
"SimplicialSubdivision"
"SimplifiedModel"
"Simplify"
"SimplifyLength"
"SimplifyOrder"
"SimplifyPresentation"
"SimplifyRep"
"SimplyConnectedVersion"
"SimpsonQuadrature"
"SimsSchreier"
"SimsSchreierCoding"
"Sin"
"Sincos"
"SingerDifferenceSet"
"SingleSolutionTest"
"SingletonAsymptoticBound"
"SingletonBound"
"SingularCones"
"SingularFibres"
"SingularPoints"
"SingularPointsOverSplittingField"
"SingularRadical"
"SingularRank"
"SingularRankPerCodimension"
"SingularSubscheme"
"Sinh"
"SixDescent"
"Size"
"SizeDFA"
"Skeleton"
"SkewHadamardDatabase"
"SkewShape"
"SkewWeight"
"Slope"
"SlopeValuation"
"Slopes"
"SmallBasis"
"SmallGraphDatabase"
"SmallGroup"
"SmallGroup2Database"
"SmallGroupDatabase"
"SmallGroupDatabaseLimit"
"SmallGroupDecoding"
"SmallGroupEncoding"
"SmallGroupIsInsoluble"
"SmallGroupIsInsolvable"
"SmallGroupIsSoluble"
"SmallGroupIsSolvable"
"SmallGroupProcess"
"SmallGroupSF"
"SmallGroupSFId"
"SmallGroups"
"SmallModularCurve"
"SmallPeriodMatrix"
"SmallRoots"
"SmallerField"
"SmallerFieldBasis"
"SmallerFieldImage"
"SmithForm"
"Sn"
"Socket"
"SocketInformation"
"Socle"
"SocleAction"
"SocleFactor"
"SocleFactors"
"SocleImage"
"SocleKernel"
"SocleQuotient"
"SocleSeries"
"SolAutCompatible"
"SolAutDerivations"
"SolAutInducible"
"SolAutModule"
"SolubleNormalQuotient"
"SolubleQuotient"
"SolubleQuotientProcess"
"SolubleRadical"
"SolubleResidual"
"SolubleSchreier"
"SolubleSchreierCoding"
"SolubleSubgroups"
"Solution"
"SolutionSpace"
"Solutions"
"SolvableAlgebra"
"SolvableLieAlgebra"
"SolvableQuotient"
"SolvableRadical"
"SolvableResidual"
"SolvableSchreier"
"SolvableSchreierCoding"
"SolvableSubgroups"
"Solve"
"SolveByRadicals"
"SolveEquations"
"SolveForInvariants"
"SolveInProductSpace"
"SolveZeroDimIdeal"
"Sort"
"SortByMP"
"SortDecomposition"
"SortRows"
"Sp"
"SpaceOfDifferentialsFirstKind"
"SpaceOfHolomorphicDifferentials"
"Span"
"SpanZ2CodeZ4"
"SpanningFan"
"SpanningForest"
"SpanningTree"
"SparseHeckeOperator"
"SparseIrreducibleRootDatum"
"SparseMatrix"
"SparseMatrixGAP"
"SparseMatrixStructure"
"SparseRootDatum"
"SparseStandardRootDatum"
"Spec"
"SpecialEvaluate"
"SpecialLieAlgebra"
"SpecialLinearGroup"
"SpecialOrthogonalGroup"
"SpecialOrthogonalGroupMinus"
"SpecialOrthogonalGroupPlus"
"SpecialPresentation"
"SpecialUnitaryGroup"
"SpecialWeights"
"Specialization"
"SpecifyInverseMorphisms"
"SpectralRadius"
"Spectrum"
"Sphere"
"SpherePackingBound"
"SphereVolume"
"SpheresPackingBound"
"Spin"
"SpinAction"
"SpinMinus"
"SpinOrbit"
"SpinPlus"
"SpinWithImages"
"SpinorCharacters"
"SpinorGenera"
"SpinorGenerators"
"SpinorGenus"
"SpinorNorm"
"SpinorRepresentatives"
"Splice"
"SpliceDiagram"
"SpliceDiagramVertex"
"Split"
"SplitAbelianSection"
"SplitAllByValues"
"SplitCell"
"SplitCellsByValues"
"SplitCollector"
"SplitElementaryAbelianSection"
"SplitExtension"
"SplitExtensionSpace"
"SplitMaximalToralSubalgebra"
"SplitRealPlace"
"SplitRootDatum"
"SplitSection"
"SplitToralSubalgebra"
"SplitViaConic"
"SplitViaMinimalField"
"Splitcomponents"
"SplittingCartanSubalgebra"
"SplittingField"
"SplittingsOfCell"
"Sprint"
"Sqrt"
"SqrtDiscriminantPolynomial"
"SquareFree"
"SquareFreeFactorization"
"SquareLatticeGraph"
"SquareRoot"
"Squarefree"
"SquarefreeFactorization"
"SquarefreePart"
"SquarefreePartialFractionDecomposition"
"SquarefreeRoots"
"SrAutomorphism"
"SrivastavaCode"
"Stabiliser"
"StabiliserCode"
"StabiliserGroup"
"StabiliserMatrix"
"StabiliserOfSpaces"
"Stabilizer"
"StabilizerCode"
"StabilizerGroup"
"StabilizerLadder"
"StabilizerMatrix"
"StandardAction"
"StandardActionGroup"
"StandardBasis"
"StandardCusp"
"StandardForm"
"StandardFormConjugationMatrices"
"StandardFormDFA"
"StandardFormField"
"StandardFormInfo"
"StandardGenerators"
"StandardGeneratorsForLargeRee"
"StandardGraph"
"StandardGroup"
"StandardLattice"
"StandardLengthening"
"StandardMaximalTorus"
"StandardMetacyclicPGroup"
"StandardParabolicSubgroup"
"StandardPresentation"
"StandardRepresentation"
"StandardRootDatum"
"StandardRootSystem"
"StandardSimplex"
"StandardTableaux"
"StandardTableauxOfWeight"
"Star"
"StarInvolution"
"StarOnGroupAlgebra"
"StartEnumeration"
"StartNewClass"
"Stauduhar"
"SteenrodOperation"
"SteinWatkinsDatabase"
"SteinitzClass"
"SteinitzForm"
"Step1"
"Step2"
"SternsAttack"
"StirlingFirst"
"StirlingSecond"
"StitchProcesses"
"StoRModule"
"StoreClear"
"StoreFactor"
"StoreGet"
"StoreIsDefined"
"StoreKeys"
"StoreRemove"
"StoreSet"
"Stratum"
"StringToBytes"
"StringToCode"
"StringToInteger"
"StringToIntegerSequence"
"StringToLower"
"StringToRational"
"StringToUpper"
"Strings"
"Strip"
"StripWhiteSpace"
"StrippedCoding"
"StrongApproximation"
"StrongGeneratorLevel"
"StrongGenerators"
"StrongGeneratorsAtLevel"
"StronglyConnectedComponents"
"StronglyHorizontalVertices"
"StronglyIrregularValues"
"StronglyRegularGraphsDatabase"
"StructureConstant"
"StructureConstants"
"StructureSheaf"
"Sub"
"SubOrder"
"SubWeights"
"SubalgebraModule"
"SubalgebrasInclusionGraph"
"SubcanonicalCurve"
"Subcode"
"SubcodeBetweenCode"
"SubcodeWordsOfWeight"
"Subcomplex"
"SubfieldCode"
"SubfieldLattice"
"SubfieldRepresentationCode"
"SubfieldRepresentationParityCode"
"SubfieldSubcode"
"SubfieldSubplane"
"Subfields"
"Subgraph"
"Subgroup"
"SubgroupChain"
"SubgroupClasses"
"SubgroupElements"
"SubgroupElementsCT"
"SubgroupLattice"
"SubgroupLatticeOld"
"SubgroupOfTorus"
"SubgroupScheme"
"SubgroupToMatrix"
"Subgroups"
"SubgroupsData"
"SubgroupsLift"
"SubgroupsMeet"
"Sublattice"
"SublatticeClasses"
"SublatticeLattice"
"Sublattices"
"Submatrix"
"SubmatrixRange"
"Submodule"
"SubmoduleAction"
"SubmoduleClasses"
"SubmoduleImage"
"SubmoduleLattice"
"SubmoduleLatticeAbort"
"Submodules"
"SubnormalSeries"
"Subring"
"Subsequences"
"Subsets"
"Substitute"
"SubstituteCyclicJoins"
"SubstituteString"
"Substring"
"SubsystemSubgroup"
"Subword"
"SuccessiveMinima"
"SuggestedPrecision"
"Sum"
"SumNorm"
"SumOf"
"SumOfBettiNumbersOfSimpleModules"
"SumOfDivisors"
"SumOfImages"
"SumOfMorphismImages"
"Summands"
"SuperGroup"
"SuperScheme"
"SuperSummitCanonicalLength"
"SuperSummitInfimum"
"SuperSummitProcess"
"SuperSummitRepresentative"
"SuperSummitSet"
"SuperSummitSupremum"
"Superlattice"
"SupersingularEllipticCurve"
"SupersingularInvariants"
"SupersingularModule"
"SupersingularPoints"
"SupersingularPolynomial"
"Supplement"
"Supplements"
"Support"
"SupportOverSplittingField"
"SupportingCone"
"SupportingHyperplane"
"SupportsExtension"
"Supremum"
"SurjectivePart"
"Suspension"
"SuzukiBNpair"
"SuzukiConjugacy"
"SuzukiConjugateRecogniser"
"SuzukiConstructiveMembership"
"SuzukiCyclicEigenvalues"
"SuzukiFindOvoidPoints"
"SuzukiGeneralRecogniser"
"SuzukiGroup"
"SuzukiIrreducibleRepresentation"
"SuzukiMaximalSubgroups"
"SuzukiMaximalSubgroupsConjugacy"
"SuzukiNonSplit6Dim"
"SuzukiOddCharacteristicReduction"
"SuzukiPermutationRepresentation"
"SuzukiPointStabiliser"
"SuzukiRecognition"
"SuzukiReduction"
"SuzukiResetRandomProcess"
"SuzukiSmallFieldReduction"
"SuzukiStabiliser"
"SuzukiStandardConstructiveMembership"
"SuzukiStandardGeneratorsNaturalRep"
"SuzukiStandardMaximalSubgroups"
"SuzukiStandardMembership"
"SuzukiStandardRecogniser"
"SuzukiSylow"
"SuzukiSylowConjugacy"
"SuzukiTensorDecompose"
"SwapColumns"
"SwapElements"
"SwapExtension"
"SwapRows"
"SwinnertonDyerPolynomial"
"Switch"
"SwitchNullMatrix"
"Sylow"
"SylowBasis"
"SylowSubgroup"
"SylowSystem"
"SylvesterMatrix"
"Sym"
"SymmetricBilinearForm"
"SymmetricBilinearFormCS"
"SymmetricBilinearFormMinus"
"SymmetricBilinearFormPlus"
"SymmetricBilinearFormType"
"SymmetricCentralizer"
"SymmetricCharacter"
"SymmetricCharacterDegrees"
"SymmetricCharacterTable"
"SymmetricCharacterValue"
"SymmetricCharacterValues"
"SymmetricComponents"
"SymmetricElementToStandardWord"
"SymmetricElementToWord"
"SymmetricForms"
"SymmetricFunctionAlgebra"
"SymmetricFunctionAlgebraElementary"
"SymmetricFunctionAlgebraHomogeneous"
"SymmetricFunctionAlgebraMonomial"
"SymmetricFunctionAlgebraPower"
"SymmetricFunctionAlgebraSchur"
"SymmetricGroup"
"SymmetricHermitianForms"
"SymmetricMatrix"
"SymmetricNormaliser"
"SymmetricNormalizer"
"SymmetricPower"
"SymmetricPower2"
"SymmetricPowerK"
"SymmetricQuaternionicForms"
"SymmetricRepresentation"
"SymmetricRepresentationOrthogonal"
"SymmetricRepresentationSeminormal"
"SymmetricSquare"
"SymmetricSquarePreimage"
"SymmetricWeightEnumerator"
"Symmetrization"
"SymplecticComponent"
"SymplecticComponents"
"SymplecticDirectSum"
"SymplecticDual"
"SymplecticForm"
"SymplecticFormCS"
"SymplecticGroup"
"SymplecticInnerProduct"
"SymplecticMatrixGroupDatabase"
"SymplecticSpace"
"SymplecticTensorProduct"
"SymplecticTransvection"
"Syndrome"
"SyndromeSpace"
"SysAssignNamesNum"
"System"
"SystemNormaliser"
"SystemNormalizer"
"SystemOfEigenvalues"
"SyzygyMatrix"
"SyzygyModule"
"Sz"
"SzBlackBoxGenerators"
"SzBlackBoxMembership"
"SzClassMap"
"SzClassRepresentative"
"SzConjugacyClasses"
"SzElementToWord"
"SzIsConjugate"
"SzPresentation"
"SzRationalConjugacyClasses"
"SzRedundantSLPGenerators"
"SzSLPCoercion"
"TMPolyCharOdd"
"TMPolyCharOddCheck"
"Tableau"
"TableauIntegerMonoid"
"TableauMonoid"
"Tableaux"
"TableauxOfShape"
"TableauxOnShapeWithContent"
"TableauxWithContent"
"TaftDecomposition"
"TailVector"
"Tails"
"TamagawaNumber"
"TamagawaNumbers"
"TameOrder"
"Tan"
"Tangent"
"TangentAngle"
"TangentCone"
"TangentLine"
"TangentSheaf"
"TangentSpace"
"TangentVariety"
"Tanh"
"TannerGraph"
"TargetRestriction"
"TargetRing"
"TateLichtenbaumPairing"
"TatePairing"
"TeichmuellerLift"
"TeichmuellerSystem"
"Tell"
"Tempname"
"Tensor"
"TensorBasis"
"TensorCond"
"TensorCondensation"
"TensorFactors"
"TensorInducedAction"
"TensorInducedBasis"
"TensorInducedPermutations"
"TensorPower"
"TensorProduct"
"TensorProductAction"
"TensorWreathProduct"
"Term"
"TerminalIndex"
"TerminalPolarisation"
"TerminalVertex"
"Terminalisation"
"Terms"
"TestEquations"
"TestHeckeRep"
"TestHomomorphism"
"TestLists"
"TestPicnDesc"
"TestReeConjugacy"
"TestWG"
"Theta"
"ThetaOperator"
"ThetaSeries"
"ThetaSeriesIntegral"
"ThetaSeriesIntegralLimited"
"ThetaSeriesLimited"
"ThetaSeriesModularForm"
"ThetaSeriesModularFormSpace"
"ThreeDescent"
"ThreeDescentByIsogeny"
"ThreeDescentCubic"
"ThreeIsogenyDescent"
"ThreeIsogenyDescentCubic"
"ThreeIsogenySelmerGroups"
"ThreeSelmerElement"
"ThreeSelmerGroup"
"ThreeTorsionMatrices"
"ThreeTorsionOrbits"
"ThreeTorsionPoints"
"ThreeTorsionType"
"Thue"
"TietzeProcess"
"TitsGroup"
"TjurinaNumber"
"To2DUpperHalfSpaceFundamentalDomian"
"ToAnalyticJacobian"
"ToBianchiCone"
"ToLiE"
"ToddCoxeter"
"ToddCoxeterSchreier"
"ToddCoxeterSchreierCoding"
"Top"
"TopQuotients"
"Tor"
"ToralRootDatum"
"ToralRootSystem"
"ToricAffinePatch"
"ToricCode"
"ToricFunctionField"
"ToricIdentityMap"
"ToricIsAffine"
"ToricIsProjective"
"ToricLattice"
"ToricLiftRationalFunction"
"ToricRestrictRationalFunction"
"ToricVariety"
"ToricVarietyMap"
"ToroidalAutomorphism"
"TorsionBasis"
"TorsionBound"
"TorsionCoefficients"
"TorsionFreeRank"
"TorsionFreeSubgroup"
"TorsionInvariants"
"TorsionLowerBound"
"TorsionMultiple"
"TorsionSubgroup"
"TorsionSubgroupScheme"
"TorsionSubmodule"
"TorsionUnitGroup"
"Torus"
"TorusTerm"
"TotalDegree"
"TotalDegreeAbstract"
"TotalLinking"
"TotalNumberOfCosets"
"TotallyRamifiedExtension"
"TotallySingularComplement"
"TppMatrix"
"Trace"
"TraceAbs"
"TraceInnerProduct"
"TraceMatrix"
"TraceOfFrobenius"
"TraceOfProduct"
"TraceSortDecomposition"
"TraceZeroSubspace"
"Traceback"
"TracesOfFrobenius"
"TrailingCoefficient"
"TrailingTerm"
"Trans2"
"Trans32Identify"
"TransformBilinearForm"
"TransformForm"
"TransformRelations"
"Transformation"
"TransformationMatrix"
"TransitiveDirectProduct"
"TransitiveGroup"
"TransitiveGroupDatabase"
"TransitiveGroupDatabaseLimit"
"TransitiveGroupDescription"
"TransitiveGroupFundamentalInvariants"
"TransitiveGroupIdentification"
"TransitiveGroupProcess"
"TransitiveGroups"
"TransitiveQuotient"
"Transitivity"
"Translate"
"Translation"
"TranslationMap"
"TranslationOfSimplex"
"TranslationToInfinity"
"Transport"
"Transpose"
"TransposePartition"
"Transvection"
"TransvectionFactors"
"Transversal"
"TransversalElt"
"TransversalNonParabolic"
"TransversalParabolic"
"TransversalProcess"
"TransversalProcessNext"
"TransversalProcessRemaining"
"TransversalWords"
"TransverseIndex"
"TransverseIntersections"
"TransverseType"
"TrapezoidalQuadrature"
"TrialDivision"
"TriangularDecomposition"
"TriangularGraph"
"Triangulation"
"TriangulationOfBoundary"
"Trim"
"Trinomials"
"TrivialLieRepresentationDecomposition"
"TrivialModule"
"TrivialOneCocycle"
"TrivialRepresentation"
"TrivialRootDatum"
"TrivialRootSystem"
"TrivialSubgroup"
"Trivialize"
"TrivializeNew"
"Truncate"
"TruncateCoefficients"
"TruncatedHyperball"
"Truncation"
"Tuple"
"TupleToList"
"Tuplist"
"TwelveDescent"
"Twist"
"TwistedBasis"
"TwistedCartanName"
"TwistedGroup"
"TwistedGroupOfLieType"
"TwistedLieAlgebra"
"TwistedPolynomials"
"TwistedQRCode"
"TwistedRootDatum"
"TwistedTori"
"TwistedToriOrders"
"TwistedTorus"
"TwistedTorusOrder"
"TwistedWindingElement"
"TwistedWindingSubmodule"
"TwistingDegree"
"Twists"
"TwoCocycle"
"TwoCover"
"TwoCoverDescent"
"TwoCoverPullback"
"TwoDescendantsOverTwoIsogenyDescendant"
"TwoDescent"
"TwoElement"
"TwoElementNormal"
"TwoGenerators"
"TwoGenus"
"TwoIsogeny"
"TwoIsogenyDescent"
"TwoIsogenySelmerGroups"
"TwoSelmerElement"
"TwoSelmerGroup"
"TwoSelmerGroupData"
"TwoSelmerGroupNew"
"TwoSelmerGroupOld"
"TwoSelmerGroupTest"
"TwoSequencePolynomial"
"TwoSidedIdealClassGroup"
"TwoSidedIdealClasses"
"TwoTorsionMatrices"
"TwoTorsionOrbits"
"TwoTorsionPolynomial"
"TwoTorsionSubgroup"
"TwoTransitiveGroupIdentification"
"Type"
"TypeOfContraction"
"TypeOfSequence"
"Types"
"TypesOfContractions"
"UltraSummitProcess"
"UltraSummitRepresentative"
"UltraSummitSet"
"UncapacitatedGraph"
"Uncondense"
"Undefine"
"UnderlyingDigraph"
"UnderlyingElement"
"UnderlyingField"
"UnderlyingGraph"
"UnderlyingMultiDigraph"
"UnderlyingMultiGraph"
"UnderlyingNetwork"
"UnderlyingRing"
"UnderlyingSet"
"UnderlyingToriMap"
"UnderlyingVertex"
"Ungetc"
"UniformizingElement"
"UniformizingParameter"
"UnimodularExtension"
"Union"
"UnionOfLines"
"UnipotentBasis"
"UnipotentMatrixGroup"
"UnipotentStabiliser"
"UnitDisc"
"UnitEquation"
"UnitGenerators"
"UnitGroup"
"UnitGroupAsSubgroup"
"UnitGroupGenerators"
"UnitRank"
"UnitTrivialSubgroup"
"UnitVector"
"UnitalFeet"
"UnitaryDirectSum"
"UnitaryForm"
"UnitaryFormCS"
"UnitaryReflection"
"UnitarySpace"
"UnitaryTensorProduct"
"UnitaryTransvection"
"Units"
"Unity"
"UnivariateEliminationIdealGenerator"
"UnivariateEliminationIdealGenerators"
"UnivariatePolynomial"
"UniversalEnvelopingAlgebra"
"UniversalMap"
"UniversalPropertyOfCokernel"
"Universe"
"UniverseCode"
"UnlabelledCayleyGraph"
"UnlabelledGraph"
"UnlabelledSchreierGraph"
"Unnormalise"
"Unnormalize"
"UnprojectionCentres"
"UnprojectionCodimensions"
"UnprojectionIndices"
"UnprojectionSubtypes"
"UnprojectionTypes"
"Unprojections"
"UnramifiedExtension"
"UnramifiedQuotientRing"
"UnramifiedSquareSymbol"
"UnsetBounds"
"UnsetGlobalTCParameters"
"UnsetLogFile"
"UnsetOutputFile"
"UntwistedOvergroup"
"UntwistedRootDatum"
"UnweightedGraph"
"UpdateGraphLabels"
"UpdateHadamardDatabase"
"UpdateLevels"
"UpperCentralSeries"
"UpperHalfPlane"
"UpperHalfPlaneUnionCusps"
"UpperHalfPlaneWithCusps"
"UpperTriangularMatrix"
"UseFFT"
"UseFlag"
"UseIFFT"
"UseImult"
"UseSmod"
"UseTwistedHopfStructure"
"UserBasePoints"
"UserGenerators"
"UserMapCreateRaw"
"UserMapImageMapRootDtm"
"UserMapPreimageMapRootDtm"
"UserProcess"
"UserRepresentation"
"UsesBrandt"
"UsesMestre"
"VNullspace"
"Valence"
"Valency"
"ValidateCryptographicCurve"
"Valuation"
"ValuationRing"
"ValuationsOfRoots"
"ValueList"
"ValueMap"
"ValueRing"
"ValuesOnUnitGenerators"
"VanLintBound"
"VariableExtension"
"VariableWeights"
"Variant"
"Variety"
"VarietySequence"
"VarietySizeOverAlgebraicClosure"
"Vector"
"VectorSpace"
"VectorSpaceOverQ"
"VectorSpaceWithBasis"
"Verify"
"VerifyMinimumDistanceLowerBound"
"VerifyMinimumDistanceUpperBound"
"VerifyMinimumLeeDistanceLowerBound"
"VerifyMinimumLeeDistanceUpperBound"
"VerifyMinimumLeeWeightLowerBound"
"VerifyMinimumLeeWeightUpperBound"
"VerifyMinimumWeightLowerBound"
"VerifyMinimumWeightUpperBound"
"VerifyRelation"
"VerschiebungImage"
"VerschiebungMap"
"Vertex"
"VertexConnectivity"
"VertexFacetHeightMatrix"
"VertexFacetIncidenceMatrix"
"VertexLabel"
"VertexLabels"
"VertexPath"
"VertexSeparator"
"VertexSet"
"VerticalJoin"
"Vertices"
"ViewWithJavaview"
"ViewWithJmol"
"VirtualDecomposition"
"VirtualRayIndices"
"VirtualRays"
"Volume"
"VolumeOfBoundary"
"Voronoi"
"VoronoiCell"
"VoronoiData"
"VoronoiGraph"
"WG2GroupRep"
"WG2HeckeRep"
"WGelement2WGtable"
"WGidealgens2WGtable"
"WGtable2WG"
"WPS"
"WZWFusion"
"WaitForConnection"
"WaitForIO"
"WallDecomposition"
"WallForm"
"WallIsometry"
"WeakApproximation"
"WeakDegree"
"WeakOrder"
"WeakPopovForm"
"WeakValuation"
"WeberClassPolynomial"
"WeberF"
"WeberF1"
"WeberF2"
"WeberPolynomial"
"WeberToHilbertClassPolynomial"
"WedderburnDecomposition"
"WeierstrassModel"
"WeierstrassPlaces"
"WeierstrassPoints"
"WeierstrassSeries"
"Weight"
"WeightClass"
"WeightDistribution"
"WeightEnumerator"
"WeightLattice"
"WeightOneHalfData"
"WeightOrbit"
"WeightSequence"
"WeightSpace"
"WeightSpaces"
"WeightToPartition"
"WeightVectors"
"WeightedDegree"
"WeightedDynkinDiagram"
"WeightedProjectiveSpace"
"Weights"
"WeightsAndMultiplicities"
"WeightsAndVectors"
"WeightsOfFlip"
"Weil"
"WeilDescent"
"WeilDescentComposita"
"WeilDescentCompositaMap"
"WeilDescentDegree"
"WeilDescentDeltas"
"WeilDescentFrobeniusExtension"
"WeilDescentFrobeniusExtensions"
"WeilDescentGenus"
"WeilDescentPrimitiveReducedCompositum"
"WeilDescentRationalParametrization"
"WeilDescentReducedCompositum"
"WeilDescentReducedDelta_1"
"WeilHeight"
"WeilPairing"
"WeilPolynomialOverFieldExtension"
"WeilPolynomialToRankBound"
"WeilRepresentation"
"WeilRestriction"
"WeilToClassGroupsMap"
"WeilToClassLatticesMap"
"WeylGroup"
"WeylMatrix"
"WeylWord"
"WeylWordFromAction"
"WhiteheadReduction"
"Width"
"Widths"
"WindingElement"
"WindingElementProjection"
"WindingLattice"
"WindingSubmodule"
"WittDecomposition"
"WittDesign"
"WittIndex"
"WittInvariant"
"WittInvariants"
"WittLieAlgebra"
"WittRing"
"Word"
"WordAcceptor"
"WordAcceptorSize"
"WordAcceptorTable"
"WordCount"
"WordDifferenceAutomaton"
"WordDifferenceSize"
"WordDifferenceTable"
"WordDifferences"
"WordGroup"
"WordInStrongGenerators"
"WordMap"
"WordOnCorootSpace"
"WordOnRoot"
"WordOnRootSpace"
"WordProblem"
"WordProblemData"
"WordStrip"
"WordToDualMatrix"
"WordToMatrix"
"WordToPerm"
"WordToSequence"
"WordToTableau"
"WordWrap"
"Words"
"WordsGramMatrix"
"WordsMatrix"
"WordsOfBoundedLeeWeight"
"WordsOfBoundedWeight"
"WordsOfLeeWeight"
"WordsTransposedMatrix"
"WreathProduct"
"Write"
"WriteBinary"
"WriteBytes"
"WriteFanoData"
"WriteGModuleOver"
"WriteGModuleOverExtensionOf"
"WriteHadamardDatabase"
"WriteIntegralMatrix"
"WriteK3Data"
"WriteNewtonPolytopeToPSFile"
"WriteOver"
"WriteOverElement"
"WriteOverLargerField"
"WriteOverMatrix"
"WriteOverSmallerField"
"WritePolytopeToJVX"
"WritePolytopeToJmolFile"
"WritePolytopeToPALP"
"WritePolytopeToPSFile"
"WritePolytopeToSvgFile"
"WritePolytopesToJVX"
"WriteRawHadamardData"
"WriteRepresentationOver"
"WriteWG"
"WronskianDeterminant"
"WronskianMatrix"
"WronskianOrders"
"X0NQuotient"
"XGCD"
"XXX_VarietySequence"
"Xgcd"
"Xor"
"YYY_SupersingularInvariants"
"YoungSubgroup"
"YoungSubgroupLadder"
"Z4CodeFromBinaryChain"
"Z4Dimension"
"Z4Type"
"ZBasis"
"ZClasses"
"ZGenerators"
"ZSpace"
"ZariskiDecomposition"
"ZechLog"
"Zero"
"ZeroChainMap"
"ZeroCocycle"
"ZeroCode"
"ZeroComplex"
"ZeroCone"
"ZeroCoordinates"
"ZeroDivisor"
"ZeroExtension"
"ZeroFan"
"ZeroGammaOrbitsOnRoots"
"ZeroMap"
"ZeroMatrix"
"ZeroModularAbelianVariety"
"ZeroModule"
"ZeroRootLattice"
"ZeroRootSpace"
"ZeroSequence"
"ZeroSubgroup"
"ZeroSubspace"
"ZeroSubvariety"
"ZeroSumCode"
"Zeroes"
"Zeros"
"ZetaFunction"
"ZetaFunctionsByDeformation"
"ZimmertBound"
"ZinovievCode"
"aInvariants"
"all_ram_extensions_of_deg_p_m_j"
"bInvariants"
"c9LatticeRecord"
"cInvariants"
"calculateAlbertAlgebra"
"calculateBigReeTwistingMapCBMs"
"fPolynomial"
"fValue"
"fValueProof"
"fVector"
"hPolynomial"
"hVector"
"has_element_of_norm_sub"
"isValidSuzukiOrder"
"jFunction"
"jInvariant"
"jInvariantMap"
"jNInvariant"
"jParameter"
"jPoints"
"kArc"
"mainInvolution"
"mfdevel"
"myFindLieAlgebra"
"nCovering"
"nIsogeny"
"nTorsionSubgroup"
"pAdicDiagonalization"
"pAdicEllipticLogarithm"
"pAdicEllipticLogarithmOfCombination"
"pAdicEmbeddings"
"pAdicField"
"pAdicHeight"
"pAdicHeightPairingMatrix"
"pAdicLSeries"
"pAdicQuotientRing"
"pAdicRegulator"
"pAdicRing"
"pCentralSeries"
"pClass"
"pClosure"
"pCore"
"pCover"
"pCoveringGroup"
"pElementaryAbelianNormalSubgroup"
"pExcess"
"pFundamentalUnits"
"pIntegralGModule"
"pIntegralModel"
"pIsogenyDescent"
"pMap"
"pMatrixRing"
"pMaximalOrder"
"pMaximalSubmodules"
"pMinimalWeierstrassModel"
"pMinimise"
"pMinus1"
"pMultiplicator"
"pMultiplicatorRank"
"pNewModularDegree"
"pNormalModel"
"pPlus1"
"pPowerTorsion"
"pPrimaryComponent"
"pPrimaryInvariants"
"pQuotient"
"pQuotientProcess"
"pRadical"
"pRank"
"pRanks"
"pSelmerGroup"
"pSignature"
"pSubalgebra"
"pSubgroup"
"p_hom"
"qCoverDescent"
"qCoverPartialDescent"
"qEigenform"
"qEigenformReductions"
"qExpansion"
"qExpansionBasis"
"qExpansionExpressions"
"qExpansionsOfGenerators"
"qIntegralBasis"
]
commentStart: "// "
| 140463 | ".source.Magma":
editor:
increaseIndentPattern: "^\\s*(function|procedure|if|for|while|elif|else|case|when|repeat|try|catch)[^;]*$|^\\s*\\b([A-Za-z_][A-Za-z0-9_]*)\\b\\s*:=\\s*\\b(function|procedure)\\b.*$"
decreaseIndentPattern: "^\\s*((end (for|if|procedure|function|case|while|try))|else|elif|until)\\b.*"
completions: [
"AFRNumber"
"AGCode"
"AGDecode"
"AGDualCode"
"AGL"
"AGM"
"AGammaL"
"AHom"
"AHomOverCentralizingField"
"AInfinityRecord"
"AModule"
"APNCompleteGeneration"
"APNGeneration"
"APNMatrix"
"APNRationalGeneration"
"AQInvariants"
"AQPrimes"
"ASL"
"ASigmaL"
"ATLASGroup"
"ATLASGroupNames"
"AbelianBasis"
"AbelianExtension"
"AbelianGroup"
"AbelianInvariants"
"AbelianLieAlgebra"
"AbelianNormalQuotient"
"AbelianNormalSubgroup"
"AbelianNormalSubgroupSSS"
"AbelianQuotient"
"AbelianQuotientInvariants"
"AbelianQuotientRewrite"
"AbelianSection"
"AbelianSubfield"
"AbelianSubgroups"
"AbelianpExtension"
"Abs"
"AbsDenominator"
"AbsEltseq"
"AbsIrrApplyConjugation"
"AbsIrrApplyGalois"
"AbsIrrFromMap"
"AbsIrrFromModul"
"AbsoluteAffineAlgebra"
"AbsoluteAlgebra"
"AbsoluteBasis"
"AbsoluteCartanMatrix"
"AbsoluteCharacteristicPolynomial"
"AbsoluteDecomposition"
"AbsoluteDegree"
"AbsoluteDiscriminant"
"AbsoluteField"
"AbsoluteFrobenius"
"AbsoluteFunctionField"
"AbsoluteGaloisGroup"
"AbsoluteInertiaDegree"
"AbsoluteInertiaIndex"
"AbsoluteInvariants"
"AbsoluteLogarithmicHeight"
"AbsoluteMinimalPolynomial"
"AbsoluteModuleOverMinimalField"
"AbsoluteModulesOverMinimalField"
"AbsoluteNorm"
"AbsoluteOrder"
"AbsolutePolynomial"
"AbsolutePrecision"
"AbsoluteQuotientRing"
"AbsoluteRamificationDegree"
"AbsoluteRamificationIndex"
"AbsoluteRank"
"AbsoluteRationalScroll"
"AbsoluteRepresentation"
"AbsoluteRepresentationMatrix"
"AbsoluteTotallyRamifiedExtension"
"AbsoluteTrace"
"AbsoluteValue"
"AbsoluteValues"
"AbsolutelyIrreducibleConstituents"
"AbsolutelyIrreducibleModule"
"AbsolutelyIrreducibleModules"
"AbsolutelyIrreducibleModulesBurnside"
"AbsolutelyIrreducibleModulesDelete"
"AbsolutelyIrreducibleModulesInit"
"AbsolutelyIrreducibleModulesSchur"
"AbsolutelyIrreducibleRepresentationProcessDelete"
"AbsolutelyIrreducibleRepresentationsApply"
"AbsolutelyIrreducibleRepresentationsDelete"
"AbsolutelyIrreducibleRepresentationsInit"
"AbsolutelyIrreducibleRepresentationsProcess"
"AbsolutelyIrreducibleRepresentationsProcessDegree"
"AbsolutelyIrreducibleRepresentationsProcessGroup"
"AbsolutelyIrreducibleRepresentationsSchur"
"Absolutize"
"Ac"
"ActingGroup"
"ActingWord"
"Action"
"ActionGenerator"
"ActionGenerators"
"ActionGroup"
"ActionImage"
"ActionKernel"
"ActionMatrix"
"ActionOnVector"
"AdamsOperator"
"AdaptedBasis"
"AdaptedBasisIndex"
"AdaptedBasisProcessAdd"
"AdaptedBasisProcessAddTest"
"AdaptedBasisProcessInit"
"Add"
"AddAttribute"
"AddAttributes"
"AddColumn"
"AddConstraints"
"AddCovers"
"AddCrossTerms"
"AddCubics"
"AddEdge"
"AddEdges"
"AddGenerator"
"AddMult"
"AddNormalizingGenerator"
"AddPrimes"
"AddRedundantGenerators"
"AddRelation"
"AddRelator"
"AddRepresentation"
"AddRow"
"AddScaledMatrix"
"AddSimplex"
"AddStrongGenerator"
"AddStrongGeneratorToLevel"
"AddSubgroupGenerator"
"AddVectorToLattice"
"AddVertex"
"AddVertices"
"AdditiveCode"
"AdditiveConstaCyclicCode"
"AdditiveCyclicCode"
"AdditiveGroup"
"AdditiveHilbert90"
"AdditiveMacWilliamsTransform"
"AdditiveOrder"
"AdditivePermutationCode"
"AdditivePolynomialFromRoots"
"AdditiveQuasiCyclicCode"
"AdditiveQuasiTwistedCyclicCode"
"AdditiveRepetitionCode"
"AdditiveUniverseCode"
"AdditiveZeroCode"
"AdditiveZeroSumCode"
"AdjacencyMatrix"
"Adjoin"
"Adjoint"
"AdjointAlgebra"
"AdjointGraph"
"AdjointIdeal"
"AdjointIdealForNodalCurve"
"AdjointLinearSystem"
"AdjointLinearSystemForNodalCurve"
"AdjointLinearSystemFromIdeal"
"AdjointMatrix"
"AdjointModule"
"AdjointPreimage"
"AdjointRepresentation"
"AdjointRepresentationDecomposition"
"AdjointVersion"
"Adjoints"
"AdmissableTriangleGroups"
"AdmissiblePair"
"Advance"
"Af"
"AffineAction"
"AffineAlgebra"
"AffineAlgebraMapKernel"
"AffineAmbient"
"AffineDecomposition"
"AffineGammaLinearGroup"
"AffineGeneralLinearGroup"
"AffineImage"
"AffineKernel"
"AffineLieAlgebra"
"AffineNormalForm"
"AffinePatch"
"AffinePlane"
"AffineRepresentative"
"AffineSigmaLinearGroup"
"AffineSpace"
"AffineSpecialLinearGroup"
"Agemo"
"Alarm"
"AlgComb"
"Algebra"
"AlgebraGenerators"
"AlgebraMap"
"AlgebraOverCenter"
"AlgebraOverFieldOfFractions"
"AlgebraStructure"
"AlgebraicClosure"
"AlgebraicGenerators"
"AlgebraicGeometricCode"
"AlgebraicGeometricDualCode"
"AlgebraicPowerSeries"
"AlgebraicToAnalytic"
"AlgorithmicFunctionField"
"AllCliques"
"AllCompactChainMaps"
"AllCones"
"AllDefiningPolynomials"
"AllExtensions"
"AllFaces"
"AllHomomorphisms"
"AllInformationSets"
"AllInverseDefiningPolynomials"
"AllIrreduciblePolynomials"
"AllLinearRelations"
"AllNilpotentLieAlgebras"
"AllPairsShortestPaths"
"AllParallelClasses"
"AllParallelisms"
"AllPartitions"
"AllPassants"
"AllRays"
"AllReductionMaps"
"AllReductionMaps_Factor"
"AllResolutions"
"AllRoots"
"AllSecants"
"AllSlopes"
"AllSolvableLieAlgebras"
"AllSqrts"
"AllSquareRoots"
"AllTangents"
"AllVertices"
"Alldeg"
"AllowableSubgroup"
"AlmostIntegralGModule"
"AlmostInvariantForm"
"AlmostSimpleGroupDatabase"
"Alphabet"
"AlphabetExtensionDegree"
"Alt"
"AlternantCode"
"AlternatingCharacter"
"AlternatingCharacterTable"
"AlternatingCharacterValue"
"AlternatingDominant"
"AlternatingElementToStandardWord"
"AlternatingElementToWord"
"AlternatingGroup"
"AlternatingPower"
"AlternatingSquarePreimage"
"AlternatingSum"
"AlternatingWeylSum"
"AlternativePatches"
"Ambient"
"AmbientLieAlgebra"
"AmbientMatrix"
"AmbientModule"
"AmbientSpace"
"AmbientVariety"
"AmbiguousForms"
"AnalyticDrinfeldModule"
"AnalyticHomomorphisms"
"AnalyticInformation"
"AnalyticJacobian"
"AnalyticModule"
"AnalyticRank"
"AnalyticRankNumberOfTerms"
"AnalyticRankQuadraticTwist"
"And"
"Angle"
"AnisotropicSubdatum"
"Annihilator"
"AntiAutomorphismTau"
"Antipode"
"AntisymmetricForms"
"AntisymmetricHermitianForms"
"AntisymmetricMatrix"
"AntisymmetricQuaternionicForms"
"AnyDescription"
"ApparentCodimension"
"ApparentEquationDegrees"
"ApparentSyzygyDegrees"
"Append"
"AppendBasePoint"
"AppendModule"
"Apply"
"ApplyAutomorphism"
"ApplyForAutgCoerce"
"ApplyTransformation"
"ApplyWeylElement"
"Approx"
"ApproximateByTorsionGroup"
"ApproximateByTorsionPoint"
"ApproximateOrder"
"ApproximateStabiliser"
"Arccos"
"Arccosec"
"Arccot"
"Arcsec"
"Arcsin"
"Arctan"
"Arctan2"
"AreCohomologous"
"AreCollinear"
"AreEqualMorphisms"
"AreEqualObjects"
"AreGenerators"
"AreIdentical"
"AreInvolutionsConjugate"
"AreLinearlyEquivalent"
"AreProportional"
"ArfInvariant"
"Arg"
"Argcosech"
"Argcosh"
"Argcoth"
"Argsech"
"Argsinh"
"Argtanh"
"Argument"
"ArithmeticGenus"
"ArithmeticGenusOfDesingularization"
"ArithmeticGeometricMean"
"ArithmeticLSeries"
"ArithmeticTriangleGroup"
"ArithmeticVolume"
"ArrowWeights"
"Arrows"
"ArtRepCreate"
"ArtinMap"
"ArtinRepresentation"
"ArtinRepresentations"
"ArtinSchreierExtension"
"ArtinSchreierImage"
"ArtinSchreierMap"
"ArtinSchreierSymbol"
"ArtinTateFormula"
"AsExtensionOf"
"AssertAttribute"
"AssertEmbedding"
"AssignBase"
"AssignCapacities"
"AssignCapacity"
"AssignEdgeLabels"
"AssignLDPCMatrix"
"AssignLabel"
"AssignLabels"
"AssignNamePrefix"
"AssignNames"
"AssignNamesBase"
"AssignVertexLabels"
"AssignWeight"
"AssignWeights"
"AssociatedEllipticCurve"
"AssociatedHyperellipticCurve"
"AssociatedNewSpace"
"AssociatedPrimitiveCharacter"
"AssociatedPrimitiveGrossencharacter"
"AssociativeAlgebra"
"AssociativeArray"
"AtEof"
"AteTPairing"
"AteqPairing"
"AtkinLehner"
"AtkinLehnerDecomposition"
"AtkinLehnerEigenvalue"
"AtkinLehnerInvolution"
"AtkinLehnerNumberOfFixedPoints"
"AtkinLehnerOperator"
"AtkinLehnerOperatorOverQ"
"AtkinLehnerPrimes"
"AtkinLehnerSubspace"
"AtkinModularEquation"
"AtkinModularPolynomial"
"AtlasGroup"
"AtlasGroupNames"
"AtlasVersionInfo"
"Attach"
"AttachSpec"
"AugmentCode"
"Augmentation"
"AugmentationIdeal"
"AugmentationMap"
"Aut"
"AutGpSG"
"AutPSp"
"AutoCorrelation"
"AutoDD"
"AutoDR"
"AutoDW"
"AutomaticGroup"
"Automorphism"
"AutomorphismGroup"
"AutomorphismGroupAsMatrixGroup"
"AutomorphismGroupFF"
"AutomorphismGroupOverCyclotomicExtension"
"AutomorphismGroupOverExtension"
"AutomorphismGroupOverQ"
"AutomorphismGroupPGroup"
"AutomorphismGroupPGroup2"
"AutomorphismGroupStabilizer"
"AutomorphismOmega"
"AutomorphismSubgroup"
"AutomorphismTalpha"
"AutomorphismWorld"
"Automorphisms"
"AutomorphousClasses"
"AuxiliaryLevel"
"AxisMultiplicities"
"BBSModulus"
"BCHBound"
"BCHCode"
"BDLC"
"BDLCLowerBound"
"BDLCUpperBound"
"BFSTree"
"BKLC"
"BKLCLowerBound"
"BKLCUpperBound"
"BKQC"
"BKZ"
"BLLC"
"BLLCLowerBound"
"BLLCUpperBound"
"BQPlotkinSum"
"BSGS"
"BSGSProcess"
"BString"
"BachBound"
"BacherPolynomialInternal"
"BacherPolynomialTestInternal"
"BadPlaces"
"BadPrimes"
"BaerDerivation"
"BaerSubplane"
"Ball"
"Bang"
"BarAutomorphism"
"Barvinok"
"BarycentricSubdivision"
"Base"
"Base64Decode"
"Base64DecodeFile"
"Base64Encode"
"Base64EncodeFile"
"BaseBlowupContribution"
"BaseCategory"
"BaseChange"
"BaseChangeMatrix"
"BaseChangedDefiningEquations"
"BaseComponent"
"BaseCurve"
"BaseElement"
"BaseExtend"
"BaseExtension"
"BaseExtensionMorphisms"
"BaseField"
"BaseGerm"
"BaseImage"
"BaseImageWordStrip"
"BaseLocus"
"BaseMPolynomial"
"BaseModule"
"BaseObject"
"BasePoint"
"BasePoints"
"BaseRing"
"BaseScheme"
"BaseSize"
"BaseSpace"
"BasicAlgebra"
"BasicAlgebraGrpPToBasicAlgebra"
"BasicAlgebraOfEndomorphismAlgebra"
"BasicAlgebraOfExtAlgebra"
"BasicAlgebraOfGroupAlgebra"
"BasicAlgebraOfHeckeAlgebra"
"BasicAlgebraOfMatrixAlgebra"
"BasicAlgebraOfSchurAlgebra"
"BasicAlgebraPGroup"
"BasicCodegrees"
"BasicDegrees"
"BasicOrbit"
"BasicOrbitLength"
"BasicOrbitLengths"
"BasicOrbits"
"BasicParameters"
"BasicRootMatrices"
"BasicStabiliser"
"BasicStabiliserChain"
"BasicStabilizer"
"BasicStabilizerChain"
"Basis"
"BasisChange"
"BasisDenominator"
"BasisElement"
"BasisMatrix"
"BasisMinus"
"BasisOfDegree0CoxMonomials"
"BasisOfDifferentialsFirstKind"
"BasisOfHolomorphicDifferentials"
"BasisOfRationalFunctionField"
"BasisPlus"
"BasisProduct"
"BasisProducts"
"BasisReduction"
"Basket"
"Bell"
"BerlekampMassey"
"Bernoulli"
"BernoulliApproximation"
"BernoulliNumber"
"BernoulliPolynomial"
"BesselFunction"
"BesselFunctionSecondKind"
"BestApproximation"
"BestDimensionLinearCode"
"BestKnownLinearCode"
"BestKnownQuantumCode"
"BestLengthLinearCode"
"BestTranslation"
"BetaFunction"
"BettiNumber"
"BettiNumbers"
"BettiTable"
"BianchiCuspForms"
"Bicomponents"
"BigO"
"BigPeriodMatrix"
"BigTorus"
"BilinearFormSign"
"BilinearFormType"
"BinaryCodedForm"
"BinaryForms"
"BinaryQuadraticForms"
"BinaryResidueCode"
"BinaryString"
"BinaryToBytes"
"BinaryTorsionCode"
"Binomial"
"BinomialPolynomial"
"BinomialToricEmbedding"
"BipartiteGraph"
"Bipartition"
"BiquadraticResidueSymbol"
"BitFlip"
"BitPrecision"
"BitwiseAnd"
"BitwiseNot"
"BitwiseOr"
"BitwiseXor"
"BlackboxGroup"
"Block"
"BlockDegree"
"BlockDegrees"
"BlockDiagMat"
"BlockDiagScalarMat"
"BlockGraph"
"BlockGroup"
"BlockMatrix"
"BlockSet"
"BlockSize"
"BlockSizes"
"BlockTranspose"
"Blocks"
"BlocksAction"
"BlocksImage"
"BlocksKernel"
"Blowup"
"BlumBlumShub"
"BlumBlumShubModulus"
"BogomolovNumber"
"BooleanPolynomial"
"BooleanPolynomialRing"
"Booleans"
"BorderedDoublyCirculantQRCode"
"Borel"
"BorelSubgroup"
"Bottom"
"Bound"
"Boundary"
"BoundaryIntersection"
"BoundaryMap"
"BoundaryMapGrpP"
"BoundaryMaps"
"BoundaryMatrix"
"BoundaryPoints"
"BoundedFSubspace"
"BoundingBox"
"BoxElements"
"BraidGroup"
"Branch"
"BranchVertexPath"
"BrandtModule"
"BrandtModuleDimension"
"BrauerCharacter"
"BrauerCharacterTable"
"BrauerClass"
"BravaisGroup"
"BreadthFirstSearchTree"
"Bruhat"
"BruhatDescendants"
"BruhatLessOrEqual"
"BuildHom"
"BurauRepresentation"
"BurnsideMatrix"
"BytesToString"
"C6Action"
"C6Basis"
"C6Image"
"C6Kernel"
"C6Parameters"
"C9AlternatingElementToStandardWord"
"C9AlternatingElementToWord"
"C9RecogniseAlternating"
"CFP"
"CGO"
"CGOMinus"
"CGOPlus"
"CGSp"
"CGU"
"CMPoints"
"CMTwists"
"CO"
"COMinus"
"COPlus"
"CRT"
"CSO"
"CSOMinus"
"CSOPlus"
"CSSCode"
"CSU"
"CSp"
"CU"
"CacheClearToricLattice"
"CacheClearToricVariety"
"CalabiYau"
"CalculateCanonicalClass"
"CalculateMultiplicities"
"CalculateRegularSpliceDiagram"
"CalculateTransverseIntersections"
"CalderbankShorSteaneCode"
"CambridgeMatrix"
"CanChangeRing"
"CanChangeUniverse"
"CanContinueEnumeration"
"CanDetermineIsomorphism"
"CanIdentifyGroup"
"CanMakeIntegral"
"CanMakeIntegralGModule"
"CanNormalize"
"CanReallyMakeIntegral"
"CanRedoEnumeration"
"CanSignNormalize"
"CanWriteOver"
"CanonicalBasis"
"CanonicalClass"
"CanonicalDegree"
"CanonicalDissidentPoints"
"CanonicalDivisor"
"CanonicalElements"
"CanonicalEmbedding"
"CanonicalFactorRepresentation"
"CanonicalGenerators"
"CanonicalGraph"
"CanonicalHeight"
"CanonicalImage"
"CanonicalInvolution"
"CanonicalLength"
"CanonicalLinearSystem"
"CanonicalLinearSystemFromIdeal"
"CanonicalMap"
"CanonicalModularEquation"
"CanonicalModularPolynomial"
"CanonicalModule"
"CanonicalMultiplicity"
"CanonicalRepresentation"
"CanonicalSheaf"
"Canonicalisation"
"CanteautChabaudsAttack"
"CantorComposition1"
"CantorComposition2"
"Capacities"
"Capacity"
"CarlitzModule"
"CarmichaelLambda"
"CartanInteger"
"CartanMatrix"
"CartanName"
"CartanSubalgebra"
"CarterSubgroup"
"CartesianPower"
"CartesianProduct"
"Cartier"
"CartierRepresentation"
"CartierToWeilMap"
"CasimirValue"
"CasselsMap"
"CasselsTatePairing"
"Catalan"
"Category"
"CayleyGraph"
"Ceiling"
"Cell"
"CellNumber"
"CellSize"
"CellSizeByPoint"
"Center"
"CenterDensity"
"CenterPolynomials"
"CentralCharacter"
"CentralCollineationGroup"
"CentralEndomorphisms"
"CentralExtension"
"CentralExtensionProcess"
"CentralExtensions"
"CentralIdempotents"
"CentralOrder"
"CentralProductDecomposition"
"CentralSumDecomposition"
"CentralValue"
"Centraliser"
"CentraliserOfInvolution"
"CentralisingMatrix"
"CentralisingRoots"
"Centralizer"
"CentralizerGLZ"
"CentralizerOfNormalSubgroup"
"Centre"
"CentreDensity"
"CentreOfEndomorphismAlgebra"
"CentreOfEndomorphismRing"
"CentrePolynomials"
"CentredAffinePatch"
"Chabauty"
"Chabauty0"
"ChabautyEquations"
"ChainComplex"
"ChainMap"
"ChainmapToCohomology"
"ChangGraphs"
"ChangeAmbient"
"ChangeBase"
"ChangeBasis"
"ChangeBasisCSAlgebra"
"ChangeDerivation"
"ChangeDifferential"
"ChangeDirectory"
"ChangeExponentDenominator"
"ChangeField"
"ChangeModel"
"ChangeN"
"ChangeOfBasisMatrix"
"ChangeOrder"
"ChangePrecision"
"ChangeRepresentationType"
"ChangeRing"
"ChangeRingAlgLie"
"ChangeSign"
"ChangeSupport"
"ChangeUniverse"
"Char"
"Character"
"CharacterDegrees"
"CharacterDegreesPGroup"
"CharacterField"
"CharacterFromTraces"
"CharacterMultiset"
"CharacterOfImage"
"CharacterRing"
"CharacterTable"
"CharacterTableConlon"
"CharacterTableDS"
"CharacterToModular"
"CharacterToRepresentation"
"Characteristic"
"CharacteristicPolynomial"
"CharacteristicPolynomialFromTraces"
"CharacteristicSeries"
"CharacteristicVector"
"Characters"
"CharpolyOfFrobenius"
"ChebyshevFirst"
"ChebyshevSecond"
"ChebyshevT"
"ChebyshevU"
"CheckBasket"
"CheckCharacterTable"
"CheckCodimension"
"CheckEmbed"
"CheckFunctionalEquation"
"CheckIdeal"
"CheckOrder"
"CheckPoint"
"CheckPolynomial"
"CheckSparseRootDatum"
"CheckWeilPolynomial"
"ChevalleyBasis"
"ChevalleyBasisOld"
"ChevalleyGroup"
"ChevalleyGroupOrder"
"ChevalleyOrderPolynomial"
"ChiefFactors"
"ChiefFactorsToString"
"ChiefSeries"
"ChienChoyCode"
"ChineseRemainderTheorem"
"Cholesky"
"ChromaticIndex"
"ChromaticNumber"
"ChromaticPolynomial"
"ChtrLiftInternal"
"Class"
"ClassAction"
"ClassCentraliser"
"ClassCentralizer"
"ClassField"
"ClassFunctionSpace"
"ClassGroup"
"ClassGroupAbelianInvariants"
"ClassGroupChecks"
"ClassGroupCyclicFactorGenerators"
"ClassGroupExactSequence"
"ClassGroupGenerationBound"
"ClassGroupGetUseMemory"
"ClassGroupPRank"
"ClassGroupPrimeRepresentatives"
"ClassGroupSetUseMemory"
"ClassGroupStructure"
"ClassImage"
"ClassMap"
"ClassMatrix"
"ClassNumber"
"ClassNumberApproximation"
"ClassNumberApproximationBound"
"ClassPowerCharacter"
"ClassPowerGroup"
"ClassRepresentative"
"ClassRepresentativeFromInvariants"
"ClassTwo"
"ClassUnion"
"Classes"
"ClassesAHInternal"
"ClassesAlmostSimpleInternal"
"ClassesData"
"ClassesInductive"
"ClassesInductiveSetup"
"ClassesLiftCentPMSetup"
"ClassesTF"
"ClassesTFOrbitReps"
"ClassicalConstructiveRecognition"
"ClassicalElementToWord"
"ClassicalForms"
"ClassicalFormsCS"
"ClassicalGroupOrder"
"ClassicalGroupQuotient"
"ClassicalIntersection"
"ClassicalMaximals"
"ClassicalModularEquation"
"ClassicalModularPolynomial"
"ClassicalMultiplication"
"ClassicalMultiplierMap"
"ClassicalPeriod"
"ClassicalStandardGenerators"
"ClassicalStandardPresentation"
"ClassicalSylow"
"ClassicalSylowConjugation"
"ClassicalSylowNormaliser"
"ClassicalSylowToPC"
"ClassicalType"
"ClassifyProjectiveSurface"
"Clean"
"CleanCompositionTree"
"ClearDenominator"
"ClearDenominators"
"ClearIdentificationTree"
"ClearPrevious"
"ClearRowDenominators"
"ClearVerbose"
"ClebschGraph"
"ClebschInvariants"
"ClebschToIgusaClebsch"
"CliffordAlgebra"
"CliqueComplex"
"CliqueNumber"
"ClockCycles"
"CloseSmallGroupDatabase"
"CloseVectors"
"CloseVectorsMatrix"
"CloseVectorsProcess"
"ClosestUnit"
"ClosestVectors"
"ClosestVectorsMatrix"
"ClosureGraph"
"ClosureLiE"
"Cluster"
"CoblesRadicand"
"CoboundaryMapImage"
"Cocycle"
"CocycleMap"
"CodeComplement"
"CodeEntry"
"CodeEntryQECC"
"CodePermutationToMatrix"
"CodeToString"
"Codegree"
"Codifferent"
"Codimension"
"Codomain"
"Coefficient"
"CoefficientField"
"CoefficientHeight"
"CoefficientIdeal"
"CoefficientIdeals"
"CoefficientLength"
"CoefficientMap"
"CoefficientMorphism"
"CoefficientRing"
"CoefficientSpace"
"Coefficients"
"CoefficientsAndMonomials"
"CoefficientsNonSpiral"
"CoefficientsToElementarySymmetric"
"CoerceByClassAction"
"CoerceGrpLie"
"Coercion"
"CoercionGrpLie"
"Coercions"
"Cofactor"
"Cofactors"
"CohenCoxeterName"
"CohomologicalDimension"
"CohomologicalDimensions"
"Cohomology"
"CohomologyClass"
"CohomologyDimension"
"CohomologyElementToChainMap"
"CohomologyElementToCompactChainMap"
"CohomologyGeneratorToChainMap"
"CohomologyGroup"
"CohomologyLeftModuleGenerators"
"CohomologyModule"
"CohomologyRelations"
"CohomologyRightModuleGenerators"
"CohomologyRing"
"CohomologyRingGenerators"
"CohomologyRingQuotient"
"CohomologyToChainmap"
"Coincidence"
"CoisogenyGroup"
"Cokernel"
"ColinearPointsOnPlane"
"CollateWhiteSpace"
"Collect"
"CollectRelations"
"CollineationGroup"
"CollineationGroupStabilizer"
"CollineationSubgroup"
"Colon"
"ColonIdeal"
"ColonIdealEquivalent"
"ColonModule"
"Column"
"ColumnLength"
"ColumnMatrix"
"ColumnSkewLength"
"ColumnSubmatrix"
"ColumnSubmatrixRange"
"ColumnWeight"
"ColumnWeights"
"ColumnWord"
"Columns"
"CombineIdealFactorisation"
"CombineInvariants"
"CommonComplement"
"CommonComponent"
"CommonDenominator"
"CommonEigenspaces"
"CommonModularStructure"
"CommonOverfield"
"CommonZeros"
"Commutator"
"CommutatorGraph"
"CommutatorGroup"
"CommutatorIdeal"
"CommutatorModule"
"CommutatorSubgroup"
"CompactDeletedProjectiveResolution"
"CompactInjectiveResolution"
"CompactPart"
"CompactPresentation"
"CompactProjectiveResolution"
"CompactProjectiveResolutionPGroup"
"CompactProjectiveResolutionsOfAllSimpleModules"
"CompactSystemOfEigenvalues"
"CompactSystemOfEigenvaluesOverQ"
"CompactSystemOfEigenvaluesVector"
"CompanionMatrix"
"Complement"
"ComplementBasis"
"ComplementDFA"
"ComplementEquationsMatrix"
"ComplementOfImage"
"ComplementVectors"
"ComplementaryDivisor"
"ComplementaryErrorFunction"
"Complements"
"Complete"
"CompleteClassGroup"
"CompleteDescription"
"CompleteDigraph"
"CompleteGraph"
"CompleteKArc"
"CompleteTheSquare"
"CompleteTupleList"
"CompleteUnion"
"CompleteWeightEnumerator"
"Completion"
"Complex"
"ComplexCartanMatrix"
"ComplexConjugate"
"ComplexEmbeddings"
"ComplexField"
"ComplexReflectionGroup"
"ComplexReflectionGroupOld"
"ComplexRootDatum"
"ComplexRootMatrices"
"ComplexToPolar"
"ComplexValue"
"Component"
"ComponentGroup"
"ComponentGroupOfIntersection"
"ComponentGroupOfKernel"
"ComponentGroupOrder"
"ComponentProduct"
"Components"
"ComposeQuotients"
"ComposeTransformations"
"Composite"
"CompositeFields"
"Composition"
"CompositionFactors"
"CompositionSequence"
"CompositionSeries"
"CompositionSeriesMatrix"
"CompositionTree"
"CompositionTreeCBM"
"CompositionTreeElementToWord"
"CompositionTreeFactorNumber"
"CompositionTreeFastVerification"
"CompositionTreeNiceGroup"
"CompositionTreeNiceToUser"
"CompositionTreeOrder"
"CompositionTreeReductionInfo"
"CompositionTreeSLPGroup"
"CompositionTreeSeries"
"CompositionTreeVerify"
"Compositum"
"ComputePreImageRule"
"ComputePrimeFactorisation"
"ComputeReducedFactorisation"
"ComputeSubgroupLattice"
"Comultiplication"
"ConcatenateProcesses"
"ConcatenatedCode"
"CondensationMatrices"
"CondensedAlgebra"
"CondensedAlgebraSimpleModules"
"CondensedModule"
"ConditionNumber"
"ConditionalClassGroup"
"ConditionedGroup"
"Conductor"
"ConductorOfCharacterField"
"ConductorRange"
"Cone"
"ConeInSublattice"
"ConeIndices"
"ConeIntersection"
"ConeQuotientByLinearSubspace"
"ConeToPolyhedron"
"ConeWithInequalities"
"Cones"
"ConesOfCodimension"
"ConformalClassicalGroup"
"ConformalHamiltonianLieAlgebra"
"ConformalOrthogonalGroup"
"ConformalOrthogonalGroupMinus"
"ConformalOrthogonalGroupPlus"
"ConformalSpecialLieAlgebra"
"ConformalSymplecticGroup"
"ConformalUnitaryGroup"
"CongruenceGroup"
"CongruenceGroupAnemic"
"CongruenceImage"
"CongruenceIndices"
"CongruenceModulus"
"CongruenceSubgroup"
"Conic"
"ConicOverSubfield"
"ConjecturalRegulator"
"ConjecturalSha"
"ConjugacyClasses"
"Conjugate"
"ConjugateComplementSubspace"
"ConjugateIntoBorel"
"ConjugateIntoTorus"
"ConjugatePartition"
"ConjugateTranspose"
"Conjugates"
"ConjugatesToPowerSums"
"ConjugatingElement"
"ConjugationClassLength"
"Connect"
"ConnectedCenter"
"ConnectedCentre"
"ConnectedComponents"
"ConnectedKernel"
"ConnectingHomomorphism"
"ConnectionNumber"
"ConnectionPolynomial"
"Conorm"
"Consistency"
"ConstaCyclicCode"
"ConstantCoefficient"
"ConstantField"
"ConstantFieldExtension"
"ConstantMap"
"ConstantRing"
"ConstantTerm"
"ConstantWords"
"Constituent"
"Constituents"
"ConstituentsWithMultiplicities"
"Constraint"
"ConstructBasicOrbit"
"ConstructBasicOrbits"
"ConstructOneOrbitInternal"
"ConstructOrbitsInternal"
"ConstructPermsInternal"
"ConstructTable"
"Construction"
"ConstructionX"
"ConstructionX3"
"ConstructionX3u"
"ConstructionXChain"
"ConstructionXX"
"ConstructionXXu"
"ConstructionY1"
"ContactLieAlgebra"
"ContainsQuadrangle"
"ContainsZero"
"Content"
"ContentAndPrimitivePart"
"Continuations"
"ContinueEnumeration"
"ContinuedFraction"
"Contpp"
"Contract"
"Contraction"
"Contravariants"
"ControlledNot"
"Convergents"
"ConvergentsSequence"
"Converse"
"ConvertFromManinSymbol"
"ConvertToCWIFormat"
"Convolution"
"ConwayPolynomial"
"Coordelt"
"Coordinate"
"CoordinateLattice"
"CoordinateMatrix"
"CoordinateRing"
"CoordinateSpace"
"CoordinateSubvariety"
"CoordinateVector"
"Coordinates"
"CoordinatesToElement"
"Coppersmith"
"CoprimeBasis"
"CoprimeBasisInsert"
"CoprimeRepresentative"
"Copy"
"CopyNames"
"CopyRepresentation"
"CordaroWagnerCode"
"Core"
"CoreflectionGroup"
"CoreflectionMatrices"
"CoreflectionMatrix"
"CorestrictCocycle"
"CorestrictionMapImage"
"Coroot"
"CorootAction"
"CorootGSet"
"CorootHeight"
"CorootLattice"
"CorootNorm"
"CorootNorms"
"CorootPosition"
"CorootSpace"
"Coroots"
"CorrectForm"
"Correlation"
"CorrelationGroup"
"CorrespondingResolutionGraph"
"CorrespondingVertices"
"Cos"
"Cosec"
"Cosech"
"CosetAction"
"CosetDistanceDistribution"
"CosetEnumerationProcess"
"CosetGeometry"
"CosetGraphIntersect"
"CosetImage"
"CosetIntersection"
"CosetKernel"
"CosetLeaders"
"CosetNumber"
"CosetRepresentatives"
"CosetSatisfying"
"CosetSpace"
"CosetTable"
"CosetTableToDFA"
"CosetTableToPermutationGroup"
"CosetTableToRepresentation"
"CosetsSatisfying"
"Cosh"
"Cot"
"Coth"
"Cotrace"
"Counit"
"CountEntriesEqual"
"CountPGroups"
"Covalence"
"Covariant"
"CoveringCovariants"
"CoveringMap"
"CoveringRadius"
"CoveringStructure"
"CoveringSubgroup"
"Coverlattice"
"CoweightLattice"
"CoxMonomialLattice"
"CoxRing"
"CoxeterDiagram"
"CoxeterElement"
"CoxeterForm"
"CoxeterGraph"
"CoxeterGroup"
"CoxeterGroupFactoredOrder"
"CoxeterGroupOrder"
"CoxeterLength"
"CoxeterMatrix"
"CoxeterNumber"
"Cputime"
"CreateCharacterFile"
"CreateCycleFile"
"CreateElement"
"CreateFanoData"
"CreateK3Data"
"CreateLieGroup"
"CreateLieGroupElement"
"CreateNilpOrbAlgLie"
"CreateRootVectorSpace"
"CreateVirtualRays"
"Create_SmallCrvMod_Structure"
"CremonaDatabase"
"CremonaReference"
"CremonaReferenceData"
"CriticalStrip"
"CrossCorrelation"
"CrossPolytope"
"CrvGenericGroup"
"CryptographicCurve"
"CrystalGraph"
"CubicFromPoint"
"CubicModel"
"CubicModelSearch"
"CubicSurfaceByHexahedralCoefficients"
"Cunningham"
"Current"
"CurrentLabel"
"Curve"
"CurveDifferential"
"CurveDivisor"
"CurvePlace"
"CurveQuotient"
"Curves"
"Cusp"
"CuspForms"
"CuspIsSingular"
"CuspPlaces"
"CuspWidth"
"CuspidalInducingDatum"
"CuspidalProjection"
"CuspidalSubgroup"
"CuspidalSubspace"
"Cusps"
"CutVertices"
"Cycle"
"CycleCount"
"CycleDecomposition"
"CycleIndexPolynomial"
"CycleStructure"
"CycleStructureToSeq"
"CyclicCode"
"CyclicGroup"
"CyclicPolytope"
"CyclicShiftsMatrix"
"CyclicSubgroups"
"CyclicToRadical"
"CyclotomicAutomorphismGroup"
"CyclotomicClassNumbers"
"CyclotomicEmbedding"
"CyclotomicFactors"
"CyclotomicField"
"CyclotomicOrder"
"CyclotomicPolynomial"
"CyclotomicQuadraticExtensions"
"CyclotomicRelativeField"
"CyclotomicUnitGroup"
"CyclotomicUnits"
"CyclotomicUnramifiedExtension"
"Cylinder"
"DFSTree"
"Darstellungsgruppe"
"Data"
"DataAutLie"
"DatabaseID"
"DatabaseType"
"DawsonIntegral"
"DecimalToBitPrecision"
"Decimation"
"Decode"
"DecodeML"
"DecodingAttack"
"DecomposeAutomorphism"
"DecomposeCharacter"
"DecomposeExteriorPower"
"DecomposeKronecker"
"DecomposeSymmetricPower"
"DecomposeTensorProduct"
"DecomposeUsing"
"DecomposeVector"
"Decomposition"
"DecompositionField"
"DecompositionGroup"
"DecompositionMatrix"
"DecompositionMultiset"
"DecompositionOldAndNew"
"DecompositionType"
"DecompositionTypeFrequency"
"Decycle"
"DedekindEta"
"DedekindTest"
"DeepHoles"
"DefRing"
"Defect"
"DefectGroup"
"DefinedInDegrees"
"DefinesAbelianSubvariety"
"DefinesHomomorphism"
"DefinesTableau"
"DefiningConstantField"
"DefiningEquation"
"DefiningEquations"
"DefiningIdeal"
"DefiningMap"
"DefiningMatrix"
"DefiningModularSymbolsSpace"
"DefiningModulusIsConductor"
"DefiningMonomial"
"DefiningPoints"
"DefiningPolynomial"
"DefiningPolynomials"
"DefiningSubschemePolynomial"
"DefiniteClassNumber"
"DefiniteGramMatrix"
"DefiniteNorm"
"DefinitionSets"
"DegeneracyCosetRepsInner"
"DegeneracyMap"
"DegeneracyMatrix"
"Degree"
"Degree2Subcovers"
"Degree3Subcovers"
"Degree6DelPezzoType2_1"
"Degree6DelPezzoType2_2"
"Degree6DelPezzoType2_3"
"Degree6DelPezzoType3"
"Degree6DelPezzoType4"
"Degree6DelPezzoType6"
"DegreeMap"
"DegreeOfCharacterField"
"DegreeOfExactConstantField"
"DegreeOfFieldExtension"
"DegreeOnePrimeIdeals"
"DegreeRange"
"DegreeReduction"
"DegreeSequence"
"Degrees"
"DegreesOfCohomologyGenerators"
"DegreesOfGenerators"
"DelPezzoSurface"
"Delaunay"
"DelaunayMesh"
"DeleteAllAssociatedData"
"DeleteAttributes"
"DeleteCapacities"
"DeleteCapacity"
"DeleteCollector"
"DeleteData"
"DeleteEdgeLabels"
"DeleteGenerator"
"DeleteGlobalModularFormsData"
"DeleteHeckePrecomputation"
"DeleteLabel"
"DeleteLabels"
"DeleteNonsplitCollector"
"DeleteNonsplitSolutionspace"
"DeleteProcess"
"DeleteProcessComplete"
"DeleteProcessDown"
"DeleteRelation"
"DeleteSplitCollector"
"DeleteSplitSolutionspace"
"DeleteStoredWords"
"DeleteVertexLabels"
"DeleteWeight"
"DeleteWeights"
"DeletedProjectiveResolution"
"DelsarteGoethalsCode"
"Delta"
"DeltaPreimage"
"Demazure"
"Denominator"
"Density"
"DensityEvolutionBinarySymmetric"
"DensityEvolutionGaussian"
"Depth"
"DepthFirstSearchTree"
"Derivation"
"Derivative"
"DerivedGroup"
"DerivedGroupMonteCarlo"
"DerivedLength"
"DerivedSeries"
"DerivedSubgroup"
"DerksenIdeal"
"Descendants"
"DescentInformation"
"DescentMaps"
"Design"
"Detach"
"DetachSpec"
"Determinant"
"Development"
"Diagonal"
"DiagonalAutomorphism"
"DiagonalBlockDecomposition"
"DiagonalBlockStructure"
"DiagonalBlocks"
"DiagonalBlocksStructure"
"DiagonalForm"
"DiagonalJoin"
"DiagonalMatrix"
"DiagonalModel"
"DiagonalSparseMatrix"
"DiagonalSum"
"Diagonalisation"
"DiagonalisingMatrix"
"Diagonalization"
"Diagram"
"DiagramAutomorphism"
"Diameter"
"DiameterPath"
"DickmanRho"
"DicksonFirst"
"DicksonInvariant"
"DicksonSecond"
"DicyclicGroup"
"Difference"
"DifferenceSet"
"Different"
"DifferentDivisor"
"Differential"
"DifferentialBasis"
"DifferentialField"
"DifferentialFieldExtension"
"DifferentialIdeal"
"DifferentialLaurentSeriesRing"
"DifferentialOperator"
"DifferentialOperatorRing"
"DifferentialRing"
"DifferentialRingExtension"
"DifferentialSpace"
"Differentiation"
"DifferentiationSequence"
"DihedralForms"
"DihedralGroup"
"Dilog"
"Dimension"
"DimensionBoundTest"
"DimensionByFormula"
"DimensionComplexTorus"
"DimensionCuspForms"
"DimensionCuspFormsGamma0"
"DimensionCuspFormsGamma1"
"DimensionNewCuspForms"
"DimensionNewCuspFormsGamma0"
"DimensionNewCuspFormsGamma1"
"DimensionOfAlgebra"
"DimensionOfCentreOfEndomorphismRing"
"DimensionOfEndomorphismRing"
"DimensionOfExactConstantField"
"DimensionOfFieldOfGeometricIrreducibility"
"DimensionOfHighestWeightModule"
"DimensionOfHom"
"DimensionOfHomology"
"DimensionOfKernelZ2"
"DimensionOfNonQFactorialLocus"
"DimensionOfSpanZ2"
"DimensionsEstimate"
"DimensionsOfHomology"
"DimensionsOfInjectiveModules"
"DimensionsOfProjectiveModules"
"DimensionsOfTerms"
"DirectProduct"
"DirectSum"
"DirectSumDecomposition"
"DirectSumRestrictionOfScalarsToQ"
"DirichletCharacter"
"DirichletCharacterFromValuesOnUnitGenerators"
"DirichletCharacters"
"DirichletGroup"
"DirichletGroupCopy"
"DirichletGroupFull"
"DirichletRestriction"
"DiscToPlane"
"Disconnect"
"DiscreteLogMapSmooth"
"Discriminant"
"DiscriminantDivisor"
"DiscriminantOfHeckeAlgebra"
"DiscriminantRange"
"DisownChildren"
"Display"
"DisplayBurnsideMatrix"
"DisplayCompTreeNodes"
"DisplayFareySymbolDomain"
"DisplayPolyMap"
"DisplayPolygons"
"Distance"
"DistanceMatrix"
"DistancePartition"
"Distances"
"DistinctDegreeFactorization"
"DistinctExtensions"
"DistinguishedOrbitsOnSimples"
"DistinguishedRoot"
"DivideOutIntegers"
"DivisionFunction"
"DivisionPoints"
"DivisionPolynomial"
"DivisionPsi"
"Divisor"
"DivisorBasis"
"DivisorClassGroup"
"DivisorClassLattice"
"DivisorGroup"
"DivisorIdeal"
"DivisorMap"
"DivisorOfDegreeOne"
"DivisorSigma"
"DivisorToPoint"
"DivisorToSheaf"
"Divisors"
"Dodecacode"
"DoesDefineFan"
"Domain"
"DominantCharacter"
"DominantDiagonalForm"
"DominantLSPath"
"DominantWeight"
"DotProduct"
"Double"
"DoubleCoset"
"DoubleCosetRepresentatives"
"DoubleCosets"
"DoubleDual"
"DoubleGenusOneModel"
"DoublePlotkinSum"
"DoubleSpaceQuartic"
"DoublyCirculantQRCode"
"DoublyCirculantQRCodeGF4"
"Dual"
"DualAtkinLehner"
"DualAtkinLehnerOperator"
"DualBasisLattice"
"DualCoxeterForm"
"DualEuclideanWeightDistribution"
"DualFaceInDualFan"
"DualFan"
"DualGraphCanonical"
"DualGraphMultiplicities"
"DualHeckeOperator"
"DualIsogeny"
"DualKroneckerZ4"
"DualLattice"
"DualLeeWeightDistribution"
"DualMatrixToPerm"
"DualMatrixToWord"
"DualModularSymbol"
"DualMorphism"
"DualPartition"
"DualPrintName"
"DualQuotient"
"DualRepresentation"
"DualStarInvolution"
"DualVectorSpace"
"DualWeightDistribution"
"DualityAutomorphism"
"DumpVerbose"
"DuvalPuiseuxExpansion"
"DynkinDiagram"
"DynkinDigraph"
"E2NForm"
"E4Form"
"E6Form"
"E8gens"
"EARNS"
"ECCanonicalLiftTraceGen"
"ECCanonicalLiftTraceGenus0"
"ECCanonicalLiftTraceHyp"
"ECDeformationTrace"
"ECM"
"ECMFactoredOrder"
"ECMOrder"
"ECMSteps"
"ECPCShanks"
"EFAModuleMaps"
"EFAModules"
"EFASeries"
"EIS"
"EISDatabase"
"EModule"
"EVALInternal"
"Ealpha"
"EasyBasis"
"EasyClean"
"EasyIdeal"
"EchRat"
"EchelonForm"
"EchelonNullspace"
"EcheloniseWord"
"Echelonize"
"EchelonizeWord"
"EckardtPoints"
"EdgeCapacities"
"EdgeConnectivity"
"EdgeDeterminant"
"EdgeGroup"
"EdgeIndices"
"EdgeLabel"
"EdgeLabels"
"EdgeMultiplicity"
"EdgeSeparator"
"EdgeSet"
"EdgeUnion"
"EdgeWeights"
"Edges"
"EffectivePossibilities"
"EffectiveSubcanonicalCurves"
"EhrhartCoefficient"
"EhrhartCoefficients"
"EhrhartDeltaVector"
"EhrhartPolynomial"
"EhrhartSeries"
"EichlerInvariant"
"Eigenform"
"Eigenforms"
"Eigenspace"
"Eigenvalues"
"EigenvectorInTermsOfExpansionBasis"
"EightCoverings"
"EightDescent"
"Eisenstein"
"EisensteinData"
"EisensteinIntegerRing"
"EisensteinIntegers"
"EisensteinProjection"
"EisensteinSeries"
"EisensteinSubspace"
"EisensteinTwo"
"Element"
"ElementOfNorm"
"ElementOfOrder"
"ElementOffset"
"ElementSequence"
"ElementSet"
"ElementToMonoidSequence"
"ElementToSequence"
"ElementToSequencePad"
"ElementToTuple"
"ElementType"
"ElementaryAbelianGroup"
"ElementaryAbelianNormalSubgroup"
"ElementaryAbelianQuotient"
"ElementaryAbelianSection"
"ElementaryAbelianSeries"
"ElementaryAbelianSeriesCanonical"
"ElementaryAbelianSubgroups"
"ElementaryDivisors"
"ElementaryDivisorsMultiset"
"ElementarySymmetricPolynomial"
"ElementarySymmetricToCoefficients"
"ElementarySymmetricToPowerSums"
"ElementaryToHomogeneousMatrix"
"ElementaryToMonomialMatrix"
"ElementaryToPowerSumMatrix"
"ElementaryToSchurMatrix"
"Elements"
"EliasAsymptoticBound"
"EliasBound"
"Eliminate"
"EliminateGenerators"
"EliminateRedundancy"
"EliminateRedundantBasePoints"
"Elimination"
"EliminationIdeal"
"EllipticCurve"
"EllipticCurveDatabase"
"EllipticCurveDatabaseLarge"
"EllipticCurveFromjInvariant"
"EllipticCurveSearch"
"EllipticCurveWithGoodReductionSearch"
"EllipticCurveWithjInvariant"
"EllipticCurves"
"EllipticExponential"
"EllipticFactors"
"EllipticInvariants"
"EllipticLogarithm"
"EllipticPeriods"
"EllipticPoints"
"EltTup"
"Eltlist"
"Eltnum"
"Eltseq"
"EltseqPad"
"Embed"
"EmbedIntoMinimalCyclotomicField"
"EmbedPlaneCurveInP3"
"Embedding"
"EmbeddingMap"
"EmbeddingMatrix"
"EmbeddingSpace"
"Embeddings"
"EmptyBasket"
"EmptyCohomologyModule"
"EmptyDigraph"
"EmptyGraph"
"EmptyMultiDigraph"
"EmptyMultiGraph"
"EmptyNetwork"
"EmptyPolyhedron"
"EmptyScheme"
"EmptySubscheme"
"End"
"EndVertices"
"EndomorphismAlgebra"
"EndomorphismRing"
"Endomorphisms"
"EndpointWeight"
"EnterStauduhar"
"EntriesInterpolation"
"EntriesInterpolationExpansion"
"Entropy"
"Entry"
"Enumerate"
"EnumerationCost"
"EnumerationCostArray"
"Eof"
"EqualDFA"
"EqualDegreeFactorization"
"Equality"
"EqualizeDegrees"
"Equation"
"EquationOrder"
"EquationOrderFinite"
"EquationOrderInfinite"
"Equations"
"EquiDecomposition"
"EquidimensionalDecomposition"
"EquidimensionalPart"
"EquidimensionalRadical"
"EquitablePartition"
"EquivalentPoint"
"EquivalentQuotients"
"Erf"
"Erfc"
"Error"
"ErrorFunction"
"EstimateOrbit"
"Eta"
"EtaTPairing"
"EtaqPairing"
"EuclideanLeftDivision"
"EuclideanNorm"
"EuclideanRightDivision"
"EuclideanWeight"
"EuclideanWeightDistribution"
"EuclideanWeightEnumerator"
"EulerCharacteristic"
"EulerFactor"
"EulerFactorModChar"
"EulerFactorsByDeformation"
"EulerGamma"
"EulerGraphDatabase"
"EulerPhi"
"EulerPhiInverse"
"EulerProduct"
"EulerianGraphDatabase"
"EulerianNumber"
"Evaluate"
"EvaluateAt"
"EvaluateByPowerSeries"
"EvaluateClassGroup"
"EvaluateDerivatives"
"EvaluatePolynomial"
"EvaluationPowerSeries"
"EvenOrderElement"
"EvenSublattice"
"EvenWeightCode"
"EvenWeightSubcode"
"ExactConstantField"
"ExactExtension"
"ExactLattice"
"ExactLength"
"ExactQuotient"
"ExactScalarProduct"
"ExactValue"
"ExceptionalCurveIntersection"
"ExceptionalSelfIntersection"
"ExceptionalUnitOrbit"
"ExceptionalUnits"
"ExchangeElement"
"Exclude"
"ExcludedConjugate"
"ExcludedConjugates"
"ExistsConwayPolynomial"
"ExistsCosetSatisfying"
"ExistsCoveringStructure"
"ExistsExcludedConjugate"
"ExistsGroupData"
"ExistsModularCurveDatabase"
"ExistsNormalisingCoset"
"ExistsNormalizingCoset"
"Exp"
"Expand"
"ExpandBasis"
"ExpandQuaternionicBasis"
"ExpandToPrecision"
"ExpandZ"
"Experimental_InnerTwistOperator"
"ExplicitCoset"
"Exponent"
"ExponentDenominator"
"ExponentLattice"
"ExponentLaw"
"ExponentSum"
"ExponentialFieldExtension"
"ExponentialIntegral"
"ExponentialIntegralE1"
"Exponents"
"ExpurgateCode"
"ExpurgateWeightCode"
"Ext"
"ExtAlgebra"
"ExtGenerators"
"Extcont"
"Extend"
"ExtendBasicOrbit"
"ExtendBasicOrbits"
"ExtendBasis"
"ExtendCode"
"ExtendDynkinDiagramPermutation"
"ExtendEchelonForm"
"ExtendField"
"ExtendFieldCode"
"ExtendGaloisCocycle"
"ExtendGeodesic"
"ExtendIsometry"
"ExtendMultiplicativeGroup"
"ExtendPrimaryInvariants"
"ExtendedCategory"
"ExtendedCohomologyClass"
"ExtendedGreatestCommonDivisor"
"ExtendedGreatestCommonLeftDivisor"
"ExtendedGreatestCommonRightDivisor"
"ExtendedLeastCommonLeftMultiple"
"ExtendedOneCocycle"
"ExtendedPerfectCodeZ4"
"ExtendedReals"
"ExtendedRing"
"ExtendedSL"
"ExtendedSp"
"ExtendedType"
"ExtendedValuationRing"
"Extends"
"Extension"
"ExtensionCategory"
"ExtensionClasses"
"ExtensionExponents"
"ExtensionMorphism"
"ExtensionNumbers"
"ExtensionPrimes"
"ExtensionProcess"
"ExtensionsOfElementaryAbelianGroup"
"ExtensionsOfSolubleGroup"
"Exterior"
"ExteriorAlgebra"
"ExteriorPower"
"ExteriorPowerNaturalModule"
"ExteriorSquare"
"ExternalLines"
"ExtraAutomorphism"
"ExtraSpecialAction"
"ExtraSpecialBasis"
"ExtraSpecialGroup"
"ExtraSpecialNormaliser"
"ExtraSpecialParameters"
"ExtractBlock"
"ExtractBlockRange"
"ExtractDiagonalBlocks"
"ExtractGenerators"
"ExtractGroup"
"ExtractRep"
"ExtraspecialPair"
"ExtraspecialPairs"
"ExtraspecialSigns"
"ExtremalLieAlgebra"
"ExtremalRayContraction"
"ExtremalRayContractionDivisor"
"ExtremalRayContractions"
"ExtremalRays"
"F4O"
"FFPatchIndex"
"FGIntersect"
"FPGroup"
"FPGroupColouring"
"FPGroupStrong"
"FPQuotient"
"Face"
"FaceFunction"
"FaceIndices"
"FaceSupportedBy"
"Faces"
"FacesContaining"
"FacetIndices"
"Facets"
"Facint"
"Facpol"
"Factor"
"FactorBasis"
"FactorBasisCreate"
"FactorBasisVerify"
"FactoredCarmichaelLambda"
"FactoredCharacteristicPolynomial"
"FactoredChevalleyGroupOrder"
"FactoredClassicalGroupOrder"
"FactoredDefiningPolynomials"
"FactoredDiscriminant"
"FactoredEulerPhi"
"FactoredEulerPhiInverse"
"FactoredHeckePolynomial"
"FactoredIndex"
"FactoredInverseDefiningPolynomials"
"FactoredMCPolynomials"
"FactoredMinimalAndCharacteristicPolynomials"
"FactoredMinimalPolynomial"
"FactoredModulus"
"FactoredOrder"
"FactoredOrderGL"
"FactoredProjectiveOrder"
"Factorial"
"FactorialValuation"
"Factorisation"
"FactorisationOverSplittingField"
"FactorisationToInteger"
"FactorisationToPolynomial"
"Factorization"
"FactorizationOfQuotient"
"FactorizationOverSplittingField"
"FactorizationToInteger"
"FaithfulModule"
"FakeIsogenySelmerSet"
"FakeProjectiveSpace"
"Falpha"
"FaltingsHeight"
"FamilyOfMultivaluedSections"
"Fan"
"FanOfAffineSpace"
"FanOfFakeProjectiveSpace"
"FanOfWPS"
"Fano"
"FanoBaseGenus"
"FanoBaskets"
"FanoDatabase"
"FanoGenus"
"FanoIndex"
"FanoIsolatedBaskets"
"FanoToRecord"
"FareySymbol"
"FastRoots"
"FewGenerators"
"Fibonacci"
"FibonacciGroup"
"Field"
"FieldAutomorphism"
"FieldCategory"
"FieldCharacteristic"
"FieldExponent"
"FieldMorphism"
"FieldOfDefinition"
"FieldOfFractions"
"FieldOfGeometricIrreducibility"
"FieldSize"
"FileProcess"
"FilterProcess"
"FilterVector"
"FindAsocAlgebraRep"
"FindChevalleyBasis"
"FindChevalleyBasisDiagonal"
"FindChevalleyBasisQuad"
"FindCommonEmbeddings"
"FindDependencies"
"FindEntries"
"FindFirstGenerators"
"FindGenerators"
"FindIndexes"
"FindLieAlgebra"
"FindN"
"FindPowerSeries"
"FindPowerSeriesForChabauty"
"FindRelations"
"FindRelationsInCWIFormat"
"FindSplitElement"
"FindWord"
"FindXYH"
"FineEquidimensionalDecomposition"
"FiniteAffinePlane"
"FiniteDivisor"
"FiniteField"
"FiniteLieAlgebra"
"FiniteProjectivePlane"
"FiniteSplit"
"FireCode"
"FirstCohomology"
"FirstIndexOfColumn"
"FirstIndexOfRow"
"FirstPoleElement"
"FirstWeights"
"FischerSubgroup"
"FittingGroup"
"FittingIdeal"
"FittingIdeals"
"FittingLength"
"FittingSeries"
"FittingSubgroup"
"Fix"
"FixedArc"
"FixedField"
"FixedGroup"
"FixedPoints"
"FixedSubspaceToPolyhedron"
"FlagComplex"
"Flat"
"FlatProduct"
"FlatsNullMatrix"
"Flexes"
"Flip"
"Floor"
"Flow"
"Flush"
"Form"
"FormType"
"FormalChain"
"FormalGroupHomomorphism"
"FormalGroupLaw"
"FormalLog"
"FormalPoint"
"FormalSet"
"Format"
"FourCoverPullback"
"FourDescent"
"FourToTwoCovering"
"FourierMotzkin"
"FractionalPart"
"FrattiniQuotientRank"
"FrattiniSubgroup"
"FreeAbelianGroup"
"FreeAbelianQuotient"
"FreeAlgebra"
"FreeGenerators"
"FreeGroup"
"FreeGroupIndex"
"FreeGroupIsIn"
"FreeLieAlgebra"
"FreeMonoid"
"FreeNilpotentGroup"
"FreeProduct"
"FreeResolution"
"FreeSemigroup"
"FreefValues"
"Frobenius"
"FrobeniusActionOnPoints"
"FrobeniusActionOnReducibleFiber"
"FrobeniusActionOnTrivialLattice"
"FrobeniusAutomorphism"
"FrobeniusAutomorphisms"
"FrobeniusElement"
"FrobeniusEndomorphism"
"FrobeniusForm"
"FrobeniusFormAlternating"
"FrobeniusImage"
"FrobeniusMap"
"FrobeniusPolynomial"
"FrobeniusTraceDirect"
"FrobeniusTracesToWeilPolynomials"
"FromAnalyticJacobian"
"FromLiE"
"FuchsianGroup"
"FuchsianMatrixRepresentation"
"FullCharacteristicPolynomial"
"FullCone"
"FullCorootLattice"
"FullDimension"
"FullDirichletGroup"
"FullMinimalPolynomialTest"
"FullModule"
"FullPrimaryInvariantSpaces"
"FullRootLattice"
"Function"
"FunctionDegree"
"FunctionField"
"FunctionFieldCategory"
"FunctionFieldDatabase"
"FunctionFieldDifferential"
"FunctionFieldDivisor"
"FunctionFieldPlace"
"FunctionFields"
"Functor"
"FundamentalClassGroup"
"FundamentalClassGroupStructure"
"FundamentalClassNumber"
"FundamentalClosure"
"FundamentalCoweights"
"FundamentalDiscriminant"
"FundamentalDomain"
"FundamentalElement"
"FundamentalGroup"
"FundamentalInvariants"
"FundamentalInvariantsKing"
"FundamentalKernel"
"FundamentalQuotient"
"FundamentalUnit"
"FundamentalUnits"
"FundamentalVolume"
"FundamentalWeights"
"G2"
"G2Invariants"
"G2Reduced"
"G2ToIgusaInvariants"
"GCD"
"GCDSup"
"GCLD"
"GCRD"
"GF"
"GHom"
"GHomOverCentralizingField"
"GL"
"GLB"
"GLNormaliser"
"GModule"
"GModuleAction"
"GModuleConductorOfCoefficientField"
"GModuleLinear"
"GModulePrimes"
"GO"
"GOMinus"
"GOPlus"
"GPCGroup"
"GR"
"GRBsktToRec"
"GRCrvSToRec"
"GRHBound"
"GRPtSToRec"
"GRSCode"
"GRSchToRec"
"GSShortOrbitSubset"
"GSShortSubset"
"GSet"
"GSetFromIndexed"
"GU"
"GabidulinCode"
"GallagerCode"
"GaloisActionOnLines"
"GaloisCohomology"
"GaloisConjugacyRepresentatives"
"GaloisConjugate"
"GaloisData"
"GaloisField"
"GaloisGroup"
"GaloisGroupInvariant"
"GaloisImage"
"GaloisMultiplicities"
"GaloisOrbit"
"GaloisProof"
"GaloisQuotient"
"GaloisRepresentation"
"GaloisRing"
"GaloisRoot"
"GaloisSplittingField"
"GaloisSubfieldTower"
"GaloisSubgroup"
"Gamma"
"Gamma0"
"Gamma1"
"GammaAction"
"GammaActionOnSimples"
"GammaCorootSpace"
"GammaD"
"GammaFactors"
"GammaGroup"
"GammaOrbitOnRoots"
"GammaOrbitsOnRoots"
"GammaOrbitsRepresentatives"
"GammaRootSpace"
"GammaUpper0"
"GammaUpper1"
"GapNumbers"
"GaussNumber"
"GaussReduce"
"GaussReduceGram"
"GaussSum"
"GaussianBinomial"
"GaussianFactorial"
"GaussianIntegerRing"
"GaussianIntegers"
"Gcd"
"GcdSup"
"GcdWithLoss"
"GegenbauerPolynomial"
"GenCrvGrpData"
"GenModuleProject"
"GeneralLinearGroup"
"GeneralOrthogonalGroup"
"GeneralOrthogonalGroupMinus"
"GeneralOrthogonalGroupPlus"
"GeneralReeTorusElement"
"GeneralUnitaryGroup"
"GeneralisedEquationOrder"
"GeneralisedNorm"
"GeneralisedRowReduction"
"GeneralisedWallForm"
"GeneralizedAGCode"
"GeneralizedAlgebraicGeometricCode"
"GeneralizedFibonacciNumber"
"GeneralizedNorm"
"GeneralizedSrivastavaCode"
"GenerateGraphs"
"GeneratepGroups"
"GeneratingPolynomial"
"GeneratingSet"
"GeneratingSubfields"
"GeneratingSubfieldsLattice"
"GeneratingWords"
"Generator"
"GeneratorMatrix"
"GeneratorNumber"
"GeneratorOrder"
"GeneratorPolynomial"
"GeneratorStructure"
"Generators"
"GeneratorsOverBaseRing"
"GeneratorsSequence"
"GeneratorsSequenceOverBaseRing"
"Generic"
"GenericAbelianGroup"
"GenericDatabase"
"GenericGenus"
"GenericGroup"
"GenericModel"
"GenericPoint"
"GenericPolynomial"
"Genus"
"GenusContribution"
"GenusDistribution"
"GenusField"
"GenusOneModel"
"GenusRepresentatives"
"GenusX0N"
"GenusX0NQuotient"
"GenusX1N"
"Geodesic"
"GeodesicExists"
"Geodesics"
"GeodesicsIntersection"
"GeometricAutomorphismGroup"
"GeometricAutomorphismGroupClassification"
"GeometricGenus"
"GeometricGenusOfDesingularization"
"GeometricGenusUsingToricGeometry"
"GeometricMordellWeilLattice"
"GeometricPicardGroup"
"GeometricSupport"
"GeometricTorsionBound"
"Germ"
"GetAssertions"
"GetAttributes"
"GetAutoColumns"
"GetAutoCompact"
"GetBeep"
"GetBraidRelations"
"GetCells"
"GetChild"
"GetChildren"
"GetClassGroupBoundFactorBasis"
"GetClassGroupBoundGenerators"
"GetColumns"
"GetConicSubfieldMethodDegreeBound"
"GetCurrentDirectory"
"GetDefaultRealField"
"GetEchoInput"
"GetElementPrintFormat"
"GetEnv"
"GetEnvironmentValue"
"GetEvaluationComparison"
"GetForceCFP"
"GetHelpExternalBrowser"
"GetHelpExternalSystem"
"GetHelpUseExternal"
"GetHistorySize"
"GetIgnoreEof"
"GetIgnorePrompt"
"GetIgnoreSpaces"
"GetIloadAllowEsc"
"GetIndent"
"GetIntegerNewtonPolygon"
"GetIntrinsicName"
"GetKantPrecision"
"GetKaratsubaThreshold"
"GetLibraries"
"GetLibraryRoot"
"GetLineEditor"
"GetMPCVersion"
"GetMPFRVersion"
"GetMS"
"GetMaximumMemoryUsage"
"GetMemoryExtensionSize"
"GetMemoryLimit"
"GetMemoryUsage"
"GetModule"
"GetModules"
"GetMonoidNewtonPolygon"
"GetNthreads"
"GetParent"
"GetPath"
"GetPrecision"
"GetPresentation"
"GetPreviousSize"
"GetPrimes"
"GetPrintLevel"
"GetPrompt"
"GetQuotient"
"GetRep"
"GetRows"
"GetSeed"
"GetShellCompletion"
"GetShowPromptAlways"
"GetStoredFactors"
"GetTempDir"
"GetTraceback"
"GetTransGroupIDMany"
"GetUserProcessData"
"GetVerbose"
"GetVersion"
"GetViMode"
"Getc"
"Getpid"
"Gets"
"Getuid"
"Getvecs"
"GewirtzGraph"
"GilbertVarshamovAsymptoticBound"
"GilbertVarshamovBound"
"GilbertVarshamovLinearBound"
"Girth"
"GirthCycle"
"GlobalSectionSubmodule"
"GlobalUnitGroup"
"Glue"
"GoethalsCode"
"GoethalsDelsarteCode"
"GolayCode"
"GolayCodeZ4"
"GoodBasePoints"
"GoodDescription"
"GoodLDPCEnsemble"
"GoppaCode"
"GoppaDesignedDistance"
"GorensteinClosure"
"GorensteinIndex"
"Graded"
"GradedBettiTable"
"GradedCokernel"
"GradedCommutativeRing"
"GradedCone"
"GradedDirectSum"
"GradedDual"
"GradedDualComplex"
"GradedDualWithHoms"
"GradedFreeModule"
"GradedHoms"
"GradedIdentityMap"
"GradedImage"
"GradedKernel"
"GradedMinimalFreeResolution"
"GradedModule"
"GradedRingData"
"GradedRingDatabase"
"GradedToricLattice"
"GradientVector"
"GradientVectors"
"Grading"
"Gradings"
"GramIsomorphismInvariants"
"GramLength"
"GramMatrix"
"GramReduction"
"GramSchmidtProcess"
"GramSchmidtReduce"
"GramSchmidtReduction"
"Graph"
"GraphAutomorphism"
"GraphInBytes"
"GraphSizeInBytes"
"Graphs"
"GrayMap"
"GrayMapImage"
"GreatestCommonDivisor"
"GreatestCommonLeftDivisor"
"GreatestCommonRightDivisor"
"GreatestLowerBound"
"GriesmerBound"
"GriesmerLengthBound"
"GriesmerMinimumWeightBound"
"Groebner"
"GroebnerBasis"
"GroebnerBasisUnreduced"
"GroebnerWalk"
"GrossenCheck"
"Grossencharacter"
"GroundField"
"Group"
"GroupAlgebra"
"GroupAlgebraAsStarAlgebra"
"GroupData"
"GroupGenerators"
"GroupIdeal"
"GroupOfLieType"
"GroupOfLieTypeFactoredOrder"
"GroupOfLieTypeHomomorphism"
"GroupOfLieTypeOrder"
"GroupType"
"Groupsp7"
"GrowthFunction"
"GrowthFunctionDFA"
"GrowthFunctionOld"
"GrpFPToCox"
"GrpPermToCox"
"GuessAltsymDegree"
"H2_G_A"
"H2_G_QmodZ"
"HBChevalleyGroupOrder"
"HBClassicalGroupOrder"
"HBinomial"
"HKZ"
"HKZGram"
"HadamardAutomorphismGroup"
"HadamardCanonicalForm"
"HadamardCodeZ4"
"HadamardColumnDesign"
"HadamardDatabase"
"HadamardDatabaseInformation"
"HadamardDatabaseInformationEmpty"
"HadamardEltseq"
"HadamardGraph"
"HadamardInvariant"
"HadamardMatrixFromInteger"
"HadamardMatrixToInteger"
"HadamardNormalize"
"HadamardRowDesign"
"HadamardTransformation"
"HalfIntegralWeightForms"
"HalfspaceToPolyhedron"
"HallSubgroup"
"HamiltonianLieAlgebra"
"HammingAsymptoticBound"
"HammingCode"
"HammingWeightEnumerator"
"HarmonicNumber"
"HasAdditionAlgorithm"
"HasAffinePatch"
"HasAllPQuotientsMetacyclic"
"HasAllRootsOnUnitCircle"
"HasAlmostUniqueLocalParametrization"
"HasAlmostUniqueLocalUniformizer"
"HasAssociatedNewSpace"
"HasAttribute"
"HasAutomorphisms"
"HasBSGS"
"HasBaseExtension"
"HasBaseExtensionMorphisms"
"HasBlockDiagMat"
"HasC6Decomposition"
"HasCM"
"HasClique"
"HasClosedCosetTable"
"HasCoercion"
"HasComplement"
"HasCompleteCosetTable"
"HasComplexConjugate"
"HasComplexMultiplication"
"HasComposition"
"HasCompositionSequence"
"HasCompositionTree"
"HasComputableAbelianQuotient"
"HasComputableLCS"
"HasComputableSubgroups"
"HasConic"
"HasCoordinates"
"HasDecomposition"
"HasDefinedModuleMap"
"HasDefinedTerm"
"HasDefiningMap"
"HasDenseAndSparseRep"
"HasDenseRep"
"HasDenseRepOnly"
"HasEasyIdeal"
"HasEchelonForm"
"HasElementaryBasis"
"HasEmbedding"
"HasExtension"
"HasFactorisation"
"HasFactorization"
"HasFiniteAQ"
"HasFiniteAbelianQuotient"
"HasFiniteDimension"
"HasFiniteKernel"
"HasFiniteOrder"
"HasFixedBaseObject"
"HasFrobeniusEndomorphism"
"HasFunctionField"
"HasGCD"
"HasGNB"
"HasGrevlexOrder"
"HasGroebnerBasis"
"HasHomogeneousBasis"
"HasIdentity"
"HasImage"
"HasInclusion"
"HasIndexOne"
"HasIndexOneEverywhereLocally"
"HasInfiniteComputableAbelianQuotient"
"HasInfinitePSL2Quotient"
"HasIntegralPoint"
"HasIntersectionProperty"
"HasIntersectionPropertyN"
"HasInverse"
"HasIrregularFibres"
"HasIsomorphismExtension"
"HasIsomorphismExtensions"
"HasIsomorphisms"
"HasIsotropicVector"
"HasKnownInverse"
"HasLeviSubalgebra"
"HasLine"
"HasLinearGrayMapImage"
"HasMatrix"
"HasMonomialBasis"
"HasMorphism"
"HasMorphismAutomorphism"
"HasMorphismAutomorphisms"
"HasMorphismFromImages"
"HasMorphismFromImagesAndBaseMorphism"
"HasMultiplicityOne"
"HasNegativeWeightCycle"
"HasNonSingularFibres"
"HasNonsingularPoint"
"HasOddDegreeModel"
"HasOne"
"HasOnlyOrdinarySingularities"
"HasOnlyOrdinarySingularitiesMonteCarlo"
"HasOrder"
"HasOutputFile"
"HasPRoot"
"HasParallelClass"
"HasParallelism"
"HasPlace"
"HasPoint"
"HasPointsEverywhereLocally"
"HasPointsOverExtension"
"HasPolynomial"
"HasPolynomialFactorization"
"HasPolynomialGroebnerBasis"
"HasPolynomialResultant"
"HasPowerSumBasis"
"HasPreimage"
"HasPreimageFunction"
"HasProjectiveDerivation"
"HasRandomPlace"
"HasRationalPoint"
"HasRationalPointUsingSubfield"
"HasRationalSolutions"
"HasReducedFibres"
"HasResolution"
"HasRestriction"
"HasResultant"
"HasRightCancellation"
"HasRoot"
"HasRootOfUnity"
"HasSchurBasis"
"HasSignature"
"HasSingularPointsOverExtension"
"HasSingularVector"
"HasSparseRep"
"HasSparseRepOnly"
"HasSquareSha"
"HasSupplement"
"HasTwistedHopfStructure"
"HasValidCosetTable"
"HasValidIndex"
"HasWeakIntersectionProperty"
"HasZeroDerivation"
"Hash"
"HasseMinkowskiInvariant"
"HasseMinkowskiInvariants"
"HasseWittInvariant"
"HeckeAlgebra"
"HeckeAlgebraFields"
"HeckeAlgebraZBasis"
"HeckeBound"
"HeckeCharacter"
"HeckeCharacterGroup"
"HeckeCharacteristicPolynomial"
"HeckeCorrespondence"
"HeckeEigenvalue"
"HeckeEigenvalueBound"
"HeckeEigenvalueField"
"HeckeEigenvalueRing"
"HeckeFieldSpan"
"HeckeImages"
"HeckeImagesAll"
"HeckeLift"
"HeckeMatrix"
"HeckeMatrixBianchi"
"HeckeOperator"
"HeckeOperatorModSym"
"HeckePolynomial"
"HeckeSpan"
"HeckeTrace"
"HeegnerDiscriminants"
"HeegnerForms"
"HeegnerIndex"
"HeegnerPoint"
"HeegnerPointNumberOfTerms"
"HeegnerPoints"
"HeegnerTorsionElement"
"Height"
"HeightConstant"
"HeightOnAmbient"
"HeightPairing"
"HeightPairingLattice"
"HeightPairingMatrix"
"HeightZeroSublattice"
"HeilbronnCremona"
"HeilbronnMerel"
"HenselLift"
"HenselProcess"
"HermiteConstant"
"HermiteForm"
"HermiteNormalFormProcess"
"HermiteNormalForms"
"HermiteNumber"
"HermitePolynomial"
"HermitianAutomorphismGroup"
"HermitianCartanMatrix"
"HermitianCode"
"HermitianCurve"
"HermitianDual"
"HermitianFunctionField"
"HermitianTranspose"
"HesseCovariants"
"HesseModel"
"HessePolynomials"
"HessenbergForm"
"Hessian"
"HessianMatrix"
"Hexacode"
"HighMap"
"HighProduct"
"HighRankExceptionalStdGens"
"HighestCoroot"
"HighestLongCoroot"
"HighestLongRoot"
"HighestRoot"
"HighestShortCoroot"
"HighestShortRoot"
"HighestWeightModule"
"HighestWeightRepresentation"
"HighestWeightSpace"
"HighestWeightVectors"
"HighestWeights"
"HighestWeightsAndVectors"
"Hilbert90"
"HilbertBasis"
"HilbertCharacterSubgroup"
"HilbertClassField"
"HilbertClassPolynomial"
"HilbertCoefficient"
"HilbertCoefficients"
"HilbertCuspForms"
"HilbertDeltaVector"
"HilbertDenominator"
"HilbertFunction"
"HilbertGroebnerBasis"
"HilbertIdeal"
"HilbertMatrix"
"HilbertNumerator"
"HilbertNumeratorBettiNumbers"
"HilbertPolynomial"
"HilbertPolynomialOfCurve"
"HilbertSeries"
"HilbertSeriesApproximation"
"HilbertSeriesMultipliedByMinimalDenominator"
"HilbertSpace"
"HilbertSymbol"
"HirschNumber"
"HirzebruchSurface"
"Holes"
"Holomorph"
"Hom"
"HomAdjoints"
"HomGenerators"
"HomogeneousBlock"
"HomogeneousComponent"
"HomogeneousComponents"
"HomogeneousModuleTest"
"HomogeneousModuleTestBasis"
"HomogeneousRadical"
"HomogeneousToElementaryMatrix"
"HomogeneousToMonomialMatrix"
"HomogeneousToPowerSumMatrix"
"HomogeneousToSchurMatrix"
"Homogenization"
"HomologicalDimension"
"Homology"
"HomologyBasis"
"HomologyData"
"HomologyGenerators"
"HomologyGroup"
"HomologyOfChainComplex"
"Homomorphism"
"Homomorphisms"
"HomomorphismsLM"
"HomomorphismsProcess"
"HookLength"
"HorizontalFunction"
"HorizontalJoin"
"HorizontalVertices"
"Hull"
"HyperbolicBasis"
"HyperbolicCoxeterGraph"
"HyperbolicCoxeterMatrix"
"HyperbolicPair"
"HyperbolicSplitting"
"Hypercenter"
"Hypercentre"
"HyperellipticCurve"
"HyperellipticCurveFromG2Invariants"
"HyperellipticCurveFromIgusaClebsch"
"HyperellipticCurveOfGenus"
"HyperellipticInfiniteIntegral0"
"HyperellipticIntegral"
"HyperellipticInvolution"
"HyperellipticPolynomial"
"HyperellipticPolynomials"
"HypergeometricSeries"
"HypergeometricSeries2F1"
"HypergeometricU"
"Hyperplane"
"HyperplaneAtInfinity"
"HyperplaneSectionDivisor"
"HyperplaneToPolyhedron"
"ISA"
"ISABaseField"
"Id"
"IdDataNLAC"
"IdDataSLAC"
"Ideal"
"IdealFactorisation"
"IdealOfSupport"
"IdealQuotient"
"IdealWithFixedBasis"
"Idealiser"
"Idealizer"
"Ideals"
"IdealsAreEqual"
"IdealsUpTo"
"Idempotent"
"IdempotentActionGenerators"
"IdempotentGenerators"
"IdempotentPositions"
"Idempotents"
"IdenticalAmbientSpace"
"IdentificationNumber"
"IdentifyAlmostSimpleGroup"
"IdentifyGroup"
"IdentifyOneCocycle"
"IdentifyTwoCocycle"
"IdentifyZeroCocycle"
"Identity"
"IdentityAutomorphism"
"IdentityFieldMorphism"
"IdentityHomomorphism"
"IdentityIsogeny"
"IdentityMap"
"IdentityMatrix"
"IdentityMorphism"
"IdentitySparseMatrix"
"IdentityTransformation"
"IgusaClebschInvariants"
"IgusaClebschToClebsch"
"IgusaInvariants"
"IgusaToG2Invariants"
"IharaBound"
"Ilog"
"Ilog2"
"Im"
"Image"
"ImageBasis"
"ImageFan"
"ImageFromMat"
"ImageFunction"
"ImageOfComponentGroupOfJ0N"
"ImageSystem"
"ImageWithBasis"
"Imaginary"
"ImplicitFunction"
"Implicitization"
"ImportExternalMorphism"
"ImprimitiveAction"
"ImprimitiveBasis"
"ImprimitiveReflectionGroup"
"ImprimitiveReflectionGroupOld"
"ImproveAutomorphismGroup"
"InDegree"
"InEdge"
"InNeighbors"
"InNeighbours"
"IncidenceDigraph"
"IncidenceGeometry"
"IncidenceGraph"
"IncidenceMatrix"
"IncidenceStructure"
"IncidentEdges"
"Include"
"IncludeAutomorphism"
"IncludeWeight"
"InclusionMap"
"Inclusions"
"IndCond"
"IndecomposableSummands"
"IndentPop"
"IndentPush"
"IndependenceNumber"
"IndependentGenerators"
"IndependentUnits"
"IndeterminacyLocus"
"Index"
"IndexCalculus"
"IndexCalculusMatrix"
"IndexFormEquation"
"IndexOfFirstWhiteSpace"
"IndexOfNonWhiteSpace"
"IndexOfPartition"
"IndexOfSpeciality"
"IndexToElement"
"IndexedCoset"
"IndexedSet"
"IndexedSetToSequence"
"IndexedSetToSet"
"Indicator"
"Indices"
"IndicialPolynomial"
"IndivisibleSubdatum"
"IndivisibleSubsystem"
"InduceWG"
"InduceWGtable"
"InducedAutomorphism"
"InducedDivisorMap"
"InducedDivisorMap_old"
"InducedGammaGroup"
"InducedMap"
"InducedMapOnHomology"
"InducedOneCocycle"
"InducedPermutation"
"Induction"
"InductionCondensation"
"InductionSpin"
"IneffectiveDivisorToSheaf"
"IneffectivePossibilities"
"IneffectiveRiemannRochBasis"
"IneffectiveSubcanonicalCurves"
"Inequalities"
"InertiaDegree"
"InertiaField"
"InertiaGroup"
"InertialElement"
"Infimum"
"InfiniteDivisor"
"InfiniteOrderTest"
"InfinitePart"
"InfinitePlaces"
"InfinitePolynomial"
"InfiniteSum"
"Infinity"
"InflationMap"
"InflationMapImage"
"InflectionPoints"
"InformationRate"
"InformationSet"
"InformationSpace"
"InitProspector"
"InitialCoefficients"
"InitialVertex"
"InitialiseProspector"
"Initialize"
"InitializeBase"
"InitializeEvaluation"
"InitializeGaussianQuadrature"
"Injection"
"Injections"
"InjectiveHull"
"InjectiveModule"
"InjectiveResolution"
"InjectiveSyzygyModule"
"InnerAutomorphism"
"InnerAutomorphismGroup"
"InnerFaces"
"InnerGenerators"
"InnerNormal"
"InnerNormals"
"InnerProduct"
"InnerProductMatrix"
"InnerShape"
"InnerSlopes"
"InnerTwistOperator"
"InnerTwists"
"InnerVertices"
"InseparableDegree"
"Insert"
"InsertBasePoint"
"InsertBlock"
"InsertVertex"
"InstallInverseConstructor"
"Instance"
"InstancesForDimensions"
"IntegerMatrixEntryBound"
"IntegerRelation"
"IntegerRing"
"IntegerSolutionVariables"
"IntegerToSequence"
"IntegerToString"
"Integers"
"Integral"
"IntegralBasis"
"IntegralBasisLattice"
"IntegralBasisMinus"
"IntegralBasisPlus"
"IntegralClosure"
"IntegralDecomposition"
"IntegralGramMatrix"
"IntegralGroup"
"IntegralHeckeOperator"
"IntegralHomology"
"IntegralMapping"
"IntegralMatrix"
"IntegralMatrixByRows"
"IntegralMatrixGroupDatabase"
"IntegralMatrixOverQ"
"IntegralModel"
"IntegralModule"
"IntegralMultiple"
"IntegralNormEquation"
"IntegralPart"
"IntegralPoints"
"IntegralQuarticPoints"
"IntegralRepresentation"
"IntegralSplit"
"IntegralUEA"
"IntegralUEAlgebra"
"IntegralUniversalEnvelopingAlgebra"
"IntegralVector"
"Interior"
"InteriorPoints"
"Interpolation"
"IntersectKernels"
"Intersection"
"IntersectionArray"
"IntersectionCardinality"
"IntersectionForm"
"IntersectionForms"
"IntersectionGroup"
"IntersectionMatrix"
"IntersectionNumber"
"IntersectionOfImages"
"IntersectionPairing"
"IntersectionPairingIntegral"
"IntersectionPoints"
"IntersectionWithNormalSubgroup"
"IntersectionZBasis"
"Intseq"
"InvHom"
"InvariantBasis"
"InvariantFactors"
"InvariantField"
"InvariantForm"
"InvariantForms"
"InvariantHermitianForms"
"InvariantModule"
"InvariantQuaternionicForms"
"InvariantRepresentation"
"InvariantRing"
"Invariants"
"InvariantsMetacyclicPGroup"
"InvariantsOfDegree"
"Inverse"
"InverseDefiningPolynomials"
"InverseErf"
"InverseJeuDeTaquin"
"InverseKrawchouk"
"InverseMattsonSolomonTransform"
"InverseMod"
"InverseRSKCorrespondenceDoubleWord"
"InverseRSKCorrespondenceMatrix"
"InverseRSKCorrespondenceSingleWord"
"InverseRoot"
"InverseRowInsert"
"InverseSqrt"
"InverseSquareRoot"
"InverseTransformation"
"InverseWordMap"
"Involution"
"InvolutionClassicalGroupEven"
"Iroot"
"IrrationalPart"
"IrreducibleCartanMatrix"
"IrreducibleComponents"
"IrreducibleCoxeterGraph"
"IrreducibleCoxeterGroup"
"IrreducibleCoxeterMatrix"
"IrreducibleDynkinDigraph"
"IrreducibleFiniteStandardParabolicSubgroups"
"IrreducibleLowTermGF2Polynomial"
"IrreducibleMatrix"
"IrreducibleMatrixGroup"
"IrreducibleModule"
"IrreducibleModules"
"IrreducibleModulesBurnside"
"IrreducibleModulesInit"
"IrreducibleModulesSchur"
"IrreduciblePolynomial"
"IrreducibleReflectionGroup"
"IrreducibleRepresentationsInit"
"IrreducibleRepresentationsSchur"
"IrreducibleRootDatum"
"IrreducibleRootSystem"
"IrreducibleSecondaryInvariants"
"IrreducibleSimpleSubalgebraTreeSU"
"IrreducibleSimpleSubalgebrasOfSU"
"IrreducibleSolubleSubgroups"
"IrreducibleSparseGF2Polynomial"
"IrreducibleSubgroups"
"IrreducibleTrinomialsDatabase"
"IrreducibleWord"
"IrregularLDPCEnsemble"
"IrregularValues"
"IrregularVertices"
"Irregularity"
"IrrelevantComponents"
"IrrelevantGenerators"
"IrrelevantIdeal"
"Is2T1"
"IsAModule"
"IsAPN"
"IsAbelian"
"IsAbelianByFinite"
"IsAbelianVariety"
"IsAbsoluteField"
"IsAbsoluteOrder"
"IsAbsolutelyIrreducible"
"IsAbstractCartanMatrix"
"IsAcceptedWordDFA"
"IsAdditive"
"IsAdditiveOrder"
"IsAdditiveProjective"
"IsAdjoint"
"IsAffine"
"IsAffineLinear"
"IsAlgebraic"
"IsAlgebraicDifferentialField"
"IsAlgebraicField"
"IsAlgebraicGeometric"
"IsAlgebraicallyDependent"
"IsAlgebraicallyIsomorphic"
"IsAlmostIntegral"
"IsAlternating"
"IsAltsym"
"IsAmbient"
"IsAmbientSpace"
"IsAmple"
"IsAnalyticallyIrreducible"
"IsAnisotropic"
"IsAnticanonical"
"IsAntisymmetric"
"IsArc"
"IsArithmeticWeight"
"IsArithmeticallyCohenMacaulay"
"IsAssociative"
"IsAttachedToModularSymbols"
"IsAttachedToNewform"
"IsAutomatic"
"IsAutomaticGroup"
"IsAutomorphism"
"IsBalanced"
"IsBase64Encoded"
"IsBasePointFree"
"IsBiconnected"
"IsBig"
"IsBijective"
"IsBipartite"
"IsBlock"
"IsBlockTransitive"
"IsBogomolovUnstable"
"IsBoundary"
"IsBravaisEquivalent"
"IsCM"
"IsCalabiYauNumericalSeries"
"IsCanonical"
"IsCanonicalWithTwist"
"IsCapacitated"
"IsCartanEquivalent"
"IsCartanMatrix"
"IsCartanSubalgebra"
"IsCartier"
"IsCategory"
"IsCentral"
"IsCentralByFinite"
"IsCentralCollineation"
"IsChainMap"
"IsCharacter"
"IsChevalleyBasis"
"IsClassicalType"
"IsCluster"
"IsCoercible"
"IsCoercibleGrpLie"
"IsCohenMacaulay"
"IsCokernelTorsionFree"
"IsCollinear"
"IsCommutative"
"IsCompactHyperbolic"
"IsCompatible"
"IsComplete"
"IsCompletelyReducible"
"IsComplex"
"IsComponent"
"IsConcurrent"
"IsConditioned"
"IsConfluent"
"IsCongruence"
"IsCongruent"
"IsConic"
"IsConjugate"
"IsConjugateSubgroup"
"IsConnected"
"IsConnectedFibre"
"IsConsistent"
"IsConstaCyclic"
"IsConstant"
"IsConstantCurve"
"IsConway"
"IsCoprime"
"IsCorootSpace"
"IsCoxeterAffine"
"IsCoxeterCompactHyperbolic"
"IsCoxeterFinite"
"IsCoxeterGraph"
"IsCoxeterHyperbolic"
"IsCoxeterIrreducible"
"IsCoxeterIsomorphic"
"IsCoxeterMatrix"
"IsCrystallographic"
"IsCubeHeuristically"
"IsCubicModel"
"IsCurve"
"IsCusp"
"IsCuspidal"
"IsCuspidalNewform"
"IsCyclic"
"IsCyclotomic"
"IsCyclotomicPolynomial"
"IsDecomposable"
"IsDefault"
"IsDeficient"
"IsDefined"
"IsDefinedByQuadric"
"IsDefinedByQuadrics"
"IsDefinite"
"IsDegenerate"
"IsDelPezzo"
"IsDenselyRepresented"
"IsDesarguesian"
"IsDesign"
"IsDiagonal"
"IsDifferenceSet"
"IsDifferentialField"
"IsDifferentialIdeal"
"IsDifferentialLaurentSeriesRing"
"IsDifferentialOperatorRing"
"IsDifferentialRing"
"IsDifferentialRingElement"
"IsDifferentialSeriesRing"
"IsDirectSum"
"IsDirectSummand"
"IsDirected"
"IsDiscriminant"
"IsDisjoint"
"IsDistanceRegular"
"IsDistanceTransitive"
"IsDivisible"
"IsDivisibleBy"
"IsDivisionAlgebra"
"IsDivisionRing"
"IsDivisorialContraction"
"IsDomain"
"IsDominant"
"IsDoublePoint"
"IsDoublyEven"
"IsDualComputable"
"IsDynkinDigraph"
"IsEdgeCapacitated"
"IsEdgeLabelled"
"IsEdgeTransitive"
"IsEdgeWeighted"
"IsEffective"
"IsEichler"
"IsEigenform"
"IsEisenstein"
"IsEisensteinSeries"
"IsElementaryAbelian"
"IsEllipticCurve"
"IsEllipticWeierstrass"
"IsEmbedded"
"IsEmpty"
"IsEmptySimpleQuotientProcess"
"IsEmptyWord"
"IsEndomorphism"
"IsEof"
"IsEqual"
"IsEquationOrder"
"IsEquidistant"
"IsEquitable"
"IsEquivalent"
"IsEuclideanDomain"
"IsEuclideanRing"
"IsEulerian"
"IsEven"
"IsExact"
"IsExactlyDivisible"
"IsExceptionalUnit"
"IsExport"
"IsExtensionCategory"
"IsExtensionOf"
"IsExtraSpecial"
"IsExtraSpecialNormaliser"
"IsFTGeometry"
"IsFace"
"IsFactorial"
"IsFactorisationPrime"
"IsFaithful"
"IsFakeWeightedProjectiveSpace"
"IsFanMap"
"IsFano"
"IsField"
"IsFieldCategory"
"IsFinite"
"IsFiniteOrder"
"IsFirm"
"IsFixedAtLevel"
"IsFlag"
"IsFlex"
"IsFlexFast"
"IsFlipping"
"IsForest"
"IsFree"
"IsFrobenius"
"IsFuchsianOperator"
"IsFull"
"IsFunctionFieldCategory"
"IsFunctor"
"IsFundamental"
"IsFundamentalDiscriminant"
"IsGE"
"IsGHom"
"IsGL2Equivalent"
"IsGLConjugate"
"IsGLConjugateBigClassical"
"IsGLConjugateClassical"
"IsGLConjugateExtraspecial"
"IsGLConjugateImprimitive"
"IsGLConjugateReducible"
"IsGLConjugateSemilinear"
"IsGLConjugateSubfield"
"IsGLConjugateTensor"
"IsGLConjugateTensorInduced"
"IsGLQConjugate"
"IsGLZConjugate"
"IsGLattice"
"IsGamma"
"IsGamma0"
"IsGamma1"
"IsGammaUpper0"
"IsGammaUpper1"
"IsGe"
"IsGeneralizedCartanMatrix"
"IsGeneralizedCharacter"
"IsGenuineWeightedDynkinDiagram"
"IsGenus"
"IsGenusComputable"
"IsGenusOneModel"
"IsGeometricallyHyperelliptic"
"IsGerm"
"IsGlobal"
"IsGlobalUnit"
"IsGlobalUnitWithPreimage"
"IsGloballySplit"
"IsGorenstein"
"IsGorensteinSurface"
"IsGraded"
"IsGraph"
"IsGroebner"
"IsHadamard"
"IsHadamardEquivalent"
"IsHadamardEquivalentLeon"
"IsHeckeAlgebra"
"IsHeckeOperator"
"IsHereditary"
"IsHilbertNumerator"
"IsHolzerReduced"
"IsHomeomorphic"
"IsHomogeneous"
"IsHomomorphism"
"IsHyperbolic"
"IsHyperelliptic"
"IsHyperellipticCurve"
"IsHyperellipticCurveOfGenus"
"IsHyperellipticWeierstrass"
"IsHyperplane"
"IsHypersurface"
"IsHypersurfaceDivisor"
"IsId"
"IsIdeal"
"IsIdempotent"
"IsIdentical"
"IsIdenticalPresentation"
"IsIdentity"
"IsIdentityProduct"
"IsInArtinSchreierRepresentation"
"IsInBasicOrbit"
"IsInCorootSpace"
"IsInDual"
"IsInImage"
"IsInInterior"
"IsInKummerRepresentation"
"IsInRadical"
"IsInRootSpace"
"IsInSecantVariety"
"IsInSmallGroupDatabase"
"IsInSmallModularCurveDatabase"
"IsInSupport"
"IsInTangentVariety"
"IsInTwistedForm"
"IsIndecomposable"
"IsIndefinite"
"IsIndependent"
"IsIndivisibleRoot"
"IsInduced"
"IsInert"
"IsInertial"
"IsInfinite"
"IsInflectionPoint"
"IsInjective"
"IsInner"
"IsInnerAutomorphism"
"IsInt"
"IsInteger"
"IsIntegral"
"IsIntegralDomain"
"IsIntegralModel"
"IsIntegrallyClosed"
"IsInterior"
"IsIntrinsic"
"IsInvariant"
"IsInvertible"
"IsIrreducible"
"IsIrreducibleFiniteNilpotent"
"IsIrregularSingularPlace"
"IsIsogenous"
"IsIsogenousPeriodMatrices"
"IsIsogeny"
"IsIsolated"
"IsIsometric"
"IsIsometry"
"IsIsomorphic"
"IsIsomorphicBigPeriodMatrices"
"IsIsomorphicCubicSurface"
"IsIsomorphicFF"
"IsIsomorphicOverBase"
"IsIsomorphicOverQt"
"IsIsomorphicPGroups"
"IsIsomorphicSmallPeriodMatrices"
"IsIsomorphicWithTwist"
"IsIsomorphism"
"IsJacobianPencil"
"IsKEdgeConnected"
"IsKVertexConnected"
"IsKnownIsomorphic"
"IsKnuthEquivalent"
"IsLDPC"
"IsLE"
"IsLabelled"
"IsLabelledEdge"
"IsLabelledVertex"
"IsLargeReeGroup"
"IsLaurent"
"IsLe"
"IsLeaf"
"IsLeftIdeal"
"IsLeftIsomorphic"
"IsLeftModule"
"IsLehmerCode"
"IsLexicographicallyOrdered"
"IsLie"
"IsLineRegular"
"IsLineTransitive"
"IsLinear"
"IsLinearGroup"
"IsLinearScheme"
"IsLinearSpace"
"IsLinearSystemNonEmpty"
"IsLinearlyDependent"
"IsLinearlyEquivalent"
"IsLinearlyEquivalentToCartier"
"IsLinearlyIndependent"
"IsLittleWoodRichardson"
"IsLocal"
"IsLocalNorm"
"IsLocallyFree"
"IsLocallySoluble"
"IsLocallySolvable"
"IsLocallyTwoTransitive"
"IsLongRoot"
"IsLowerTriangular"
"IsMDS"
"IsMagmaEuclideanRing"
"IsMatrixRing"
"IsMaximal"
"IsMaximalAtRamifiedPrimes"
"IsMaximisingFunction"
"IsMaximumDimensional"
"IsMaximumDistanceSeparable"
"IsMemberBasicOrbit"
"IsMetacyclicPGroup"
"IsMinimal"
"IsMinimalModel"
"IsMinimalTwist"
"IsMinusOne"
"IsMinusQuotient"
"IsMixed"
"IsMobile"
"IsModular"
"IsModularCurve"
"IsModuleHomomorphism"
"IsMonic"
"IsMonomial"
"IsMonomialIsomorphic"
"IsMonomialRepresentation"
"IsMoriFibreSpace"
"IsMorphism"
"IsMorphismCategory"
"IsMultiChar"
"IsNarrowlyPrincipal"
"IsNearLinearSpace"
"IsNearlyPerfect"
"IsNeat"
"IsNef"
"IsNefAndBig"
"IsNegative"
"IsNegativeDefinite"
"IsNegativeSemiDefinite"
"IsNew"
"IsNewform"
"IsNewtonPolygonOf"
"IsNilpotent"
"IsNilpotentByFinite"
"IsNodalCurve"
"IsNode"
"IsNonSingular"
"IsNonsingular"
"IsNorm"
"IsNormal"
"IsNormalised"
"IsNormalising"
"IsNormalized"
"IsNull"
"IsNullHomotopy"
"IsNumberField"
"IsObject"
"IsOdd"
"IsOddDegree"
"IsOnBoundary"
"IsOne"
"IsOneCoboundary"
"IsOneCocycle"
"IsOnlyMotivic"
"IsOptimal"
"IsOrbit"
"IsOrder"
"IsOrderTerm"
"IsOrdered"
"IsOrdinary"
"IsOrdinaryProjective"
"IsOrdinarySingularity"
"IsOrthogonalGroup"
"IsOuter"
"IsOverQ"
"IsOverSmallerField"
"IsPID"
"IsPIR"
"IsPRI"
"IsPSaturated"
"IsParabolic"
"IsParallel"
"IsParallelClass"
"IsParallelWeight"
"IsParallelism"
"IsPartialRoot"
"IsPartition"
"IsPartitionRefined"
"IsPath"
"IsPerfect"
"IsPerfectlyCentered"
"IsPermutationModule"
"IsPlanar"
"IsPlaneCurve"
"IsPlusQuotient"
"IsPoint"
"IsPointRegular"
"IsPointTransitive"
"IsPointed"
"IsPolycyclic"
"IsPolycyclicByFinite"
"IsPolygon"
"IsPolynomial"
"IsPolytope"
"IsPositive"
"IsPositiveDefinite"
"IsPositiveSemiDefinite"
"IsPower"
"IsPowerOf"
"IsPrimary"
"IsPrime"
"IsPrimeCertificate"
"IsPrimeField"
"IsPrimePower"
"IsPrimitive"
"IsPrimitiveFiniteNilpotent"
"IsPrincipal"
"IsPrincipalIdealDomain"
"IsPrincipalIdealRing"
"IsPrincipalSeries"
"IsProbablePrime"
"IsProbablyMaximal"
"IsProbablyPerfect"
"IsProbablyPermutationPolynomial"
"IsProbablyPrime"
"IsProbablySupersingular"
"IsProductOfParallelDescendingCycles"
"IsProjective"
"IsProjectivelyIrreducible"
"IsProper"
"IsProperChainMap"
"IsProportional"
"IsPseudoReflection"
"IsPure"
"IsPureOrder"
"IsPureQuantumCode"
"IsPyramid"
"IsQCartier"
"IsQFactorial"
"IsQGorenstein"
"IsQPrincipal"
"IsQuadratic"
"IsQuadraticSpace"
"IsQuadraticTwist"
"IsQuadricIntersection"
"IsQuasiCyclic"
"IsQuasiSimpleTwistedCyclic"
"IsQuasiTwistedCyclic"
"IsQuasisplit"
"IsQuaternionAlgebra"
"IsQuaternionic"
"IsQuotient"
"IsRC"
"IsRPRI"
"IsRWP"
"IsRWPRI"
"IsRadical"
"IsRamified"
"IsRational"
"IsRationalCurve"
"IsRationalFunctionField"
"IsRationalPoint"
"IsRawCurve"
"IsReal"
"IsRealReflectionGroup"
"IsRealisableOverSmallerField"
"IsRealisableOverSubfield"
"IsReduced"
"IsReductive"
"IsReeGroup"
"IsReflection"
"IsReflectionGroup"
"IsReflectionSubgroup"
"IsReflexive"
"IsRegular"
"IsRegularLDPC"
"IsRegularPlace"
"IsRegularSingularOperator"
"IsRegularSingularPlace"
"IsRepresentation"
"IsResiduallyConnected"
"IsResiduallyPrimitive"
"IsResiduallyWeaklyPrimitive"
"IsResolution"
"IsRestrictable"
"IsRestricted"
"IsRestrictedLieAlgebra"
"IsRestrictedSubalgebra"
"IsReverseLatticeWord"
"IsRightIdeal"
"IsRightIsomorphic"
"IsRightModule"
"IsRing"
"IsRingHomomorphism"
"IsRingOfAllModularForms"
"IsRoot"
"IsRootOfUnity"
"IsRootSpace"
"IsRootedTree"
"IsSIntegral"
"IsSPrincipal"
"IsSUnit"
"IsSUnitWithPreimage"
"IsSatisfied"
"IsSaturated"
"IsScalar"
"IsScalarGroup"
"IsSelfDual"
"IsSelfNormalising"
"IsSelfNormalizing"
"IsSelfOrthogonal"
"IsSemiLinear"
"IsSemiregular"
"IsSemisimple"
"IsSeparable"
"IsSeparating"
"IsServerSocket"
"IsSharplyTransitive"
"IsShortExactSequence"
"IsShortRoot"
"IsSimilar"
"IsSimple"
"IsSimpleOrder"
"IsSimpleStarAlgebra"
"IsSimplex"
"IsSimplicial"
"IsSimplifiedModel"
"IsSimplyConnected"
"IsSimplyLaced"
"IsSinglePrecision"
"IsSingular"
"IsSkew"
"IsSmooth"
"IsSmoothHyperSurface"
"IsSoluble"
"IsSolubleByFinite"
"IsSolvable"
"IsSpecial"
"IsSpinorGenus"
"IsSpinorNorm"
"IsSplit"
"IsSplitAsIdealAt"
"IsSplitToralSubalgebra"
"IsSplittingCartanSubalgebra"
"IsSplittingField"
"IsSquare"
"IsSquarefree"
"IsStandard"
"IsStandardAffinePatch"
"IsStandardGF"
"IsStandardParabolicSubgroup"
"IsStarAlgebra"
"IsSteiner"
"IsStratum"
"IsStrictlyConvex"
"IsStrictlyNef"
"IsStronglyAG"
"IsStronglyConnected"
"IsSubcanonicalCurve"
"IsSubfield"
"IsSubgraph"
"IsSubgroup"
"IsSublattice"
"IsSubmodule"
"IsSubnormal"
"IsSubscheme"
"IsSubsequence"
"IsSuitableQuaternionOrder"
"IsSuperSummitRepresentative"
"IsSupercuspidal"
"IsSuperlattice"
"IsSupersingular"
"IsSupersoluble"
"IsSupportingHyperplane"
"IsSurjective"
"IsSuzukiGroup"
"IsSymmetric"
"IsSymplecticGroup"
"IsSymplecticMatrix"
"IsSymplecticSelfDual"
"IsSymplecticSelfOrthogonal"
"IsSymplecticSpace"
"IsTIrreducible"
"IsTSelfdual"
"IsTamelyRamified"
"IsTangent"
"IsTensor"
"IsTensorInduced"
"IsTerminal"
"IsTerminalThreefold"
"IsThick"
"IsThin"
"IsToralSubalgebra"
"IsTorsionUnit"
"IsTorusInvariant"
"IsTotallyComplex"
"IsTotallyEven"
"IsTotallyPositive"
"IsTotallyRamified"
"IsTotallyReal"
"IsTotallySingular"
"IsTotallySplit"
"IsTransformation"
"IsTransitive"
"IsTransvection"
"IsTransverse"
"IsTree"
"IsTriangleGroup"
"IsTriangulable"
"IsTriconnected"
"IsTrivial"
"IsTrivialOnUnits"
"IsTwist"
"IsTwisted"
"IsTwoCoboundary"
"IsTwoSidedIdeal"
"IsUFD"
"IsUltraSummitRepresentative"
"IsUndirected"
"IsUniform"
"IsUnipotent"
"IsUniqueFactorisationDomain"
"IsUniqueFactorizationDomain"
"IsUniquePartialRoot"
"IsUnit"
"IsUnitWithPreimage"
"IsUnital"
"IsUnitary"
"IsUnitaryGroup"
"IsUnitaryRepresentation"
"IsUnitarySpace"
"IsUnivariate"
"IsUnramified"
"IsUpperTriangular"
"IsValid"
"IsValidLargeReeOrder"
"IsVerbose"
"IsVertex"
"IsVertexLabelled"
"IsVertexTransitive"
"IsWGsymmetric"
"IsWP"
"IsWPRI"
"IsWeaklyAG"
"IsWeaklyAGDual"
"IsWeaklyAdjoint"
"IsWeaklyConnected"
"IsWeaklyEqual"
"IsWeaklyMonic"
"IsWeaklyPrimitive"
"IsWeaklySimplyConnected"
"IsWeaklyZero"
"IsWeierstrassModel"
"IsWeierstrassPlace"
"IsWeight"
"IsWeightVector"
"IsWeighted"
"IsWeightedProjectiveSpace"
"IsWeil"
"IsWildlyRamified"
"IsWindows"
"IsWreathProduct"
"IsZero"
"IsZeroAt"
"IsZeroComplex"
"IsZeroDimensional"
"IsZeroDivisor"
"IsZeroMap"
"IsZeroTerm"
"Isetseq"
"Isetset"
"Iso"
"IsogeniesAreEqual"
"IsogenousCurves"
"Isogeny"
"IsogenyFromKernel"
"IsogenyFromKernelFactored"
"IsogenyGroup"
"IsogenyMapOmega"
"IsogenyMapPhi"
"IsogenyMapPhiMulti"
"IsogenyMapPsi"
"IsogenyMapPsiMulti"
"IsogenyMapPsiSquared"
"IsogenyMu"
"IsolGroup"
"IsolGroupDatabase"
"IsolGroupOfDegreeFieldSatisfying"
"IsolGroupOfDegreeSatisfying"
"IsolGroupSatisfying"
"IsolGroupsOfDegreeFieldSatisfying"
"IsolGroupsOfDegreeSatisfying"
"IsolGroupsSatisfying"
"IsolGuardian"
"IsolInfo"
"IsolIsPrimitive"
"IsolMinBlockSize"
"IsolNumberOfDegreeField"
"IsolOrder"
"IsolProcess"
"IsolProcessGroup"
"IsolProcessInfo"
"IsolProcessIsEmpty"
"IsolProcessLabel"
"IsolProcessNext"
"IsolProcessOfDegree"
"IsolProcessOfDegreeField"
"IsolProcessOfField"
"IsolateRoots"
"IsolatedGorensteinSingularitiesOfIndex"
"IsolatedPointsFinder"
"IsolatedPointsLiftToMinimalPolynomials"
"IsolatedPointsLifter"
"IsometricCircle"
"IsometryGroup"
"IsomorphicCopy"
"IsomorphicMatrixLieAlgebra"
"IsomorphicProjectionToSubspace"
"IsomorphicSubmodules"
"Isomorphism"
"IsomorphismData"
"IsomorphismExtension"
"IsomorphismExtensions"
"IsomorphismToIsogeny"
"IsomorphismToStandardCopy"
"IsomorphismToStandardSCDtm"
"Isomorphisms"
"IsomorphismsOverBase"
"IsotropicSubspace"
"IsotropicVector"
"IspGroup"
"IspIntegral"
"IspLieAlgebra"
"IspMaximal"
"IspMinimal"
"IspNormal"
"IspSubalgebra"
"Isqrt"
"Itest"
"JBessel"
"JH"
"JInvariants"
"JOne"
"JZero"
"Jacobi"
"JacobiSymbol"
"JacobiTheta"
"JacobiThetaNullK"
"Jacobian"
"JacobianIdeal"
"JacobianMatrix"
"JacobianOrdersByDeformation"
"JacobianPoint"
"JacobianSequence"
"JacobianSubrankScheme"
"JacobsonRadical"
"JacobsonRadicalAlgBas"
"JacobsonRadicalOverFiniteField"
"JellyfishConstruction"
"JellyfishImage"
"JellyfishPreimage"
"JenningsLieAlgebra"
"JenningsSeries"
"JeuDeTaquin"
"JohnsonBound"
"Join"
"JoinDFA"
"JordanBlock"
"JordanDecomposition"
"JordanForm"
"Js"
"JustesenCode"
"Juxtaposition"
"K3Baskets"
"K3Copy"
"K3Database"
"K3Surface"
"K3SurfaceFromRawData"
"K3SurfaceRawData"
"K3SurfaceToRecord"
"K3SurfaceWithCodimension"
"KBessel"
"KBessel2"
"KBinomial"
"KCubeGraph"
"KDegree"
"KLPolynomial"
"KMatrixSpace"
"KMatrixSpaceWithBasis"
"KModule"
"KModuleWithBasis"
"KSpace"
"KSpaceWithBasis"
"KacMoodyClass"
"KacMoodyClasses"
"KappaLattice"
"KaratsubaMultiplication"
"KeepAbelian"
"KeepElementary"
"KeepElementaryAbelian"
"KeepGeneratorAction"
"KeepGeneratorOrder"
"KeepGroupAction"
"KeepPGroupWeights"
"KeepPrimePower"
"KeepSplit"
"KeepSplitAbelian"
"KeepSplitElementaryAbelian"
"KerdockCode"
"Kernel"
"KernelBasis"
"KernelEmbedding"
"KernelMatrix"
"KernelOrder"
"KernelZ2CodeZ4"
"Kernels"
"Keys"
"KillingDifferentialModp"
"KillingForm"
"KillingMatrix"
"KissingNumber"
"KleinBottle"
"KleinQuartic"
"KnapsackSolutions"
"Knot"
"KnownAutomorphismSubgroup"
"KnownCoefficient"
"KnownFactors"
"KnownFactorsAndCoefficient"
"KnownIrreducibles"
"KodairaSymbol"
"KodairaSymbols"
"KostkaNumber"
"KrawchoukPolynomial"
"KrawchoukTransform"
"KroneckerCharacter"
"KroneckerDelta"
"KroneckerProduct"
"KroneckerSymbol"
"KummerSurface"
"KummerSurfacePointRaw"
"KummerSurfaceRaw"
"LCLM"
"LCM"
"LCT"
"LCfRequired"
"LDPCBinarySymmetricThreshold"
"LDPCCode"
"LDPCDecode"
"LDPCDensity"
"LDPCEnsembleRate"
"LDPCGaussianThreshold"
"LDPCGirth"
"LDPCMatrix"
"LDPCSimulate"
"LFSRSequence"
"LFSRStep"
"LFunction"
"LGetCoefficients"
"LHS"
"LLL"
"LLLBasis"
"LLLBasisMatrix"
"LLLBlock"
"LLLGram"
"LLLGramMatrix"
"LLLReducedModel"
"LMGCenter"
"LMGCentre"
"LMGChiefFactors"
"LMGChiefSeries"
"LMGCommutatorSubgroup"
"LMGCompositionFactors"
"LMGCompositionSeries"
"LMGDerivedGroup"
"LMGEqual"
"LMGFactoredOrder"
"LMGFittingSubgroup"
"LMGIndex"
"LMGIsIn"
"LMGIsNilpotent"
"LMGIsNormal"
"LMGIsSoluble"
"LMGIsSolvable"
"LMGIsSubgroup"
"LMGNormalClosure"
"LMGOrder"
"LMGSocleStar"
"LMGSocleStarAction"
"LMGSocleStarActionKernel"
"LMGSocleStarFactors"
"LMGSocleStarQuotient"
"LMGSolubleRadical"
"LMGSolvableRadical"
"LMGSylow"
"LMGUnipotentRadical"
"LPProcess"
"LPolynomial"
"LProduct"
"LRatio"
"LRatioOddPart"
"LSeries"
"LSeriesData"
"LSeriesLeadingCoefficient"
"LSetCoefficients"
"LSetPrecision"
"LStar"
"LTaylor"
"LUB"
"Label"
"LabelToMatrixInternal"
"Labelling"
"Labels"
"LaguerrePolynomial"
"LaminatedLattice"
"Lang"
"LanguageCountInternal"
"LanguageDFA"
"Laplace"
"LargeRee"
"LargeReeBNpair"
"LargeReeConjugacy"
"LargeReeDiagonalisation"
"LargeReeElementToWord"
"LargeReeGeneralRecogniser"
"LargeReeGroup"
"LargeReeInvolutionCentraliser"
"LargeReeInvolutionClass"
"LargeReeIrreducibleRepresentation"
"LargeReeMaximalSubgroups"
"LargeReeRecognition"
"LargeReeReduction"
"LargeReeRedundantSLPGenerators"
"LargeReeResetRandomProcess"
"LargeReeSLPCoercion"
"LargeReeStandardConstructiveMembership"
"LargeReeStandardMaximalSubgroups"
"LargeReeStandardMembership"
"LargeReeStandardRecogniser"
"LargeReeSylow"
"LargeReeSzInvolution"
"LargestConductor"
"LargestDimension"
"LargestOrder"
"LastColumnEntry"
"LastIndexOfRow"
"Lattice"
"LatticeBasisInCone"
"LatticeBasisMatrix"
"LatticeCoordinates"
"LatticeData"
"LatticeDatabase"
"LatticeElementToMonomial"
"LatticeMap"
"LatticeMinkowskiDecomposition"
"LatticeName"
"LatticeToZGram"
"LatticeVector"
"LatticeVectorsInBox"
"LatticeWithBasis"
"LatticeWithGram"
"LaurentSeriesAlgebra"
"LaurentSeriesRing"
"LayerBoundary"
"LayerLength"
"LazyPowerSeriesRing"
"LazySeries"
"Lcm"
"LeadingCoefficient"
"LeadingExponent"
"LeadingGenerator"
"LeadingMonomial"
"LeadingMonomialIdeal"
"LeadingTerm"
"LeadingTotalDegree"
"LeadingWeightedDegree"
"LeastCommonLeftMultiple"
"LeastCommonMultiple"
"LeastUpperBound"
"LeeBrickellsAttack"
"LeeDistance"
"LeeDistance1"
"LeeWeight"
"LeeWeight1"
"LeeWeightDistribution"
"LeeWeightEnumerator"
"LeftAnnihilator"
"LeftComplex"
"LeftConjugate"
"LeftCosetSpace"
"LeftDescentSet"
"LeftDiv"
"LeftExactExtension"
"LeftGCD"
"LeftGcd"
"LeftGreatestCommonDivisor"
"LeftIdeal"
"LeftIdealClasses"
"LeftInverse"
"LeftInverseMorphism"
"LeftIsomorphism"
"LeftLCM"
"LeftLcm"
"LeftLeastCommonMultiple"
"LeftMixedCanonicalForm"
"LeftNormalForm"
"LeftOrder"
"LeftRepresentationMatrix"
"LeftString"
"LeftStringLength"
"LeftZeroExtension"
"LegendreEquation"
"LegendreModel"
"LegendrePolynomial"
"LegendreSymbol"
"LehmerCode"
"LehmerCodeToPerm"
"Length"
"LengthenCode"
"Lengths"
"LensSpace"
"LeonsAttack"
"LetterCreate"
"LetterDelete"
"LetterPreImage"
"LetterPrint"
"LetterVarAlgebra"
"LetterVarCalc"
"LetterVarCheck"
"LetterVarCocycles"
"LetterVarConsistency"
"LetterVarConsistencyProc"
"LetterVarCreate"
"LetterVarDelete"
"LetterVarEquations"
"LetterVarFpRelsProc"
"LetterVarGroup"
"LetterVarPreImage"
"LetterVarPrint"
"Level"
"Levels"
"LevenshteinBound"
"LexProduct"
"LexicographicalOrdering"
"LiEMaximalSubgroups"
"LiERootDatum"
"LiESymmetricCharacterValue"
"LibFileOpen"
"LieAlgebra"
"LieAlgebraHomomorphism"
"LieAlgebraOfDerivations"
"LieBracket"
"LieCharacteristic"
"LieConstant_C"
"LieConstant_M"
"LieConstant_N"
"LieConstant_epsilon"
"LieConstant_eta"
"LieConstant_p"
"LieConstant_q"
"LieRepresentationDecomposition"
"LieType"
"Lift"
"LiftCharacter"
"LiftCharacters"
"LiftCocycle"
"LiftDescendant"
"LiftHomomorphism"
"LiftHomomorphismGroupP"
"LiftIsogeny"
"LiftIsomorphism"
"LiftMap"
"LiftModule"
"LiftModules"
"LiftNonsplitExtension"
"LiftNonsplitExtensionRow"
"LiftPoint"
"LiftSplitExtension"
"LiftSplitExtensionRow"
"LiftToChainmap"
"Line"
"LineAtInfinity"
"LineGraph"
"LineGroup"
"LineOrbits"
"LineSet"
"LinearCharacters"
"LinearCode"
"LinearCombinationOfEigenformsOverC"
"LinearConeGenerators"
"LinearElimination"
"LinearGraph"
"LinearRelation"
"LinearRelations"
"LinearRepresentationSetup"
"LinearRepresentations"
"LinearShift"
"LinearSpace"
"LinearSpanEquations"
"LinearSpanGenerators"
"LinearSubspaceGenerators"
"LinearSystem"
"LinearSystemAtPhi"
"LinearSystemTrace"
"LinearlyEquivalentDivisorWithNoSupportOn"
"Lines"
"LinesInScheme"
"Linking"
"LinkingNumbers"
"ListAttributes"
"ListCategories"
"ListEntriesEqual"
"ListSignatures"
"ListTypes"
"ListVerbose"
"LittlewoodRichardsonTensor"
"LocalComponent"
"LocalCoxeterGroup"
"LocalDegree"
"LocalFactorization"
"LocalField"
"LocalGenera"
"LocalGlobalSelmerDiagram"
"LocalHeight"
"LocalInformation"
"LocalIntersectionData"
"LocalPolynomialRing"
"LocalRing"
"LocalRootNumber"
"LocalTwoSelmerMap"
"LocalUniformizer"
"Localisation"
"Localization"
"Log"
"LogCanonicalThreshold"
"LogCanonicalThresholdAtOrigin"
"LogCanonicalThresholdOverExtension"
"LogDerivative"
"LogGamma"
"LogIntegral"
"LogNorms"
"LogarithmicFieldExtension"
"Logs"
"LongBits"
"LongDivision"
"LongExactSequenceOnHomology"
"LongWords"
"LongestElement"
"LongestWeylWord"
"Lookup"
"LookupPrime"
"LowDimSubmodules"
"LowIndexNormalSubgroups"
"LowIndexProcess"
"LowIndexSubgroups"
"LowIndexSubgroupsSn"
"LowIndexSubmodules"
"LowerCentralSeries"
"LowerFaces"
"LowerSlopes"
"LowerTriangularMatrix"
"LowerVertices"
"Lucas"
"MAXSGPInternal"
"MCPolynomials"
"MCSplit"
"MDSCode"
"MEANS"
"MGCD"
"MMP"
"MPQS"
"MSQLetternonsplit"
"MSQLettersplit"
"MSQnonsplit"
"MSQnonsplitBase"
"MSQsplit"
"MSQsplitBase"
"MSetPolynomial"
"MSumPolynomial"
"MacWilliamsTransform"
"MagicNumber"
"Main"
"MainInvolution"
"MakeBasket"
"MakeCoprime"
"MakeCyclotomic"
"MakeDirected"
"MakeHomWithPreimageHandler"
"MakeIsSquare"
"MakeMapWithPreimageHandler"
"MakeModCubes"
"MakePCMap"
"MakeProjectiveClosureMap"
"MakeRepsDB"
"MakeRepsSmall"
"MakeResiduesSEA"
"MakeResolutionGraph"
"MakeSpliceDiagram"
"MakeType"
"Manifold"
"ManifoldDatabase"
"ManinConstant"
"ManinSymbol"
"MantissaExponent"
"MapToMatrix"
"Mapping"
"Maps"
"MargulisCode"
"MarkGroebner"
"Mass"
"MasseyProduct"
"MatRep"
"MatRepCharacteristics"
"MatRepDegrees"
"MatRepFieldSizes"
"MatRepKeys"
"Match"
"Matrices"
"Matrix"
"MatrixAlgebra"
"MatrixGroup"
"MatrixLieAlgebra"
"MatrixOfElement"
"MatrixOfInequalities"
"MatrixOfIsomorphism"
"MatrixQuotient"
"MatrixRepresentation"
"MatrixRing"
"MatrixToLabelInternal"
"MatrixToPerm"
"MatrixToWord"
"MatrixUnit"
"MatrixWithGivenCharacteristicPolynomial"
"MattsonSolomonTransform"
"Max"
"MaxCones"
"MaxNorm"
"MaxOrthPCheck"
"MaxParabolics"
"MaxSub"
"MaxSubKeys"
"MaxSubsTF2"
"MaxSubsTF4"
"Maxdeg"
"MaximalAbelianSubfield"
"MaximalCoefficientCode"
"MaximalExtension"
"MaximalIdeals"
"MaximalIncreasingSequence"
"MaximalIncreasingSequences"
"MaximalIntegerSolution"
"MaximalLeftIdeals"
"MaximalNormalSubgroup"
"MaximalNumberOfCosets"
"MaximalOddOrderNormalSubgroup"
"MaximalOrder"
"MaximalOrderBasis"
"MaximalOrderFinite"
"MaximalOrderInfinite"
"MaximalOvergroup"
"MaximalParabolics"
"MaximalPartition"
"MaximalRightIdeals"
"MaximalSingularSubspace"
"MaximalSolution"
"MaximalSubfields"
"MaximalSubgroups"
"MaximalSubgroupsAlt"
"MaximalSubgroupsAltSym"
"MaximalSubgroupsData"
"MaximalSubgroupsH"
"MaximalSubgroupsSym"
"MaximalSubgroupsTF"
"MaximalSublattices"
"MaximalSubmodules"
"MaximalTotallyIsotropicSubspace"
"MaximalVertexFacetHeightMatrix"
"MaximalZeroOneSolution"
"Maximum"
"MaximumBettiDegree"
"MaximumClique"
"MaximumDegree"
"MaximumFlow"
"MaximumInDegree"
"MaximumIndependentSet"
"MaximumMatching"
"MaximumNorm"
"MaximumOutDegree"
"MaximumStoredIrreducibleDegree"
"Maxindeg"
"Maxoutdeg"
"McElieceEtAlAsymptoticBound"
"McEliecesAttack"
"Meataxe"
"MeetDFA"
"MelikianLieAlgebra"
"MemCompact"
"MemProfile"
"Memory"
"MergeFields"
"MergeFiles"
"MergeUnits"
"MetacyclicPGroups"
"Mij2EltRootTable"
"Mike1"
"MilnorNumber"
"Min"
"MinParabolics"
"MinRowsGeneratorMatrix"
"Mindeg"
"MinimalAlgebraGenerators"
"MinimalAndCharacteristicPolynomials"
"MinimalBaseRingCharacter"
"MinimalBasis"
"MinimalBlocks"
"MinimalCoefficientDegree"
"MinimalCyclotomicField"
"MinimalDecomposition"
"MinimalDegreeModel"
"MinimalElementConjugatingToPositive"
"MinimalElementConjugatingToSuperSummit"
"MinimalElementConjugatingToUltraSummit"
"MinimalExtensionBasis"
"MinimalField"
"MinimalFreeResolution"
"MinimalHeckePolynomial"
"MinimalIdeals"
"MinimalInequalities"
"MinimalInteger"
"MinimalIntegerSolution"
"MinimalIsogeny"
"MinimalLeeWords"
"MinimalLeftIdeals"
"MinimalModel"
"MinimalNormalSubgroup"
"MinimalNormalSubgroups"
"MinimalOverfields"
"MinimalOvergroup"
"MinimalOvergroups"
"MinimalParabolics"
"MinimalPartition"
"MinimalPartitions"
"MinimalPolynomial"
"MinimalPolynomialFrobenius"
"MinimalPositiveGenerators"
"MinimalPrimeComponents"
"MinimalQuadraticTwist"
"MinimalRGenerators"
"MinimalRelations"
"MinimalRightIdeals"
"MinimalSolution"
"MinimalSubmodule"
"MinimalSubmodules"
"MinimalSuperlattices"
"MinimalSupermodules"
"MinimalSyzygyModule"
"MinimalTwist"
"MinimalVectorSequence"
"MinimalWeierstrassModel"
"MinimalWords"
"MinimalZeroOneSolution"
"MinimisationMatrix"
"Minimise"
"MinimiseConicToMatrix"
"MinimiseReduce"
"MinimiseWeights"
"Minimize"
"MinimizeCubicSurface"
"MinimizeDFA"
"MinimizeDeg4delPezzo"
"MinimizeGenerators"
"MinimizePlaneQuartic"
"MinimizeReduce"
"MinimizeReduceCubicSurface"
"MinimizeReduceDeg4delPezzo"
"MinimizeReducePlaneQuartic"
"Minimum"
"MinimumCut"
"MinimumDegree"
"MinimumDistance"
"MinimumDominatingSet"
"MinimumEuclideanDistance"
"MinimumEuclideanWeight"
"MinimumInDegree"
"MinimumLeeDistance"
"MinimumLeeWeight"
"MinimumLeeWeightBounds"
"MinimumLeeWords"
"MinimumOutDegree"
"MinimumWeight"
"MinimumWeightBounds"
"MinimumWeightTree"
"MinimumWord"
"MinimumWords"
"Minindeg"
"MinkowskiBound"
"MinkowskiDecomposition"
"MinkowskiGramReduction"
"MinkowskiLattice"
"MinkowskiReduction"
"MinkowskiSpace"
"Minor"
"MinorBoundary"
"MinorLength"
"Minors"
"Minoutdeg"
"Minus"
"MinusInfinity"
"MinusTamagawaNumber"
"MinusVolume"
"MixedCanonicalForm"
"ModByPowerOf2"
"ModelParent"
"ModelToSequence"
"ModelToString"
"ModelType"
"Modexp"
"ModifyProcess"
"ModifySelfintersection"
"ModifyTransverseIntersection"
"Modinv"
"Modorder"
"Modsqrt"
"ModularAbelianVariety"
"ModularComposition"
"ModularCompositionApply"
"ModularCompositionSetup"
"ModularCompositions"
"ModularCurve"
"ModularCurveDatabase"
"ModularCurveQuotient"
"ModularCurves"
"ModularDegree"
"ModularEmbedding"
"ModularEquation"
"ModularForm"
"ModularForms"
"ModularHyperellipticCurve"
"ModularKernel"
"ModularNonHyperellipticCurveGenus3"
"ModularParameterization"
"ModularParametrisation"
"ModularParametrization"
"ModularPolarization"
"ModularSolution"
"ModularSymbol"
"ModularSymbolApply"
"ModularSymbolEven"
"ModularSymbolOdd"
"ModularSymbolRepresentation"
"ModularSymbolToIntegralHomology"
"ModularSymbolToRationalHomology"
"ModularSymbols"
"ModularSymbolsH"
"ModularSymbolsModSmallPrime"
"Module"
"ModuleExtension"
"ModuleExtensionComplement"
"ModuleHomomorphism"
"ModuleMap"
"ModuleMaps"
"ModuleOverSmallerField"
"ModuleProject"
"ModuleProjectM"
"ModuleSaturation"
"ModuleToZModule"
"ModuleWithBasis"
"Modules"
"ModulesOverCommonField"
"ModulesOverSmallerField"
"Moduli"
"ModuliPoints"
"Modulus"
"ModulusIsFinite"
"MoebiusMu"
"MoebiusStrip"
"MolienSeries"
"MolienSeriesApproximation"
"MonicDifferentialOperator"
"MonicModel"
"MonodromyPairing"
"MonodromyWeights"
"Monoid"
"Monomial"
"MonomialAutomorphismGroup"
"MonomialBasis"
"MonomialCoefficient"
"MonomialDivisionList"
"MonomialGroup"
"MonomialGroupStabilizer"
"MonomialLattice"
"MonomialMatrix"
"MonomialOrder"
"MonomialOrderWeightVectors"
"MonomialSubgroup"
"MonomialToCoxMonomialsLattice"
"MonomialToElementaryMatrix"
"MonomialToHomogeneousMatrix"
"MonomialToPowerSumMatrix"
"MonomialToSchurMatrix"
"Monomials"
"MonomialsOfDegree"
"MonomialsOfDegreeZero"
"MonomialsOfWeightedDegree"
"MooreDeterminant"
"MordellWeilGroup"
"MordellWeilLattice"
"MordellWeilRank"
"MordellWeilRankBounds"
"MordellWeilShaInformation"
"MordellWeilSubgroup"
"MoriCone"
"Morphism"
"MorphismAutomorphism"
"MorphismAutomorphisms"
"MorphismCategory"
"MorphismFromImages"
"MorphismFromImagesAndBaseMorphism"
"MorphismMap"
"MorphismMapHasPreimage"
"MotivicWeight"
"MovablePart"
"Mult"
"MultiKnapsackSolutions"
"MultiQuotientMaps"
"MultiRank"
"MultiSpaces"
"MultiTuple"
"Multidegree"
"Multinomial"
"MultipartiteGraph"
"MultiplicationByMMap"
"MultiplicationTable"
"MultiplicativeGroup"
"MultiplicativeJordanDecomposition"
"MultiplicativeOrder"
"MultiplicatorRing"
"Multiplicities"
"Multiplicity"
"Multiplier"
"MultiplyByTranspose"
"MultiplyColumn"
"MultiplyDivisor"
"MultiplyFrobenius"
"MultiplyRow"
"MultiplyTransformations"
"Multiset"
"MultisetToSet"
"Multisets"
"MultivaluedSection"
"MultivariatePolynomial"
"MurphyAlphaApproximation"
"MyAbelianGroup"
"MyBasis"
"MyCompletion"
"MyDumbExpand"
"MyEval"
"MyExpand"
"MyExtOrder"
"MyFPGroup"
"MyGCD"
"MyGetLowPrecisionExpand"
"MyGetLowPrecisionExpandAS"
"MyGradedMap"
"MyInvars"
"MyIsConjugate"
"MyIsConjugateQuotient"
"MyIsConjugateSubgroup"
"MyIsMaximal"
"MyIsSquare"
"MyMaximalOrder"
"MyPrimitivePart"
"MyRationalPoints"
"MyRelativeInvariant"
"NFS"
"NFSProcess"
"NFaces"
"NGrad"
"NMS"
"NMatReps"
"NMaxSubs"
"NNZEntries"
"NP"
"NPCGenerators"
"NPCgens"
"NPermReps"
"NSpin"
"NagataAutomorphism"
"Nagens"
"NaiveHeight"
"Nalggens"
"Name"
"Name2Mij"
"NameSimple"
"Names"
"NarrowClassGroup"
"NarrowClassNumber"
"NaturalActionGenerator"
"NaturalBlackBoxGroup"
"NaturalFreeAlgebraCover"
"NaturalGroup"
"NaturalMap"
"NaturalMaps"
"Nclasses"
"Ncols"
"NearLinearSpace"
"NefCone"
"NegationMap"
"Negative"
"NegativeGammaOrbitsOnRoots"
"NegativePrimeDivisors"
"NegativeRelativeRoots"
"Neighbor"
"NeighborClosure"
"Neighbors"
"Neighbour"
"NeighbourClosure"
"NeighbouringGerms"
"Neighbours"
"NewAndOldSubspacesUsingHeckeAction"
"NewEnv"
"NewLLLBasis"
"NewLevel"
"NewModularHyperellipticCurve"
"NewModularHyperellipticCurves"
"NewModularNonHyperellipticCurveGenus3"
"NewModularNonHyperellipticCurvesGenus3"
"NewQuotient"
"NewSaturation"
"NewStore"
"NewSubspace"
"NewSubvariety"
"Newform"
"NewformDecomposition"
"Newforms"
"NewtonPolygon"
"NewtonPolynomial"
"NewtonPolynomials"
"NewtonPolytope"
"NextClass"
"NextElement"
"NextExtension"
"NextFactor"
"NextGraph"
"NextModule"
"NextPermutation"
"NextPrime"
"NextRepresentation"
"NextSimpleQuotient"
"NextSubgroup"
"NextVector"
"Ngens"
"Nice"
"NiceRepresentativeModuloPowers"
"NiceRepresentativesModuloPowers"
"NicerQuaternionAlgebra"
"NilRadical"
"NilpotencyClass"
"NilpotentBoundary"
"NilpotentLength"
"NilpotentLieAlgebra"
"NilpotentOrbit"
"NilpotentOrbits"
"NilpotentPresentation"
"NilpotentQuotient"
"NilpotentSection"
"NilpotentSubgroups"
"Nilradical"
"NineDescent"
"NineSelmerSet"
"NoCommonComponent"
"NoetherNormalisation"
"NoetherNormalization"
"NoetherNumerator"
"NoetherWeights"
"NonCuspidalQRationalPoints"
"NonIdempotentActionGenerators"
"NonIdempotentGenerators"
"NonNilpotentElement"
"NonNormalizedLcm"
"NonPrimitiveAlternant"
"NonPrincipalPrimesUpTo"
"NonQFactorialLocus"
"NonReducedFibres"
"NonSimplicialCones"
"NonSpecialDivisor"
"NonZeroCoordinates"
"NonsolvableSubgroups"
"NonsplitAbelianSection"
"NonsplitCollector"
"NonsplitElementaryAbelianSection"
"NonsplitExtensionSpace"
"NonsplitSection"
"Nonsquare"
"NonvanishingForm"
"Norm"
"NormAbs"
"NormEquation"
"NormGroup"
"NormGroupDiscriminant"
"NormKernel"
"NormModule"
"NormOneGroup"
"NormOneSubgroup"
"NormResidueSymbol"
"NormSpace"
"NormalBasisGenerator"
"NormalClosure"
"NormalClosureMonteCarlo"
"NormalComplements"
"NormalCone"
"NormalElement"
"NormalFan"
"NormalForm"
"NormalLattice"
"NormalNumber"
"NormalSubfields"
"NormalSubgroup"
"NormalSubgroupRandomElement"
"NormalSubgroups"
"Normalisation"
"NormalisationCoefficient"
"Normalise"
"NormalisedCone"
"Normaliser"
"NormaliserCode"
"NormaliserMatrix"
"Normalization"
"NormalizationCoefficient"
"Normalize"
"NormalizeIdeals"
"Normalizer"
"NormalizerCode"
"NormalizerGLZ"
"NormalizerMatrix"
"Norms"
"Not"
"Nqubits"
"Nrels"
"Nrows"
"Nsgens"
"NthPrime"
"NuclearRank"
"NullGraph"
"NullHomotopy"
"NullSpace"
"Nullity"
"Nullspace"
"NullspaceMatrix"
"NullspaceOfTranspose"
"NullspaceOfTransposeMatrix"
"NumExtraspecialPairs"
"NumPosRoots"
"Number"
"NumberField"
"NumberFieldDatabase"
"NumberFieldSieve"
"NumberFields"
"NumberOfActionGenerators"
"NumberOfAffinePatches"
"NumberOfAlgebraicGenerators"
"NumberOfAntisymmetricForms"
"NumberOfBlocks"
"NumberOfBoundaryPoints"
"NumberOfCells"
"NumberOfClasses"
"NumberOfColumns"
"NumberOfComponents"
"NumberOfConstantWords"
"NumberOfConstraints"
"NumberOfCoordinates"
"NumberOfCurves"
"NumberOfDivisors"
"NumberOfEGenerators"
"NumberOfEdges"
"NumberOfElements"
"NumberOfExtensions"
"NumberOfFGenerators"
"NumberOfFaces"
"NumberOfFacets"
"NumberOfFields"
"NumberOfFixedSpaces"
"NumberOfGenerators"
"NumberOfGradings"
"NumberOfGraphs"
"NumberOfGroups"
"NumberOfGroupsSF"
"NumberOfGroupsp7"
"NumberOfInclusions"
"NumberOfInteriorPoints"
"NumberOfInvariantForms"
"NumberOfIrreducibleMatrixGroups"
"NumberOfIsogenyClasses"
"NumberOfK3Surfaces"
"NumberOfKGenerators"
"NumberOfLabels"
"NumberOfLattices"
"NumberOfLevels"
"NumberOfLines"
"NumberOfMatrices"
"NumberOfMetacyclicPGroups"
"NumberOfNames"
"NumberOfNewformClasses"
"NumberOfNonZeroEntries"
"NumberOfOperations"
"NumberOfPCGenerators"
"NumberOfPartitions"
"NumberOfPermutations"
"NumberOfPlacesDegECF"
"NumberOfPlacesOfDegreeOne"
"NumberOfPlacesOfDegreeOneECF"
"NumberOfPlacesOfDegreeOneECFBound"
"NumberOfPlacesOfDegreeOneOverExactConstantField"
"NumberOfPlacesOfDegreeOneOverExactConstantFieldBound"
"NumberOfPlacesOfDegreeOverExactConstantField"
"NumberOfPoints"
"NumberOfPointsAtInfinity"
"NumberOfPointsOnCubicSurface"
"NumberOfPointsOnSurface"
"NumberOfPositiveRoots"
"NumberOfPrimePolynomials"
"NumberOfPrimitiveAffineGroups"
"NumberOfPrimitiveAlmostSimpleGroups"
"NumberOfPrimitiveDiagonalGroups"
"NumberOfPrimitiveGroups"
"NumberOfPrimitiveProductGroups"
"NumberOfPrimitiveSolubleGroups"
"NumberOfProjectives"
"NumberOfPunctures"
"NumberOfQubits"
"NumberOfQuotientGradings"
"NumberOfRationalPoints"
"NumberOfRelations"
"NumberOfRelationsRequired"
"NumberOfRepresentations"
"NumberOfRows"
"NumberOfSkewRows"
"NumberOfSmallGroups"
"NumberOfSmoothDivisors"
"NumberOfSolubleIrreducibleMatrixGroups"
"NumberOfStandardTableaux"
"NumberOfStandardTableauxOnWeight"
"NumberOfStrings"
"NumberOfStrongGenerators"
"NumberOfSubgroupsAbelianPGroup"
"NumberOfSymmetricForms"
"NumberOfTableauxOnAlphabet"
"NumberOfTerms"
"NumberOfTransitiveGroups"
"NumberOfTransverseIntersections"
"NumberOfVariables"
"NumberOfVertices"
"NumberOfWords"
"NumberOfhGenerators"
"NumberOfxGenerators"
"NumberOfyGenerators"
"NumberingMap"
"NumbersOfPointsOnSurface"
"Numelt"
"Numeration"
"Numerator"
"NumeratorData"
"NumeratorSequence"
"NumericalDerivative"
"NumericalEigenvectors"
"O"
"OECM"
"OEIS"
"OEISDatabase"
"ObjectMap"
"ObjectMapHasPreimage"
"ObjectiveFunction"
"Obstruction"
"ObstructionDescentBuildingBlock"
"OddGraph"
"Oddity"
"OldClassInvariants"
"OldDerksenIdeal"
"OldGOMinus"
"OldGeneralOrthogonalGroupMinus"
"OldIrreducibleModules"
"OldOmegaMinus"
"OldQuadraticSpace"
"OldQuotient"
"OldSOMinus"
"OldSpecialOrthogonalGroupMinus"
"OldSubspace"
"OldSubvariety"
"Omega"
"OmegaMinus"
"OmegaPlus"
"One"
"OneCocycle"
"OneCohomology"
"OneCohomologyAb"
"OneCohomologyFP"
"OneCohomologyFP_"
"OneParameterSubgroupsLattice"
"OneSkeleton"
"OnlyUpToIsogeny"
"Open"
"OpenGraphFile"
"OpenSmallGroupDatabase"
"OpenTest"
"Operands"
"Operation"
"Operator"
"OperatorNorm"
"OppositeAlgebra"
"OptimalEdgeColouring"
"OptimalSkewness"
"OptimalVertexColouring"
"OptimisedRepresentation"
"OptimizedRepresentation"
"Or"
"Orbit"
"OrbitAction"
"OrbitActionBounded"
"OrbitBounded"
"OrbitClosure"
"OrbitImage"
"OrbitImageBounded"
"OrbitKernel"
"OrbitKernelBounded"
"OrbitLensInternal"
"OrbitMinsInternal"
"OrbitNumbersInternal"
"OrbitPartitionIsConjugate"
"OrbitPartitionStabilizer"
"OrbitRepresentatives"
"OrbitStabilizer"
"OrbitSum"
"OrbitalGraph"
"Orbits"
"OrbitsOfSpaces"
"OrbitsOnSimples"
"OrbitsPartition"
"Order"
"OrderAutomorphismGroupAbelianPGroup"
"OrderGL"
"OrderOfImageOfComponentGroupOfJ0N"
"OrderOfRootOfUnity"
"OrderedGenerators"
"OrderedIntegerMonoid"
"OrderedMonoid"
"OrderedPartitionStack"
"OrderedPartitionStackZero"
"Ordering"
"OreConditions"
"OrientatedGraph"
"Origin"
"OriginalRing"
"OrthogonalComplement"
"OrthogonalComponent"
"OrthogonalComponents"
"OrthogonalDecomposition"
"OrthogonalDirectSum"
"OrthogonalForm"
"OrthogonalFormCS"
"OrthogonalFormMinus"
"OrthogonalFormPlus"
"OrthogonalReflection"
"OrthogonalSum"
"OrthogonalTensorProduct"
"Orthogonalize"
"OrthogonalizeGram"
"Orthonormalize"
"OutDegree"
"OutEdges"
"OutNeighbors"
"OutNeighbours"
"OuterFPGroup"
"OuterFaces"
"OuterNormal"
"OuterNormals"
"OuterOrder"
"OuterShape"
"OuterVertices"
"OvalDerivation"
"OverDimension"
"OverconvergentHeckeSeries"
"OverconvergentHeckeSeriesDegreeBound"
"Overdatum"
"Overgroup"
"P1"
"P1Action"
"P1Classes"
"P1Normalize"
"P1P2toA3Ac2over12"
"P1Reduce"
"P2"
"PALPNormalForm"
"PCAut"
"PCAutAction"
"PCAutDeriv"
"PCAutIsSol"
"PCAutPrint"
"PCBFConjByWord"
"PCBFEltNew"
"PCBFElteq"
"PCBFEltne"
"PCBFMult"
"PCBFNew"
"PCBFNormalForm"
"PCBFRevert"
"PCClass"
"PCExponents"
"PCGO"
"PCGOMinus"
"PCGOPlus"
"PCGSp"
"PCGU"
"PCGenerators"
"PCGroup"
"PCMap"
"PCPresentation"
"PCPrimes"
"PCSO"
"PCSOMinus"
"PCSOPlus"
"PCSU"
"PGL"
"PGO"
"PGOMinus"
"PGOPlus"
"PGU"
"PGammaL"
"PGammaU"
"PGroupSection"
"PGroupToForms"
"PHom"
"POmega"
"POmegaMinus"
"POmegaPlus"
"POpen"
"PSL"
"PSL2"
"PSO"
"PSOMinus"
"PSOPlus"
"PSU"
"PSigmaL"
"PSigmaSp"
"PSigmaSz"
"PSigmaU"
"PSp"
"PSz"
"PackingRadius"
"PadCode"
"PadeHermiteApproximant"
"PairReduce"
"PairReduceGram"
"PaleyGraph"
"PaleyTournament"
"ParallelClass"
"ParallelClasses"
"ParallelSort"
"ParamDeg4DPSingLie"
"Parameters"
"Parametrization"
"ParametrizationMatrix"
"ParametrizationToPuiseux"
"ParametrizeAnticanonicalP1xP1"
"ParametrizeAnticanonicalSphere"
"ParametrizeBlowup"
"ParametrizeDegree5DelPezzo"
"ParametrizeDegree6DelPezzo"
"ParametrizeDegree7DelPezzo"
"ParametrizeDegree8DelPezzo"
"ParametrizeDegree9DelPezzo"
"ParametrizeDelPezzo"
"ParametrizeDelPezzoDeg6"
"ParametrizeDelPezzoDeg9"
"ParametrizeOrdinaryCurve"
"ParametrizePencil"
"ParametrizeProjectiveHypersurface"
"ParametrizeProjectiveSurface"
"ParametrizeQuadric"
"ParametrizeRNC"
"ParametrizeRationalNormalCurve"
"ParametrizeScroll"
"ParametrizeSingularDegree3DelPezzo"
"ParametrizeSingularDegree4DelPezzo"
"Parent"
"ParentCategory"
"ParentCell"
"ParentGraph"
"ParentPlane"
"ParentRing"
"ParityCheckMatrix"
"PartialDual"
"PartialFactorization"
"PartialFractionDecomposition"
"PartialLeeWeightDistribution"
"PartialPrimaryInvariantSpaces"
"PartialWeightDistribution"
"Partition"
"Partition2WGtable"
"PartitionAction"
"PartitionCovers"
"PartitionToWeight"
"Partitions"
"PascalTriangle"
"PatchGerms"
"Path"
"PathExists"
"PathGraph"
"PathTree"
"PathTreeCyclicModule"
"Paths"
"Peakwords"
"PellEquation"
"Pencil"
"PerfectForms"
"PerfectGroupDatabase"
"PerfectSubgroups"
"PeriodMapping"
"Periods"
"PermCond"
"PermRep"
"PermRepDegrees"
"PermRepKeys"
"PermRestrict"
"PermToDualMatrix"
"PermToMatrix"
"PermToWord"
"Permutation"
"PermutationAutomorphism"
"PermutationCharacter"
"PermutationCode"
"PermutationCondensation"
"PermutationGroup"
"PermutationMatrix"
"PermutationModule"
"PermutationRepresentation"
"PermutationSupport"
"Permutations"
"PermuteSequence"
"PermuteWeights"
"Pfaffian"
"Pfaffians"
"PhaseFlip"
"Phi"
"PhiInverse"
"Pi"
"PicardClass"
"PicardGaloisModule"
"PicardGroup"
"PicardGroupGeometric"
"PicardIntersectionPairing"
"PicardLattice"
"PicardNumber"
"PicardToClassGroupsMap"
"PicardToClassLatticesMap"
"PicnDescent"
"Pipe"
"Place"
"PlaceEnumCopy"
"PlaceEnumCurrent"
"PlaceEnumInit"
"PlaceEnumNext"
"PlaceEnumPosition"
"Places"
"PlacticIntegerMonoid"
"PlacticMonoid"
"PlanarDual"
"PlanarGraphDatabase"
"PlaneCurve"
"PlaneToDisc"
"Plethysm"
"PlotkinAsymptoticBound"
"PlotkinBound"
"PlotkinSum"
"Plurigenus"
"Point"
"PointDegree"
"PointDegrees"
"PointGraph"
"PointGroup"
"PointInInterior"
"PointIndexes"
"PointOnRegularModel"
"PointSearch"
"PointSet"
"PointToBlowUp"
"Points"
"PointsAtInfinity"
"PointsCubicModel"
"PointsFiniteField"
"PointsInGeneralPosition"
"PointsKnown"
"PointsOverSplittingField"
"PointsQI"
"PointsToLaurent"
"Polar"
"PolarToComplex"
"Polarisation"
"PolarisedVariety"
"PoleDivisor"
"Poles"
"PollardRho"
"PolyMapKernel"
"PolyToSeries"
"PolycyclicByFiniteGroup"
"PolycyclicGenerators"
"PolygonGraph"
"Polyhedron"
"PolyhedronInSublattice"
"PolyhedronWithInequalities"
"Polylog"
"PolylogD"
"PolylogDold"
"PolylogP"
"Polynomial"
"PolynomialAlgebra"
"PolynomialCoefficient"
"PolynomialMap"
"PolynomialPair"
"PolynomialRing"
"PolynomialSieve"
"PolynomialToElementarySymmetric"
"PolynomialToPowerSums"
"Polynomials"
"Polytope"
"PolytopeCanonicalFanoDim2"
"PolytopeCanonicalFanoDim3"
"PolytopeLDP"
"PolytopeOfProjectiveSpace"
"PolytopeOfWPS"
"PolytopeReflexiveFanoDim2"
"PolytopeReflexiveFanoDim3"
"PolytopeSmoothFano"
"PolytopeSmoothFanoDim2"
"PolytopeSmoothFanoDim3"
"PolytopeSmoothFanoDim4"
"PolytopeSmoothFanoDim5"
"PolytopeSmoothFanoDim6"
"PolytopeSmoothFanoDim7"
"PolytopeSmoothFanoDim8"
"PolytopeTerminalFanoDim2"
"PolytopeTerminalFanoDim3"
"PolytopeToLaurent"
"PolytopelReflexiveDim2"
"Pop"
"PosRootsWeightBasis"
"Position"
"PositiveConjugates"
"PositiveConjugatesProcess"
"PositiveCoroots"
"PositiveDefiniteForm"
"PositiveGammaOrbitsOnRoots"
"PositiveQuadrant"
"PositiveRelativeRoots"
"PositiveRoots"
"PositiveRootsPerm"
"PositiveSum"
"PossibleCanonicalDissidentPoints"
"PossibleDiscriminants"
"PossibleSimpleCanonicalDissidentPoints"
"PowHom"
"Power"
"PowerFormalSet"
"PowerFreePart"
"PowerGroup"
"PowerIdeal"
"PowerIndexedSet"
"PowerMap"
"PowerMultiset"
"PowerPolynomial"
"PowerProduct"
"PowerProductSimplify"
"PowerRSpace"
"PowerRelation"
"PowerResidueCode"
"PowerSequence"
"PowerSeries"
"PowerSeriesAlgebra"
"PowerSeriesRing"
"PowerSet"
"PowerStructure"
"PowerSumToCoefficients"
"PowerSumToElementaryMatrix"
"PowerSumToElementarySymmetric"
"PowerSumToHomogeneousMatrix"
"PowerSumToMonomialMatrix"
"PowerSumToSchurMatrix"
"PrePatchMaps"
"Precision"
"PrecisionBound"
"Preimage"
"PreimageConstructorViaInverse"
"PreimageIdeal"
"PreimageRing"
"PreparataCode"
"Preprune"
"Presentation"
"PresentationIsSmall"
"PresentationLength"
"PresentationMatrix"
"PreviousPrime"
"PrimDecomp"
"PrimalityCertificate"
"Primary"
"PrimaryAbelianBasis"
"PrimaryAbelianInvariants"
"PrimaryAlgebra"
"PrimaryBasis"
"PrimaryComponents"
"PrimaryDecomposition"
"PrimaryIdeal"
"PrimaryInvariantFactors"
"PrimaryInvariants"
"PrimaryRationalForm"
"PrimaryRepresentation"
"Prime"
"PrimeBasis"
"PrimeComponents"
"PrimeDivisors"
"PrimeFactorisation"
"PrimeField"
"PrimeForm"
"PrimeIdeal"
"PrimeOrderElement"
"PrimePolynomials"
"PrimePowerKernelMatrix"
"PrimePowerNullspaceMatrix"
"PrimePowerOrderElement"
"PrimePowerRepresentation"
"PrimeRing"
"Primes"
"PrimesInInterval"
"PrimesUpTo"
"PrimitiveEisensteinSeries"
"PrimitiveElement"
"PrimitiveGroup"
"PrimitiveGroupDatabaseLimit"
"PrimitiveGroupDescription"
"PrimitiveGroupIdentification"
"PrimitiveGroupLabelFromSims"
"PrimitiveGroupLabelToSims"
"PrimitiveGroupProcess"
"PrimitiveGroupSims"
"PrimitiveGroups"
"PrimitiveIdempotentData"
"PrimitiveIdempotents"
"PrimitiveLatticeVector"
"PrimitivePart"
"PrimitivePolynomial"
"PrimitiveQuotient"
"PrimitiveRoot"
"PrimitiveWreathProduct"
"PrincipalCharacter"
"PrincipalDivisor"
"PrincipalDivisorMap"
"PrincipalIdealMap"
"PrincipalPolarisation"
"PrincipalPrimesUpTo"
"PrincipalSeriesParameters"
"PrincipalUnitGroup"
"PrincipalUnitGroupGenerators"
"PrintBase"
"PrintCategory"
"PrintCoding"
"PrintCollector"
"PrintExtensions"
"PrintFile"
"PrintFileMagma"
"PrintGenerators"
"PrintGrpLie"
"PrintGrpLieElt"
"PrintMagma"
"PrintMapping"
"PrintMatgMagma"
"PrintModuleMagma"
"PrintModules"
"PrintName"
"PrintPairs"
"PrintPrimes"
"PrintProbabilityDistribution"
"PrintProcess"
"PrintQuotient"
"PrintRelat"
"PrintRelatorLengths"
"PrintRelators"
"PrintSeries"
"PrintSortedProbabilityDistribution"
"PrintStatus"
"PrintSylowSubgroupStructure"
"PrintSymbols"
"PrintTermsOfDegree"
"PrintToPrecision"
"PrintTreesSU"
"PrintoutData"
"Probability"
"ProbabilityDistribution"
"ProbableAutomorphismGroup"
"ProbableRadicalDecomposition"
"Probit"
"ProcessLadder"
"Product"
"ProductCode"
"ProductProjectiveSpace"
"ProductRepresentation"
"ProfileGraph"
"ProfileHTMLOutput"
"ProfilePrintByTotalCount"
"ProfilePrintByTotalTime"
"ProfilePrintChildrenByCount"
"ProfilePrintChildrenByTime"
"ProfilePrintDescendantsByCount"
"ProfilePrintDescendantsByTime"
"ProfilePrintGraphByCount"
"ProfilePrintGraphByTime"
"ProfilePruneGraphByCount"
"ProfilePruneGraphByTime"
"ProfileReset"
"Proj"
"ProjKilling"
"Projection"
"ProjectionCentres"
"ProjectionCodimensions"
"ProjectionFromNonsingularPoint"
"ProjectionIndices"
"ProjectionMap"
"ProjectionMatrix"
"ProjectionOnto"
"ProjectionOntoImage"
"ProjectionSubtypes"
"ProjectionTypes"
"Projections"
"ProjectiveClosure"
"ProjectiveClosureMap"
"ProjectiveCover"
"ProjectiveDimension"
"ProjectiveEmbedding"
"ProjectiveFunction"
"ProjectiveGammaLinearGroup"
"ProjectiveGammaUnitaryGroup"
"ProjectiveGeneralLinearGroup"
"ProjectiveGeneralOrthogonalGroup"
"ProjectiveGeneralOrthogonalGroupMinus"
"ProjectiveGeneralOrthogonalGroupPlus"
"ProjectiveGeneralUnitaryGroup"
"ProjectiveIndecomposable"
"ProjectiveIndecomposableDimensions"
"ProjectiveIndecomposableModules"
"ProjectiveIndecomposables"
"ProjectiveLine"
"ProjectiveMap"
"ProjectiveModule"
"ProjectiveOmega"
"ProjectiveOmegaMinus"
"ProjectiveOmegaPlus"
"ProjectiveOrder"
"ProjectivePatchMap"
"ProjectivePlane"
"ProjectivePolynomial"
"ProjectiveRationalFunction"
"ProjectiveRepresentative"
"ProjectiveResolution"
"ProjectiveResolutionPGroup"
"ProjectiveSigmaLinearGroup"
"ProjectiveSigmaSuzukiGroup"
"ProjectiveSigmaSymplecticGroup"
"ProjectiveSigmaUnitaryGroup"
"ProjectiveSpace"
"ProjectiveSpaceAsToricVariety"
"ProjectiveSpecialLinearGroup"
"ProjectiveSpecialOrthogonalGroup"
"ProjectiveSpecialOrthogonalGroupMinus"
"ProjectiveSpecialOrthogonalGroupPlus"
"ProjectiveSpecialUnitaryGroup"
"ProjectiveSuzukiGroup"
"ProjectiveSymplecticGroup"
"Projectivity"
"Prospector"
"Prune"
"PseudoAdd"
"PseudoAddMultiple"
"PseudoBasis"
"PseudoCholeskyForm"
"PseudoCholeskyFormToCholesky"
"PseudoDimension"
"PseudoGenerators"
"PseudoInverse"
"PseudoMatrix"
"PseudoMordellWeilGroup"
"PseudoRandom"
"PseudoReflection"
"PseudoReflectionGroup"
"PseudoRemainder"
"Pseudoreflection"
"Psi"
"PthPowerMapping"
"PuiseuxExpansion"
"PuiseuxExponents"
"PuiseuxExponentsCommon"
"PuiseuxSeriesRing"
"PuiseuxToParametrization"
"Pullback"
"PunctureCode"
"PureBraidGroup"
"PureLattice"
"PureRayIndices"
"PureRays"
"PurelyRamifiedExtension"
"PushThroughIsogeny"
"Pushforward"
"Pushout"
"Put"
"PutInZ"
"Puts"
"Pyramid"
"QECC"
"QECCLowerBound"
"QECCUpperBound"
"QFactorialisation"
"QMatrix"
"QNF"
"QRCode"
"QRCodeZ4"
"QSpace"
"QUAToIntegralUEAMap"
"Qround"
"QuadeIdeal"
"QuadraticClassGroupTwoPart"
"QuadraticField"
"QuadraticForm"
"QuadraticFormCS"
"QuadraticFormMatrix"
"QuadraticFormMinus"
"QuadraticFormPlus"
"QuadraticFormPolynomial"
"QuadraticFormType"
"QuadraticForms"
"QuadraticNorm"
"QuadraticNormForm"
"QuadraticOrder"
"QuadraticSpace"
"QuadraticTransformation"
"QuadraticTwist"
"QuadraticTwists"
"QuadricIntersection"
"QuantizedUEA"
"QuantizedUEAlgebra"
"QuantizedUniversalEnvelopingAlgebra"
"QuantumBasisElement"
"QuantumBinaryErrorGroup"
"QuantumCode"
"QuantumCompactFormat"
"QuantumCyclicCode"
"QuantumDimension"
"QuantumErrorGroup"
"QuantumExtendedFormat"
"QuantumQuasiCyclicCode"
"QuantumState"
"QuantumTwistedCode"
"Quartic"
"QuarticG4Covariant"
"QuarticG6Covariant"
"QuarticHSeminvariant"
"QuarticIInvariant"
"QuarticJInvariant"
"QuarticMinimise"
"QuarticNumberOfRealRoots"
"QuarticPSeminvariant"
"QuarticQSeminvariant"
"QuarticRSeminvariant"
"QuarticReduce"
"QuasiCyclicCode"
"QuasiTwistedCyclicCode"
"QuaternaryPlotkinSum"
"Quaternion"
"QuaternionAlgebra"
"QuaternionOrder"
"QuaternionicAutomorphismGroup"
"QuaternionicComplement"
"QuaternionicDual"
"QuaternionicGModule"
"QuaternionicMatrixGroupDatabase"
"QuaternionicTranspose"
"QuickLLL"
"QuickLLLGram"
"Quotient"
"QuotientComplex"
"QuotientDimension"
"QuotientFactorization"
"QuotientGenerators"
"QuotientGradings"
"QuotientGroup"
"QuotientMap"
"QuotientModule"
"QuotientModuleAction"
"QuotientModuleImage"
"QuotientRepresentation"
"QuotientRing"
"QuotientWithPullback"
"Quotrem"
"RCLazySeries"
"RF"
"RGenerators"
"RHS"
"RMatrixSpace"
"RMatrixSpaceWithBasis"
"RModule"
"RModuleWithAction"
"RModuleWithBasis"
"RPolynomial"
"RSAModulus"
"RSKCorrespondence"
"RSpace"
"RSpaceWithBasis"
"RSpaceWithModuli"
"RWSGroup"
"RWSMonoid"
"Radical"
"RadicalDecomposition"
"RadicalExtension"
"RadicalQuotient"
"RaisePrecision"
"RamificationDegree"
"RamificationDivisor"
"RamificationField"
"RamificationGroup"
"RamificationIndex"
"RamificationPoints"
"RamifiedPlaces"
"RamifiedPrimes"
"RamifiedRepresentation"
"Ranbig"
"Random"
"RandomAdditiveCode"
"RandomAutomorphism"
"RandomBaseChange"
"RandomBits"
"RandomCFP"
"RandomCone"
"RandomConjugate"
"RandomConsecutiveBits"
"RandomCurveByGenus"
"RandomDigraph"
"RandomElementOfOrder"
"RandomExtension"
"RandomGLnZ"
"RandomGenusOneModel"
"RandomGraph"
"RandomHookWalk"
"RandomIrreduciblePolynomial"
"RandomLinearCode"
"RandomLowerTriangularMatrix"
"RandomMatrix"
"RandomModel"
"RandomNodalCurve"
"RandomPartition"
"RandomPlace"
"RandomPlaneCurve"
"RandomPlanePoints"
"RandomPolytope"
"RandomPositiveCone"
"RandomPrime"
"RandomPrimePolynomial"
"RandomProcess"
"RandomProcessWithValues"
"RandomProcessWithWords"
"RandomProcessWithWordsAndValues"
"RandomQuantumCode"
"RandomRightIdeal"
"RandomSLnZ"
"RandomSchreier"
"RandomSchreierBounded"
"RandomSchreierCoding"
"RandomSequence"
"RandomSequenceBlumBlumShub"
"RandomSequenceRSA"
"RandomSparseMatrix"
"RandomSubcomplex"
"RandomSubset"
"RandomSymplecticMatrix"
"RandomTableau"
"RandomTransformation"
"RandomTree"
"RandomUnit"
"RandomUpperTriangularMatrix"
"RandomWord"
"Rank"
"RankBound"
"RankBounds"
"RankZ2"
"RanksOfPrimitiveIdempotents"
"RationalCharacterDecomposition"
"RationalCharacterSchurIndex"
"RationalCharacterTable"
"RationalCharacterTableRSpace"
"RationalCurve"
"RationalCuspidalSubgroup"
"RationalCusps"
"RationalDifferentialField"
"RationalExtensionRepresentation"
"RationalField"
"RationalForm"
"RationalFunction"
"RationalFunctionField"
"RationalFunctions"
"RationalGCD"
"RationalHomology"
"RationalMap"
"RationalMapping"
"RationalMatrixGroupDatabase"
"RationalPart"
"RationalPoint"
"RationalPoints"
"RationalPointsByFibration"
"RationalPointsGeneric"
"RationalPuiseux"
"RationalReconstruction"
"RationalRootDecomposition"
"RationalRoundUp"
"RationalScroll"
"RationalSequence"
"RationalSolutions"
"RationalTensorSearch"
"Rationals"
"RationalsAsNumberField"
"Ratpoints"
"RawBasket"
"RawCurve"
"RawEval"
"Ray"
"RayClassField"
"RayClassGroup"
"RayClassGroupDiscLog"
"RayLattice"
"RayLatticeMap"
"RayResidueRing"
"Rays"
"Re"
"Reachable"
"Read"
"ReadAtlasMatrix"
"ReadBinary"
"ReadBytes"
"ReadEntry1"
"ReadEntryQECC"
"ReadIntegralMatrix"
"ReadTest"
"Real"
"RealEmbeddings"
"RealExtensions"
"RealField"
"RealHomology"
"RealInjection"
"RealMatrix"
"RealPeriod"
"RealPlaces"
"RealRoots"
"RealSigns"
"RealTamagawaNumber"
"RealToIntegerExponent"
"RealVectorSpace"
"RealVolume"
"RealWeakApproximation"
"Realtime"
"RecToGRBskt"
"RecToGRCrvS"
"RecToGRPtS"
"RecToGRSch"
"ReciprocalPolynomial"
"Recognise3D4"
"RecogniseAdjoint"
"RecogniseAlternating"
"RecogniseAlternatingOrSymmetric"
"RecogniseAlternatingSquare"
"RecogniseClassical"
"RecogniseClassicalSSA"
"RecogniseDelta"
"RecogniseExchangeSSA"
"RecogniseExtendedSL"
"RecogniseExtendedSp"
"RecogniseG2"
"RecogniseLargeRee"
"RecogniseRee"
"RecogniseSL"
"RecogniseSL2"
"RecogniseSL3"
"RecogniseSU3"
"RecogniseSU4"
"RecogniseSp4Even"
"RecogniseSpOdd"
"RecogniseStarAlgebra"
"RecogniseSymmetric"
"RecogniseSymmetricSquare"
"RecogniseSz"
"RecognizeClassical"
"RecognizeExtendedSL"
"RecognizeExtendedSp"
"RecognizeLargeRee"
"RecognizeRee"
"RecognizeSL"
"RecognizeSL2"
"RecognizeSpOdd"
"RecognizeStarAlgebra"
"RecognizeSz"
"Reconstruct"
"ReconstructBasis"
"ReconstructLatticeBasis"
"ReconstructionEnvironment"
"Rectify"
"RecursiveCoefficientLazySeries"
"RecursiveGrphRes"
"RedoEnumeration"
"Reduce"
"ReduceBasis"
"ReduceCharacters"
"ReduceCluster"
"ReduceCubicSurface"
"ReduceDefiningGenerators"
"ReduceGenerators"
"ReduceGroebnerBasis"
"ReducePlaneCurve"
"ReduceQuadrics"
"ReduceToTriangleVertices"
"ReduceVector"
"ReducedAteTPairing"
"ReducedBasis"
"ReducedDecomposition"
"ReducedDiscriminant"
"ReducedEtaTPairing"
"ReducedFactorisation"
"ReducedForm"
"ReducedForms"
"ReducedGramMatrix"
"ReducedLegendreEquation"
"ReducedLegendreModel"
"ReducedLegendrePolynomial"
"ReducedMinimalWeierstrassModel"
"ReducedModel"
"ReducedOrbits"
"ReducedPoint"
"ReducedSubscheme"
"ReducedTatePairing"
"ReducedWamelenModel"
"Reduction"
"ReductionOrbit"
"ReductionStep"
"ReductionType"
"Reductions"
"Reductions_Factor"
"ReductiveLieAlgebraOld"
"ReductiveMatrixLieAlgebraOld"
"ReductiveRank"
"ReductiveType"
"Reductum"
"Ree"
"ReeBNpair"
"ReeConjugacy"
"ReeConjugacyClasses"
"ReeConstructiveMembership"
"ReeCrossCharacteristicReduction"
"ReeDiagonalisation"
"ReeElementToWord"
"ReeFindOrbitPoint"
"ReeFixedPoints"
"ReeGeneralRecogniser"
"ReeGroup"
"ReeInvolutionCentraliser"
"ReeIrreducibleRepresentation"
"ReeMaximalSubgroups"
"ReeMaximalSubgroupsConjugacy"
"ReePermutationRepresentation"
"ReePointStabiliser"
"ReeRecognition"
"ReeReduction"
"ReeRedundantSLPGenerators"
"ReeResetRandomProcess"
"ReeSLPCoercion"
"ReeStabiliser"
"ReeStandardConstructiveMembership"
"ReeStandardCopy"
"ReeStandardGenerators"
"ReeStandardMaximalSubgroups"
"ReeStandardMembership"
"ReeStandardRecogniser"
"ReeSylow"
"ReeSylowConjugacy"
"ReeSymmetricSquareDecompose"
"ReeTensorDecompose"
"ReedMullerCode"
"ReedMullerCodeQRMZ4"
"ReedMullerCodeRMZ4"
"ReedMullerCodeZ4"
"ReedMullerCodesLRMZ4"
"ReedMullerCodesRMZ4"
"ReedSolomonCode"
"ReesIdeal"
"RefineSection"
"Reflection"
"ReflectionFactors"
"ReflectionGroup"
"ReflectionMatrices"
"ReflectionMatrix"
"ReflectionPermutation"
"ReflectionPermutations"
"ReflectionSubgroup"
"ReflectionTable"
"ReflectionWord"
"ReflectionWords"
"Reflections"
"Regexp"
"RegularLDPCEnsemble"
"RegularModel"
"RegularRepresentation"
"RegularSequence"
"RegularSpliceDiagram"
"RegularSubgroups"
"Regularity"
"Regulator"
"RegulatorLowerBound"
"RelationFromUnit"
"RelationIdeal"
"RelationMatrix"
"RelationModule"
"Relations"
"RelativeBasis"
"RelativeField"
"RelativeInvariant"
"RelativePrecision"
"RelativePrecisionOfDerivation"
"RelativeProj"
"RelativeRank"
"RelativeRootDatum"
"RelativeRootElement"
"RelativeRootSpace"
"RelativeRoots"
"RelativeSelmerElement"
"RelevantCosets"
"Remove"
"RemoveBasisElt"
"RemoveColumn"
"RemoveConstraint"
"RemoveCrossTerms"
"RemoveEdge"
"RemoveEdges"
"RemoveFactor"
"RemoveFiles"
"RemoveHypersurface"
"RemoveIrreducibles"
"RemovePowersInPlace"
"RemoveRow"
"RemoveRowColumn"
"RemoveRowContents"
"RemoveVertex"
"RemoveVertices"
"RemoveWeight"
"RemoveZeroRows"
"Rep"
"RepChevalleyBasis"
"RepetitionCode"
"ReplacePrimes"
"ReplaceRelation"
"ReplicationNumber"
"Representation"
"RepresentationDegree"
"RepresentationDimension"
"RepresentationMatrix"
"RepresentationMatrixOfMatrix"
"RepresentationNumber"
"RepresentationSum"
"RepresentationType"
"Representations"
"Representative"
"RepresentativeCocycles"
"RepresentativePoint"
"RepresentativePoints"
"Representatives"
"RepresentsFreeModule"
"RepsDBGet"
"RepsSmallGet"
"Res_H2_G_QmodZ"
"RescaledDual"
"ResetMaximumMemoryUsage"
"ResetMinimumWeightBounds"
"Residual"
"Residue"
"ResidueClassDegree"
"ResidueClassField"
"ResidueClassRing"
"ResidueCode"
"ResidueField"
"ResidueMatrixRing"
"ResidueSystem"
"Resolution"
"ResolutionData"
"ResolutionGraph"
"ResolutionGraphVertex"
"ResolutionSpine"
"ResolveAffineCurve"
"ResolveAffineMonicSurface"
"ResolveFanMap"
"ResolveLinearSystem"
"ResolveProjectiveCurve"
"ResolveProjectiveSurface"
"ResolvedDualFan"
"Restrict"
"RestrictDegree"
"RestrictEndomorphism"
"RestrictField"
"RestrictPartitionLength"
"RestrictParts"
"RestrictResolution"
"RestrictedPartitions"
"RestrictedSubalgebra"
"Restriction"
"RestrictionChainMap"
"RestrictionData"
"RestrictionMap"
"RestrictionMatrix"
"RestrictionOfGenerators"
"RestrictionOfScalars"
"RestrictionOfScalarsToQ"
"RestrictionToImage"
"RestrictionToPatch"
"RestrictionToSubtorus"
"Resultant"
"ResumeEnumeration"
"Retrieve"
"Reverse"
"ReverseColumns"
"ReverseRows"
"Reversion"
"RevertClass"
"Rewind"
"Rewrite"
"ReynoldsOperator"
"Rho"
"RichelotIsogenousSurface"
"RichelotIsogenousSurfaces"
"RiemannRochBasis"
"RiemannRochCoordinates"
"RiemannRochDimension"
"RiemannRochPolytope"
"RiemannRochSpace"
"RiemannZeta"
"RightAction"
"RightAdjointMatrix"
"RightAnnihilator"
"RightCancellation"
"RightCosetSpace"
"RightDescentSet"
"RightExactExtension"
"RightGCD"
"RightGcd"
"RightGreatestCommonDivisor"
"RightHandFactors"
"RightIdeal"
"RightIdealClasses"
"RightInverse"
"RightInverseMorphism"
"RightIsomorphism"
"RightLCM"
"RightLcm"
"RightLeastCommonMultiple"
"RightMixedCanonicalForm"
"RightNormalForm"
"RightOrder"
"RightRegularModule"
"RightRepresentationMatrix"
"RightRing"
"RightString"
"RightStringLength"
"RightTransversal"
"RightZeroExtension"
"Ring"
"RingClassField"
"RingClassGroup"
"RingGeneratedBy"
"RingMap"
"RingOfFractions"
"RingOfIntegers"
"RombergQuadrature"
"Root"
"RootAction"
"RootAutomorphism"
"RootClosure"
"RootDatum"
"RootDecomposition"
"RootGSet"
"RootHeight"
"RootImages"
"RootLattice"
"RootNorm"
"RootNorms"
"RootNumber"
"RootOfUnity"
"RootPermutation"
"RootPosition"
"RootSequence"
"RootSide"
"RootSpace"
"RootSystem"
"RootSystemMatrix"
"RootVertex"
"Roots"
"RootsAndCoroots"
"RootsInSplittingField"
"RootsNonExact"
"RosenhainInvariants"
"Rotate"
"RotateRows"
"RotateWord"
"Rotation"
"Round"
"RoundDownDivisor"
"RoundReal"
"RoundUpDivisor"
"Row"
"RowInsert"
"RowLength"
"RowNullSpace"
"RowReductionHomomorphism"
"RowSequence"
"RowSkewLength"
"RowSpace"
"RowSubmatrix"
"RowSubmatrixRange"
"RowWeight"
"RowWeights"
"RowWord"
"Rows"
"Rowspace"
"RowvColSplit"
"Rtest"
"RubinSilverbergPolynomials"
"RuledSurface"
"S1"
"S2"
"SAT"
"SClassGroup"
"SClassGroupAbelianInvariants"
"SClassGroupExactSequence"
"SClassNumber"
"SEA"
"SFA"
"SFAElementary"
"SFAHomogeneous"
"SFAMonomial"
"SFAPower"
"SFASchur"
"SHA1"
"SIntegralDesbovesPoints"
"SIntegralLjunggrenPoints"
"SIntegralPoints"
"SIntegralQuarticPoints"
"SL"
"SL2Characteristic"
"SL2ElementToWord"
"SL2Presentation"
"SL2Triple"
"SL3ElementToWord"
"SL4Covariants"
"SL4Invariants"
"SLPGroup"
"SLPolynomialRing"
"SO"
"SOMinus"
"SOPlus"
"SPolynomial"
"SPrimesUpTo"
"SPrincipalDivisorMap"
"SPrintCategory"
"SQUFOF"
"SQ_check"
"SQextSetup"
"SQsplitSetup"
"SRegulator"
"SU"
"SU3ElementToWord"
"SUnitAction"
"SUnitCohomologyProcess"
"SUnitDiscLog"
"SUnitGroup"
"SUnitSubGroup"
"SVMForLattAuto"
"SVMForLattIso"
"SVPermutation"
"SVWord"
"SafeInverseUniformiser"
"SafeUniformiser"
"SafeUniformizer"
"SatisfiesSL2Presentation"
"SatisfiesSzPresentation"
"Saturate"
"SaturateSheaf"
"Saturation"
"ScalarField"
"ScalarLattice"
"ScalarMatrix"
"ScalarProduct"
"ScalarSparseMatrix"
"ScaleGenerators"
"ScaleMatrix"
"ScaledIgusaInvariants"
"ScaledLattice"
"ScalingFactor"
"Scheme"
"SchemeGraphMap"
"SchemeGraphMapToSchemeMap"
"SchemeMap"
"SchemeThrough"
"SchreierGenerators"
"SchreierGraph"
"SchreierSystem"
"SchreierVector"
"SchreierVectors"
"Schur"
"SchurIndex"
"SchurIndexGroup"
"SchurIndices"
"SchurNorm"
"SchurToElementaryMatrix"
"SchurToHomogeneousMatrix"
"SchurToMonomialMatrix"
"SchurToPowerSumMatrix"
"Search"
"SearchEqual"
"SearchForDecomposition"
"SearchForIsomorphism"
"SearchPGroups"
"Sec"
"SecantVariety"
"Sech"
"SecondaryInvariants"
"SecondaryInvariantsNonModular"
"SectionCentraliser"
"SectionCentralizer"
"Sections"
"Seek"
"Self"
"SelfComplementaryGraphDatabase"
"SelfIntersection"
"Selfintersection"
"Selfintersections"
"SelmerGroup"
"SemiInvariantsOfDegree"
"SemiLinearGroup"
"SemiOrthogonalBasis"
"SemiOrthogonalBasis2"
"SemiSimpleCohomologyProcess"
"SemiSimpleType"
"Semidir"
"SemidirectProduct"
"SemisimpleEFAModuleMaps"
"SemisimpleEFAModules"
"SemisimpleEFASeries"
"SemisimpleGeneratorData"
"SemisimpleLieAlgebraOld"
"SemisimpleMatrixLieAlgebraOld"
"SemisimpleRank"
"SemisimpleSubLie"
"SemisimpleSubLieDatabase"
"SemisimpleType"
"SeparatingElement"
"SeparationVertices"
"Seq"
"SeqFact"
"Seqelt"
"Seqint"
"Seqlist"
"Seqset"
"SequenceOfRadicalGenerators"
"SequenceToCompositionFactors"
"SequenceToConjugacyClasses"
"SequenceToElement"
"SequenceToFactorization"
"SequenceToInteger"
"SequenceToList"
"SequenceToMultiset"
"SequenceToProcess"
"SequenceToSet"
"SequenceToSubgroups"
"SeriesFactors"
"SeriesProcess"
"SerreBound"
"Set"
"SetAlgorithm"
"SetAllInvariantsOfDegree"
"SetArrows"
"SetAssertions"
"SetAutoColumns"
"SetAutoCompact"
"SetAxisMultiplicities"
"SetBaseGerm"
"SetBeep"
"SetBufferSize"
"SetCanonicalClass"
"SetClassGroupBoundFactorBasis"
"SetClassGroupBoundGenerators"
"SetClassGroupBoundMaps"
"SetClassGroupBounds"
"SetColumns"
"SetConicSubfieldMethodDegreeBound"
"SetDebugOnError"
"SetDefaultRealField"
"SetDefaultRealFieldPrecision"
"SetDefining"
"SetDisplayLevel"
"SetEchoInput"
"SetElementPrintFormat"
"SetEntry"
"SetEvaluationComparison"
"SetForceCFP"
"SetFreezeAll"
"SetGaloisMultiplicities"
"SetGlobalTCParameters"
"SetHeckeBound"
"SetHelpExternalBrowser"
"SetHelpExternalSystem"
"SetHelpUseExternalBrowser"
"SetHelpUseExternalSystem"
"SetHistorySize"
"SetIgnoreEof"
"SetIgnorePrompt"
"SetIgnoreSpaces"
"SetIloadAllowEsc"
"SetIndent"
"SetIntegerSolutionVariables"
"SetKantLevel"
"SetKantPrecision"
"SetKantPrinting"
"SetKaratsubaThreshold"
"SetLMGSchreierBound"
"SetLibraries"
"SetLibraryRoot"
"SetLineEditor"
"SetLogFile"
"SetLowerBound"
"SetMS"
"SetMark"
"SetMaximiseFunction"
"SetMemoryExtensionSize"
"SetMemoryLimit"
"SetMultiplicities"
"SetNeighbouringGerms"
"SetNthreads"
"SetObjectiveFunction"
"SetOptions"
"SetOrderMaximal"
"SetOrderTorsionUnit"
"SetOrderUnitsAreFundamental"
"SetOutputFile"
"SetPath"
"SetPowerPrinting"
"SetPrePatchMaps"
"SetPrecision"
"SetPresentation"
"SetPreviousSize"
"SetPrimalityProof"
"SetPrimaryInvariants"
"SetPrimitiveElement"
"SetPrintKetsInteger"
"SetPrintLevel"
"SetProcessParameters"
"SetProfile"
"SetProjectivePatchMaps"
"SetPrompt"
"SetQuaternionOrder"
"SetQuitOnError"
"SetRationalBasis"
"SetRows"
"SetSeed"
"SetSelfintersections"
"SetShellCompletion"
"SetShowPromptAlways"
"SetSparseGCD"
"SetTargetRing"
"SetToIndexedSet"
"SetToMultiset"
"SetToSequence"
"SetTraceback"
"SetTransGroupIDMany"
"SetTransverseIntersections"
"SetUpperBound"
"SetUserProcessData"
"SetVerbose"
"SetVerboseMS"
"SetViMode"
"SetsOfSingularPlaces"
"Setseq"
"Seysen"
"SeysenGram"
"Shape"
"Sheaf"
"SheafHomomorphism"
"SheafHoms"
"SheafOfDifferentials"
"ShephardTodd"
"ShephardToddNumber"
"ShephardToddOld"
"Shift"
"ShiftLeft"
"ShiftRight"
"ShiftToDegreeZero"
"ShiftValuation"
"ShimuraConjugates"
"ShimuraReduceUnit"
"ShortBasis"
"ShortCosets"
"ShortLift"
"ShortSchreierVectorCoding"
"ShortSubset"
"ShortVectors"
"ShortVectorsMatrix"
"ShortVectorsProcess"
"ShortenCode"
"ShortenStabilizerCode"
"ShortestPath"
"ShortestPaths"
"ShortestVectors"
"ShortestVectorsMatrix"
"ShowDL"
"ShowIdentifiers"
"ShowMemoryUsage"
"ShowOptions"
"ShowPrevious"
"ShowValues"
"ShrikhandeGraph"
"ShrinkingGenerator"
"SiegelTransformation"
"Sieve"
"SieveFactorBaseBound"
"SigTable"
"Sign"
"SignDecomposition"
"Signature"
"Signatures relevant to Any:"
"SiksekBound"
"SilvermanBound"
"SimNEQ"
"SimilarityGroup"
"SimpleCanonicalDissidentPoints"
"SimpleCohomologyDimensions"
"SimpleCohomologyProcess"
"SimpleCoreflectionMatrices"
"SimpleCoroots"
"SimpleEpimorphisms"
"SimpleExtension"
"SimpleGraphDatabase"
"SimpleGroupName"
"SimpleGroupOfLieType"
"SimpleGroupOrder"
"SimpleGroupsWithOrder"
"SimpleGroupsWithOrderDividing"
"SimpleHomologyDimensions"
"SimpleLieAlgebraOld"
"SimpleMatrixLieAlgebraOld"
"SimpleModule"
"SimpleOrders"
"SimpleParameters"
"SimpleQuotientAlgebras"
"SimpleQuotientProcess"
"SimpleQuotients"
"SimpleReflectionMatrices"
"SimpleReflectionPermutations"
"SimpleReflections"
"SimpleRelativeRoots"
"SimpleRoots"
"SimpleStarAlgebra"
"SimpleSubgroups"
"Simplex"
"SimplexAlphaCodeZ4"
"SimplexBetaCodeZ4"
"SimplexCode"
"SimplicialComplex"
"SimplicialProjectivePlane"
"SimplicialSubcone"
"SimplicialSubdivision"
"SimplifiedModel"
"Simplify"
"SimplifyLength"
"SimplifyOrder"
"SimplifyPresentation"
"SimplifyRep"
"SimplyConnectedVersion"
"SimpsonQuadrature"
"SimsSchreier"
"SimsSchreierCoding"
"Sin"
"Sincos"
"SingerDifferenceSet"
"SingleSolutionTest"
"SingletonAsymptoticBound"
"SingletonBound"
"SingularCones"
"SingularFibres"
"SingularPoints"
"SingularPointsOverSplittingField"
"SingularRadical"
"SingularRank"
"SingularRankPerCodimension"
"SingularSubscheme"
"Sinh"
"SixDescent"
"Size"
"SizeDFA"
"Skeleton"
"SkewHadamardDatabase"
"SkewShape"
"SkewWeight"
"Slope"
"SlopeValuation"
"Slopes"
"SmallBasis"
"SmallGraphDatabase"
"SmallGroup"
"SmallGroup2Database"
"SmallGroupDatabase"
"SmallGroupDatabaseLimit"
"SmallGroupDecoding"
"SmallGroupEncoding"
"SmallGroupIsInsoluble"
"SmallGroupIsInsolvable"
"SmallGroupIsSoluble"
"SmallGroupIsSolvable"
"SmallGroupProcess"
"SmallGroupSF"
"SmallGroupSFId"
"SmallGroups"
"SmallModularCurve"
"SmallPeriodMatrix"
"SmallRoots"
"SmallerField"
"SmallerFieldBasis"
"SmallerFieldImage"
"SmithForm"
"Sn"
"Socket"
"SocketInformation"
"Socle"
"SocleAction"
"SocleFactor"
"SocleFactors"
"SocleImage"
"SocleKernel"
"SocleQuotient"
"SocleSeries"
"SolAutCompatible"
"SolAutDerivations"
"SolAutInducible"
"SolAutModule"
"SolubleNormalQuotient"
"SolubleQuotient"
"SolubleQuotientProcess"
"SolubleRadical"
"SolubleResidual"
"SolubleSchreier"
"SolubleSchreierCoding"
"SolubleSubgroups"
"Solution"
"SolutionSpace"
"Solutions"
"SolvableAlgebra"
"SolvableLieAlgebra"
"SolvableQuotient"
"SolvableRadical"
"SolvableResidual"
"SolvableSchreier"
"SolvableSchreierCoding"
"SolvableSubgroups"
"Solve"
"SolveByRadicals"
"SolveEquations"
"SolveForInvariants"
"SolveInProductSpace"
"SolveZeroDimIdeal"
"Sort"
"SortByMP"
"SortDecomposition"
"SortRows"
"Sp"
"SpaceOfDifferentialsFirstKind"
"SpaceOfHolomorphicDifferentials"
"Span"
"SpanZ2CodeZ4"
"SpanningFan"
"SpanningForest"
"SpanningTree"
"SparseHeckeOperator"
"SparseIrreducibleRootDatum"
"SparseMatrix"
"SparseMatrixGAP"
"SparseMatrixStructure"
"SparseRootDatum"
"SparseStandardRootDatum"
"Spec"
"SpecialEvaluate"
"SpecialLieAlgebra"
"SpecialLinearGroup"
"SpecialOrthogonalGroup"
"SpecialOrthogonalGroupMinus"
"SpecialOrthogonalGroupPlus"
"SpecialPresentation"
"SpecialUnitaryGroup"
"SpecialWeights"
"Specialization"
"SpecifyInverseMorphisms"
"SpectralRadius"
"Spectrum"
"Sphere"
"SpherePackingBound"
"SphereVolume"
"SpheresPackingBound"
"Spin"
"SpinAction"
"SpinMinus"
"SpinOrbit"
"SpinPlus"
"SpinWithImages"
"SpinorCharacters"
"SpinorGenera"
"SpinorGenerators"
"SpinorGenus"
"SpinorNorm"
"SpinorRepresentatives"
"Splice"
"SpliceDiagram"
"SpliceDiagramVertex"
"Split"
"SplitAbelianSection"
"SplitAllByValues"
"SplitCell"
"SplitCellsByValues"
"SplitCollector"
"SplitElementaryAbelianSection"
"SplitExtension"
"SplitExtensionSpace"
"SplitMaximalToralSubalgebra"
"SplitRealPlace"
"SplitRootDatum"
"SplitSection"
"SplitToralSubalgebra"
"SplitViaConic"
"SplitViaMinimalField"
"Splitcomponents"
"SplittingCartanSubalgebra"
"SplittingField"
"SplittingsOfCell"
"Sprint"
"Sqrt"
"SqrtDiscriminantPolynomial"
"SquareFree"
"SquareFreeFactorization"
"SquareLatticeGraph"
"SquareRoot"
"Squarefree"
"SquarefreeFactorization"
"SquarefreePart"
"SquarefreePartialFractionDecomposition"
"SquarefreeRoots"
"SrAutomorphism"
"SrivastavaCode"
"Stabiliser"
"StabiliserCode"
"StabiliserGroup"
"StabiliserMatrix"
"StabiliserOfSpaces"
"Stabilizer"
"StabilizerCode"
"StabilizerGroup"
"StabilizerLadder"
"StabilizerMatrix"
"StandardAction"
"StandardActionGroup"
"StandardBasis"
"StandardCusp"
"StandardForm"
"StandardFormConjugationMatrices"
"StandardFormDFA"
"StandardFormField"
"StandardFormInfo"
"StandardGenerators"
"StandardGeneratorsForLargeRee"
"StandardGraph"
"StandardGroup"
"StandardLattice"
"StandardLengthening"
"StandardMaximalTorus"
"StandardMetacyclicPGroup"
"StandardParabolicSubgroup"
"StandardPresentation"
"StandardRepresentation"
"StandardRootDatum"
"StandardRootSystem"
"StandardSimplex"
"StandardTableaux"
"StandardTableauxOfWeight"
"Star"
"StarInvolution"
"StarOnGroupAlgebra"
"StartEnumeration"
"StartNewClass"
"Stauduhar"
"SteenrodOperation"
"SteinWatkinsDatabase"
"SteinitzClass"
"SteinitzForm"
"Step1"
"Step2"
"SternsAttack"
"StirlingFirst"
"StirlingSecond"
"StitchProcesses"
"StoRModule"
"StoreClear"
"StoreFactor"
"StoreGet"
"StoreIsDefined"
"StoreKeys"
"StoreRemove"
"StoreSet"
"Stratum"
"StringToBytes"
"StringToCode"
"StringToInteger"
"StringToIntegerSequence"
"StringToLower"
"StringToRational"
"StringToUpper"
"Strings"
"Strip"
"StripWhiteSpace"
"StrippedCoding"
"StrongApproximation"
"StrongGeneratorLevel"
"StrongGenerators"
"StrongGeneratorsAtLevel"
"StronglyConnectedComponents"
"StronglyHorizontalVertices"
"StronglyIrregularValues"
"StronglyRegularGraphsDatabase"
"StructureConstant"
"StructureConstants"
"StructureSheaf"
"Sub"
"SubOrder"
"SubWeights"
"SubalgebraModule"
"SubalgebrasInclusionGraph"
"SubcanonicalCurve"
"Subcode"
"SubcodeBetweenCode"
"SubcodeWordsOfWeight"
"Subcomplex"
"SubfieldCode"
"SubfieldLattice"
"SubfieldRepresentationCode"
"SubfieldRepresentationParityCode"
"SubfieldSubcode"
"SubfieldSubplane"
"Subfields"
"Subgraph"
"Subgroup"
"SubgroupChain"
"SubgroupClasses"
"SubgroupElements"
"SubgroupElementsCT"
"SubgroupLattice"
"SubgroupLatticeOld"
"SubgroupOfTorus"
"SubgroupScheme"
"SubgroupToMatrix"
"Subgroups"
"SubgroupsData"
"SubgroupsLift"
"SubgroupsMeet"
"Sublattice"
"SublatticeClasses"
"SublatticeLattice"
"Sublattices"
"Submatrix"
"SubmatrixRange"
"Submodule"
"SubmoduleAction"
"SubmoduleClasses"
"SubmoduleImage"
"SubmoduleLattice"
"SubmoduleLatticeAbort"
"Submodules"
"SubnormalSeries"
"Subring"
"Subsequences"
"Subsets"
"Substitute"
"SubstituteCyclicJoins"
"SubstituteString"
"Substring"
"SubsystemSubgroup"
"Subword"
"SuccessiveMinima"
"SuggestedPrecision"
"Sum"
"SumNorm"
"SumOf"
"SumOfBettiNumbersOfSimpleModules"
"SumOfDivisors"
"SumOfImages"
"SumOfMorphismImages"
"Summands"
"SuperGroup"
"SuperScheme"
"SuperSummitCanonicalLength"
"SuperSummitInfimum"
"SuperSummitProcess"
"SuperSummitRepresentative"
"SuperSummitSet"
"SuperSummitSupremum"
"Superlattice"
"SupersingularEllipticCurve"
"SupersingularInvariants"
"SupersingularModule"
"SupersingularPoints"
"SupersingularPolynomial"
"Supplement"
"Supplements"
"Support"
"SupportOverSplittingField"
"SupportingCone"
"SupportingHyperplane"
"SupportsExtension"
"Supremum"
"SurjectivePart"
"Suspension"
"SuzukiBNpair"
"SuzukiConjugacy"
"SuzukiConjugateRecogniser"
"SuzukiConstructiveMembership"
"SuzukiCyclicEigenvalues"
"SuzukiFindOvoidPoints"
"SuzukiGeneralRecogniser"
"SuzukiGroup"
"SuzukiIrreducibleRepresentation"
"SuzukiMaximalSubgroups"
"SuzukiMaximalSubgroupsConjugacy"
"SuzukiNonSplit6Dim"
"SuzukiOddCharacteristicReduction"
"SuzukiPermutationRepresentation"
"SuzukiPointStabiliser"
"SuzukiRecognition"
"SuzukiReduction"
"SuzukiResetRandomProcess"
"SuzukiSmallFieldReduction"
"SuzukiStabiliser"
"SuzukiStandardConstructiveMembership"
"SuzukiStandardGeneratorsNaturalRep"
"SuzukiStandardMaximalSubgroups"
"SuzukiStandardMembership"
"SuzukiStandardRecogniser"
"SuzukiSylow"
"SuzukiSylowConjugacy"
"SuzukiTensorDecompose"
"SwapColumns"
"SwapElements"
"SwapExtension"
"SwapRows"
"SwinnertonDyerPolynomial"
"Switch"
"SwitchNullMatrix"
"Sylow"
"SylowBasis"
"SylowSubgroup"
"SylowSystem"
"SylvesterMatrix"
"Sym"
"SymmetricBilinearForm"
"SymmetricBilinearFormCS"
"SymmetricBilinearFormMinus"
"SymmetricBilinearFormPlus"
"SymmetricBilinearFormType"
"SymmetricCentralizer"
"SymmetricCharacter"
"SymmetricCharacterDegrees"
"SymmetricCharacterTable"
"SymmetricCharacterValue"
"SymmetricCharacterValues"
"SymmetricComponents"
"SymmetricElementToStandardWord"
"SymmetricElementToWord"
"SymmetricForms"
"SymmetricFunctionAlgebra"
"SymmetricFunctionAlgebraElementary"
"SymmetricFunctionAlgebraHomogeneous"
"SymmetricFunctionAlgebraMonomial"
"SymmetricFunctionAlgebraPower"
"SymmetricFunctionAlgebraSchur"
"SymmetricGroup"
"SymmetricHermitianForms"
"SymmetricMatrix"
"SymmetricNormaliser"
"SymmetricNormalizer"
"SymmetricPower"
"SymmetricPower2"
"SymmetricPowerK"
"SymmetricQuaternionicForms"
"SymmetricRepresentation"
"SymmetricRepresentationOrthogonal"
"SymmetricRepresentationSeminormal"
"SymmetricSquare"
"SymmetricSquarePreimage"
"SymmetricWeightEnumerator"
"Symmetrization"
"SymplecticComponent"
"SymplecticComponents"
"SymplecticDirectSum"
"SymplecticDual"
"SymplecticForm"
"SymplecticFormCS"
"SymplecticGroup"
"SymplecticInnerProduct"
"SymplecticMatrixGroupDatabase"
"SymplecticSpace"
"SymplecticTensorProduct"
"SymplecticTransvection"
"Syndrome"
"SyndromeSpace"
"SysAssignNamesNum"
"System"
"SystemNormaliser"
"SystemNormalizer"
"SystemOfEigenvalues"
"SyzygyMatrix"
"SyzygyModule"
"Sz"
"SzBlackBoxGenerators"
"SzBlackBoxMembership"
"SzClassMap"
"SzClassRepresentative"
"SzConjugacyClasses"
"SzElementToWord"
"SzIsConjugate"
"SzPresentation"
"SzRationalConjugacyClasses"
"SzRedundantSLPGenerators"
"SzSLPCoercion"
"TMPolyCharOdd"
"TMPolyCharOddCheck"
"Tableau"
"TableauIntegerMonoid"
"TableauMonoid"
"Tableaux"
"TableauxOfShape"
"TableauxOnShapeWithContent"
"TableauxWithContent"
"TaftDecomposition"
"TailVector"
"Tails"
"TamagawaNumber"
"TamagawaNumbers"
"TameOrder"
"Tan"
"Tangent"
"TangentAngle"
"TangentCone"
"TangentLine"
"TangentSheaf"
"TangentSpace"
"TangentVariety"
"Tanh"
"TannerGraph"
"TargetRestriction"
"TargetRing"
"TateLichtenbaumPairing"
"TatePairing"
"TeichmuellerLift"
"TeichmuellerSystem"
"Tell"
"Tempname"
"Tensor"
"TensorBasis"
"TensorCond"
"TensorCondensation"
"TensorFactors"
"TensorInducedAction"
"TensorInducedBasis"
"TensorInducedPermutations"
"TensorPower"
"TensorProduct"
"TensorProductAction"
"TensorWreathProduct"
"Term"
"TerminalIndex"
"TerminalPolarisation"
"TerminalVertex"
"Terminalisation"
"Terms"
"TestEquations"
"TestHeckeRep"
"TestHomomorphism"
"TestLists"
"TestPicnDesc"
"TestReeConjugacy"
"TestWG"
"Theta"
"ThetaOperator"
"ThetaSeries"
"ThetaSeriesIntegral"
"ThetaSeriesIntegralLimited"
"ThetaSeriesLimited"
"ThetaSeriesModularForm"
"ThetaSeriesModularFormSpace"
"ThreeDescent"
"ThreeDescentByIsogeny"
"ThreeDescentCubic"
"ThreeIsogenyDescent"
"ThreeIsogenyDescentCubic"
"ThreeIsogenySelmerGroups"
"ThreeSelmerElement"
"ThreeSelmerGroup"
"ThreeTorsionMatrices"
"ThreeTorsionOrbits"
"ThreeTorsionPoints"
"ThreeTorsionType"
"Thue"
"TietzeProcess"
"TitsGroup"
"TjurinaNumber"
"To2DUpperHalfSpaceFundamentalDomian"
"ToAnalyticJacobian"
"ToBianchiCone"
"ToLiE"
"ToddCoxeter"
"ToddCoxeterSchreier"
"ToddCoxeterSchreierCoding"
"Top"
"TopQuotients"
"Tor"
"ToralRootDatum"
"ToralRootSystem"
"ToricAffinePatch"
"ToricCode"
"ToricFunctionField"
"ToricIdentityMap"
"ToricIsAffine"
"ToricIsProjective"
"ToricLattice"
"ToricLiftRationalFunction"
"ToricRestrictRationalFunction"
"ToricVariety"
"ToricVarietyMap"
"ToroidalAutomorphism"
"TorsionBasis"
"TorsionBound"
"TorsionCoefficients"
"TorsionFreeRank"
"TorsionFreeSubgroup"
"TorsionInvariants"
"TorsionLowerBound"
"TorsionMultiple"
"TorsionSubgroup"
"TorsionSubgroupScheme"
"TorsionSubmodule"
"TorsionUnitGroup"
"Torus"
"TorusTerm"
"TotalDegree"
"TotalDegreeAbstract"
"TotalLinking"
"TotalNumberOfCosets"
"TotallyRamifiedExtension"
"TotallySingularComplement"
"TppMatrix"
"Trace"
"TraceAbs"
"TraceInnerProduct"
"TraceMatrix"
"TraceOfFrobenius"
"TraceOfProduct"
"TraceSortDecomposition"
"TraceZeroSubspace"
"Traceback"
"TracesOfFrobenius"
"TrailingCoefficient"
"TrailingTerm"
"Trans2"
"Trans32Identify"
"TransformBilinearForm"
"TransformForm"
"TransformRelations"
"Transformation"
"TransformationMatrix"
"TransitiveDirectProduct"
"TransitiveGroup"
"TransitiveGroupDatabase"
"TransitiveGroupDatabaseLimit"
"TransitiveGroupDescription"
"TransitiveGroupFundamentalInvariants"
"TransitiveGroupIdentification"
"TransitiveGroupProcess"
"TransitiveGroups"
"TransitiveQuotient"
"Transitivity"
"Translate"
"Translation"
"TranslationMap"
"TranslationOfSimplex"
"TranslationToInfinity"
"Transport"
"Transpose"
"TransposePartition"
"Transvection"
"TransvectionFactors"
"Transversal"
"TransversalElt"
"TransversalNonParabolic"
"TransversalParabolic"
"TransversalProcess"
"TransversalProcessNext"
"TransversalProcessRemaining"
"TransversalWords"
"TransverseIndex"
"TransverseIntersections"
"TransverseType"
"TrapezoidalQuadrature"
"TrialDivision"
"TriangularDecomposition"
"TriangularGraph"
"Triangulation"
"TriangulationOfBoundary"
"Trim"
"Trinomials"
"TrivialLieRepresentationDecomposition"
"TrivialModule"
"TrivialOneCocycle"
"TrivialRepresentation"
"TrivialRootDatum"
"TrivialRootSystem"
"TrivialSubgroup"
"Trivialize"
"TrivializeNew"
"Truncate"
"TruncateCoefficients"
"TruncatedHyperball"
"Truncation"
"Tuple"
"TupleToList"
"Tuplist"
"TwelveDescent"
"Twist"
"TwistedBasis"
"TwistedCartanName"
"TwistedGroup"
"TwistedGroupOfLieType"
"TwistedLieAlgebra"
"TwistedPolynomials"
"TwistedQRCode"
"TwistedRootDatum"
"TwistedTori"
"TwistedToriOrders"
"TwistedTorus"
"TwistedTorusOrder"
"TwistedWindingElement"
"TwistedWindingSubmodule"
"TwistingDegree"
"Twists"
"TwoCocycle"
"TwoCover"
"TwoCoverDescent"
"TwoCoverPullback"
"TwoDescendantsOverTwoIsogenyDescendant"
"TwoDescent"
"TwoElement"
"TwoElementNormal"
"TwoGenerators"
"TwoGenus"
"TwoIsogeny"
"TwoIsogenyDescent"
"TwoIsogenySelmerGroups"
"TwoSelmerElement"
"TwoSelmerGroup"
"TwoSelmerGroupData"
"TwoSelmerGroupNew"
"TwoSelmerGroupOld"
"TwoSelmerGroupTest"
"TwoSequencePolynomial"
"TwoSidedIdealClassGroup"
"TwoSidedIdealClasses"
"TwoTorsionMatrices"
"TwoTorsionOrbits"
"TwoTorsionPolynomial"
"TwoTorsionSubgroup"
"TwoTransitiveGroupIdentification"
"Type"
"TypeOfContraction"
"TypeOfSequence"
"Types"
"TypesOfContractions"
"UltraSummitProcess"
"UltraSummitRepresentative"
"UltraSummitSet"
"UncapacitatedGraph"
"Uncondense"
"Undefine"
"UnderlyingDigraph"
"UnderlyingElement"
"UnderlyingField"
"UnderlyingGraph"
"UnderlyingMultiDigraph"
"UnderlyingMultiGraph"
"UnderlyingNetwork"
"UnderlyingRing"
"UnderlyingSet"
"UnderlyingToriMap"
"UnderlyingVertex"
"Ungetc"
"UniformizingElement"
"UniformizingParameter"
"UnimodularExtension"
"Union"
"UnionOfLines"
"UnipotentBasis"
"UnipotentMatrixGroup"
"UnipotentStabiliser"
"UnitDisc"
"UnitEquation"
"UnitGenerators"
"UnitGroup"
"UnitGroupAsSubgroup"
"UnitGroupGenerators"
"UnitRank"
"UnitTrivialSubgroup"
"UnitVector"
"UnitalFeet"
"UnitaryDirectSum"
"UnitaryForm"
"UnitaryFormCS"
"UnitaryReflection"
"UnitarySpace"
"UnitaryTensorProduct"
"UnitaryTransvection"
"Units"
"Unity"
"UnivariateEliminationIdealGenerator"
"UnivariateEliminationIdealGenerators"
"UnivariatePolynomial"
"UniversalEnvelopingAlgebra"
"UniversalMap"
"UniversalPropertyOfCokernel"
"Universe"
"UniverseCode"
"UnlabelledCayleyGraph"
"UnlabelledGraph"
"UnlabelledSchreierGraph"
"Unnormalise"
"Unnormalize"
"UnprojectionCentres"
"UnprojectionCodimensions"
"UnprojectionIndices"
"UnprojectionSubtypes"
"UnprojectionTypes"
"Unprojections"
"UnramifiedExtension"
"UnramifiedQuotientRing"
"UnramifiedSquareSymbol"
"UnsetBounds"
"UnsetGlobalTCParameters"
"UnsetLogFile"
"UnsetOutputFile"
"UntwistedOvergroup"
"UntwistedRootDatum"
"UnweightedGraph"
"UpdateGraphLabels"
"UpdateHadamardDatabase"
"UpdateLevels"
"UpperCentralSeries"
"UpperHalfPlane"
"UpperHalfPlaneUnionCusps"
"UpperHalfPlaneWithCusps"
"UpperTriangularMatrix"
"UseFFT"
"UseFlag"
"UseIFFT"
"UseImult"
"UseSmod"
"UseTwistedHopfStructure"
"UserBasePoints"
"UserGenerators"
"UserMapCreateRaw"
"UserMapImageMapRootDtm"
"UserMapPreimageMapRootDtm"
"UserProcess"
"UserRepresentation"
"UsesBrandt"
"UsesMestre"
"VNullspace"
"Valence"
"Valency"
"ValidateCryptographicCurve"
"Valuation"
"ValuationRing"
"ValuationsOfRoots"
"ValueList"
"ValueMap"
"ValueRing"
"ValuesOnUnitGenerators"
"VanLintBound"
"VariableExtension"
"VariableWeights"
"Variant"
"Variety"
"VarietySequence"
"VarietySizeOverAlgebraicClosure"
"Vector"
"VectorSpace"
"VectorSpaceOverQ"
"VectorSpaceWithBasis"
"Verify"
"VerifyMinimumDistanceLowerBound"
"VerifyMinimumDistanceUpperBound"
"VerifyMinimumLeeDistanceLowerBound"
"VerifyMinimumLeeDistanceUpperBound"
"VerifyMinimumLeeWeightLowerBound"
"VerifyMinimumLeeWeightUpperBound"
"VerifyMinimumWeightLowerBound"
"VerifyMinimumWeightUpperBound"
"VerifyRelation"
"VerschiebungImage"
"VerschiebungMap"
"Vertex"
"VertexConnectivity"
"VertexFacetHeightMatrix"
"VertexFacetIncidenceMatrix"
"VertexLabel"
"VertexLabels"
"VertexPath"
"VertexSeparator"
"VertexSet"
"VerticalJoin"
"Vertices"
"ViewWithJavaview"
"ViewWithJmol"
"VirtualDecomposition"
"VirtualRayIndices"
"VirtualRays"
"Volume"
"VolumeOfBoundary"
"Voronoi"
"VoronoiCell"
"VoronoiData"
"VoronoiGraph"
"WG2GroupRep"
"WG2HeckeRep"
"WGelement2WGtable"
"WGidealgens2WGtable"
"WGtable2WG"
"WPS"
"WZWFusion"
"WaitForConnection"
"WaitForIO"
"WallDecomposition"
"WallForm"
"WallIsometry"
"WeakApproximation"
"WeakDegree"
"WeakOrder"
"WeakPopovForm"
"WeakValuation"
"WeberClassPolynomial"
"WeberF"
"WeberF1"
"WeberF2"
"WeberPolynomial"
"WeberToHilbertClassPolynomial"
"WedderburnDecomposition"
"WeierstrassModel"
"WeierstrassPlaces"
"WeierstrassPoints"
"WeierstrassSeries"
"Weight"
"WeightClass"
"WeightDistribution"
"WeightEnumerator"
"WeightLattice"
"WeightOneHalfData"
"WeightOrbit"
"WeightSequence"
"WeightSpace"
"WeightSpaces"
"WeightToPartition"
"WeightVectors"
"WeightedDegree"
"WeightedDynkinDiagram"
"WeightedProjectiveSpace"
"Weights"
"WeightsAndMultiplicities"
"WeightsAndVectors"
"WeightsOfFlip"
"Weil"
"WeilDescent"
"WeilDescentComposita"
"WeilDescentCompositaMap"
"WeilDescentDegree"
"WeilDescentDeltas"
"WeilDescentFrobeniusExtension"
"WeilDescentFrobeniusExtensions"
"WeilDescentGenus"
"WeilDescentPrimitiveReducedCompositum"
"WeilDescentRationalParametrization"
"WeilDescentReducedCompositum"
"WeilDescentReducedDelta_1"
"WeilHeight"
"WeilPairing"
"WeilPolynomialOverFieldExtension"
"WeilPolynomialToRankBound"
"WeilRepresentation"
"WeilRestriction"
"WeilToClassGroupsMap"
"WeilToClassLatticesMap"
"WeylGroup"
"WeylMatrix"
"WeylWord"
"WeylWordFromAction"
"WhiteheadReduction"
"Width"
"Widths"
"WindingElement"
"WindingElementProjection"
"WindingLattice"
"WindingSubmodule"
"WittDecomposition"
"WittDesign"
"WittIndex"
"WittInvariant"
"WittInvariants"
"WittLieAlgebra"
"WittRing"
"Word"
"WordAcceptor"
"WordAcceptorSize"
"WordAcceptorTable"
"WordCount"
"WordDifferenceAutomaton"
"WordDifferenceSize"
"WordDifferenceTable"
"WordDifferences"
"WordGroup"
"WordInStrongGenerators"
"WordMap"
"WordOnCorootSpace"
"WordOnRoot"
"WordOnRootSpace"
"WordProblem"
"WordProblemData"
"WordStrip"
"WordToDualMatrix"
"WordToMatrix"
"WordToPerm"
"WordToSequence"
"WordToTableau"
"WordWrap"
"Words"
"WordsGramMatrix"
"WordsMatrix"
"WordsOfBoundedLeeWeight"
"WordsOfBoundedWeight"
"WordsOfLeeWeight"
"WordsTransposedMatrix"
"WreathProduct"
"Write"
"WriteBinary"
"WriteBytes"
"WriteFanoData"
"WriteGModuleOver"
"WriteGModuleOverExtensionOf"
"WriteHadamardDatabase"
"WriteIntegralMatrix"
"WriteK3Data"
"WriteNewtonPolytopeToPSFile"
"WriteOver"
"WriteOverElement"
"WriteOverLargerField"
"WriteOverMatrix"
"WriteOverSmallerField"
"WritePolytopeToJVX"
"WritePolytopeToJmolFile"
"WritePolytopeToPALP"
"WritePolytopeToPSFile"
"WritePolytopeToSvgFile"
"WritePolytopesToJVX"
"WriteRawHadamardData"
"WriteRepresentationOver"
"WriteWG"
"WronskianDeterminant"
"WronskianMatrix"
"WronskianOrders"
"X0NQuotient"
"XGCD"
"XXX_VarietySequence"
"Xgcd"
"Xor"
"YYY_SupersingularInvariants"
"YoungSubgroup"
"YoungSubgroupLadder"
"Z4CodeFromBinaryChain"
"Z4Dimension"
"Z4Type"
"ZBasis"
"ZClasses"
"ZGenerators"
"ZSpace"
"ZariskiDecomposition"
"ZechLog"
"Zero"
"ZeroChainMap"
"ZeroCocycle"
"ZeroCode"
"ZeroComplex"
"ZeroCone"
"ZeroCoordinates"
"ZeroDivisor"
"ZeroExtension"
"ZeroFan"
"ZeroGammaOrbitsOnRoots"
"ZeroMap"
"ZeroMatrix"
"ZeroModularAbelianVariety"
"ZeroModule"
"ZeroRootLattice"
"ZeroRootSpace"
"ZeroSequence"
"ZeroSubgroup"
"ZeroSubspace"
"ZeroSubvariety"
"ZeroSumCode"
"Zeroes"
"Zeros"
"ZetaFunction"
"ZetaFunctionsByDeformation"
"ZimmertBound"
"ZinovievCode"
"aInvariants"
"all_ram_extensions_of_deg_p_m_j"
"bInvariants"
"c9LatticeRecord"
"cInvariants"
"calculateAlbertAlgebra"
"calculateBigReeTwistingMapCBMs"
"fPolynomial"
"fValue"
"fValueProof"
"fVector"
"hPolynomial"
"hVector"
"has_element_of_norm_sub"
"isValidSuzukiOrder"
"jFunction"
"jInvariant"
"jInvariantMap"
"jNInvariant"
"jParameter"
"jPoints"
"kArc"
"mainInvolution"
"mfdevel"
"myFindLieAlgebra"
"nCovering"
"nIsogeny"
"nTorsionSubgroup"
"pAdicDiagonalization"
"pAdicEllipticLogarithm"
"pAdicEllipticLogarithmOfCombination"
"pAdicEmbeddings"
"pAdicField"
"pAdicHeight"
"pAdicHeightPairingMatrix"
"pAdicLSeries"
"pAdicQuotientRing"
"pAdicRegulator"
"pAdicRing"
"pCentralSeries"
"pClass"
"pClosure"
"pCore"
"pCover"
"pCoveringGroup"
"pElementaryAbelianNormalSubgroup"
"pExcess"
"pFundamentalUnits"
"pIntegralGModule"
"pIntegralModel"
"pIsogenyDescent"
"pMap"
"pMatrixRing"
"pMaximalOrder"
"pMaximalSubmodules"
"pMinimalWeierstrassModel"
"pMinimise"
"pMinus1"
"pMultiplicator"
"pMultiplicatorRank"
"pNewModularDegree"
"pNormalModel"
"pPlus1"
"pPowerTorsion"
"pPrimaryComponent"
"pPrimaryInvariants"
"pQuotient"
"pQuotientProcess"
"pRadical"
"pRank"
"pRanks"
"pSelmerGroup"
"pSignature"
"pSubalgebra"
"pSubgroup"
"p_hom"
"qCoverDescent"
"qCoverPartialDescent"
"qEigenform"
"qEigenformReductions"
"qExpansion"
"qExpansionBasis"
"qExpansionExpressions"
"qExpansionsOfGenerators"
"qIntegralBasis"
]
commentStart: "// "
".source.Magma":
editor:
increaseIndentPattern: "^\\s*(function|procedure|if|for|while|elif|else|case|when|repeat|try|catch)[^;]*$|^\\s*\\b([A-Za-z_][A-Za-z0-9_]*)\\b\\s*:=\\s*\\b(function|procedure)\\b.*$"
decreaseIndentPattern: "^\\s*((end (for|if|procedure|function|case|while|try))|else|elif|until)\\b.*"
completions: [
"AFRNumber"
"AGCode"
"AGDecode"
"AGDualCode"
"AGL"
"AGM"
"AGammaL"
"AHom"
"AHomOverCentralizingField"
"AInfinityRecord"
"AModule"
"APNCompleteGeneration"
"APNGeneration"
"APNMatrix"
"APNRationalGeneration"
"AQInvariants"
"AQPrimes"
"ASL"
"ASigmaL"
"ATLASGroup"
"ATLASGroupNames"
"AbelianBasis"
"AbelianExtension"
"AbelianGroup"
"AbelianInvariants"
"AbelianLieAlgebra"
"AbelianNormalQuotient"
"AbelianNormalSubgroup"
"AbelianNormalSubgroupSSS"
"AbelianQuotient"
"AbelianQuotientInvariants"
"AbelianQuotientRewrite"
"AbelianSection"
"AbelianSubfield"
"AbelianSubgroups"
"AbelianpExtension"
"Abs"
"AbsDenominator"
"AbsEltseq"
"AbsIrrApplyConjugation"
"AbsIrrApplyGalois"
"AbsIrrFromMap"
"AbsIrrFromModul"
"AbsoluteAffineAlgebra"
"AbsoluteAlgebra"
"AbsoluteBasis"
"AbsoluteCartanMatrix"
"AbsoluteCharacteristicPolynomial"
"AbsoluteDecomposition"
"AbsoluteDegree"
"AbsoluteDiscriminant"
"AbsoluteField"
"AbsoluteFrobenius"
"AbsoluteFunctionField"
"AbsoluteGaloisGroup"
"AbsoluteInertiaDegree"
"AbsoluteInertiaIndex"
"AbsoluteInvariants"
"AbsoluteLogarithmicHeight"
"AbsoluteMinimalPolynomial"
"AbsoluteModuleOverMinimalField"
"AbsoluteModulesOverMinimalField"
"AbsoluteNorm"
"AbsoluteOrder"
"AbsolutePolynomial"
"AbsolutePrecision"
"AbsoluteQuotientRing"
"AbsoluteRamificationDegree"
"AbsoluteRamificationIndex"
"AbsoluteRank"
"AbsoluteRationalScroll"
"AbsoluteRepresentation"
"AbsoluteRepresentationMatrix"
"AbsoluteTotallyRamifiedExtension"
"AbsoluteTrace"
"AbsoluteValue"
"AbsoluteValues"
"AbsolutelyIrreducibleConstituents"
"AbsolutelyIrreducibleModule"
"AbsolutelyIrreducibleModules"
"AbsolutelyIrreducibleModulesBurnside"
"AbsolutelyIrreducibleModulesDelete"
"AbsolutelyIrreducibleModulesInit"
"AbsolutelyIrreducibleModulesSchur"
"AbsolutelyIrreducibleRepresentationProcessDelete"
"AbsolutelyIrreducibleRepresentationsApply"
"AbsolutelyIrreducibleRepresentationsDelete"
"AbsolutelyIrreducibleRepresentationsInit"
"AbsolutelyIrreducibleRepresentationsProcess"
"AbsolutelyIrreducibleRepresentationsProcessDegree"
"AbsolutelyIrreducibleRepresentationsProcessGroup"
"AbsolutelyIrreducibleRepresentationsSchur"
"Absolutize"
"Ac"
"ActingGroup"
"ActingWord"
"Action"
"ActionGenerator"
"ActionGenerators"
"ActionGroup"
"ActionImage"
"ActionKernel"
"ActionMatrix"
"ActionOnVector"
"AdamsOperator"
"AdaptedBasis"
"AdaptedBasisIndex"
"AdaptedBasisProcessAdd"
"AdaptedBasisProcessAddTest"
"AdaptedBasisProcessInit"
"Add"
"AddAttribute"
"AddAttributes"
"AddColumn"
"AddConstraints"
"AddCovers"
"AddCrossTerms"
"AddCubics"
"AddEdge"
"AddEdges"
"AddGenerator"
"AddMult"
"AddNormalizingGenerator"
"AddPrimes"
"AddRedundantGenerators"
"AddRelation"
"AddRelator"
"AddRepresentation"
"AddRow"
"AddScaledMatrix"
"AddSimplex"
"AddStrongGenerator"
"AddStrongGeneratorToLevel"
"AddSubgroupGenerator"
"AddVectorToLattice"
"AddVertex"
"AddVertices"
"AdditiveCode"
"AdditiveConstaCyclicCode"
"AdditiveCyclicCode"
"AdditiveGroup"
"AdditiveHilbert90"
"AdditiveMacWilliamsTransform"
"AdditiveOrder"
"AdditivePermutationCode"
"AdditivePolynomialFromRoots"
"AdditiveQuasiCyclicCode"
"AdditiveQuasiTwistedCyclicCode"
"AdditiveRepetitionCode"
"AdditiveUniverseCode"
"AdditiveZeroCode"
"AdditiveZeroSumCode"
"AdjacencyMatrix"
"Adjoin"
"Adjoint"
"AdjointAlgebra"
"AdjointGraph"
"AdjointIdeal"
"AdjointIdealForNodalCurve"
"AdjointLinearSystem"
"AdjointLinearSystemForNodalCurve"
"AdjointLinearSystemFromIdeal"
"AdjointMatrix"
"AdjointModule"
"AdjointPreimage"
"AdjointRepresentation"
"AdjointRepresentationDecomposition"
"AdjointVersion"
"Adjoints"
"AdmissableTriangleGroups"
"AdmissiblePair"
"Advance"
"Af"
"AffineAction"
"AffineAlgebra"
"AffineAlgebraMapKernel"
"AffineAmbient"
"AffineDecomposition"
"AffineGammaLinearGroup"
"AffineGeneralLinearGroup"
"AffineImage"
"AffineKernel"
"AffineLieAlgebra"
"AffineNormalForm"
"AffinePatch"
"AffinePlane"
"AffineRepresentative"
"AffineSigmaLinearGroup"
"AffineSpace"
"AffineSpecialLinearGroup"
"Agemo"
"Alarm"
"AlgComb"
"Algebra"
"AlgebraGenerators"
"AlgebraMap"
"AlgebraOverCenter"
"AlgebraOverFieldOfFractions"
"AlgebraStructure"
"AlgebraicClosure"
"AlgebraicGenerators"
"AlgebraicGeometricCode"
"AlgebraicGeometricDualCode"
"AlgebraicPowerSeries"
"AlgebraicToAnalytic"
"AlgorithmicFunctionField"
"AllCliques"
"AllCompactChainMaps"
"AllCones"
"AllDefiningPolynomials"
"AllExtensions"
"AllFaces"
"AllHomomorphisms"
"AllInformationSets"
"AllInverseDefiningPolynomials"
"AllIrreduciblePolynomials"
"AllLinearRelations"
"AllNilpotentLieAlgebras"
"AllPairsShortestPaths"
"AllParallelClasses"
"AllParallelisms"
"AllPartitions"
"AllPassants"
"AllRays"
"AllReductionMaps"
"AllReductionMaps_Factor"
"AllResolutions"
"AllRoots"
"AllSecants"
"AllSlopes"
"AllSolvableLieAlgebras"
"AllSqrts"
"AllSquareRoots"
"AllTangents"
"AllVertices"
"Alldeg"
"AllowableSubgroup"
"AlmostIntegralGModule"
"AlmostInvariantForm"
"AlmostSimpleGroupDatabase"
"Alphabet"
"AlphabetExtensionDegree"
"Alt"
"AlternantCode"
"AlternatingCharacter"
"AlternatingCharacterTable"
"AlternatingCharacterValue"
"AlternatingDominant"
"AlternatingElementToStandardWord"
"AlternatingElementToWord"
"AlternatingGroup"
"AlternatingPower"
"AlternatingSquarePreimage"
"AlternatingSum"
"AlternatingWeylSum"
"AlternativePatches"
"Ambient"
"AmbientLieAlgebra"
"AmbientMatrix"
"AmbientModule"
"AmbientSpace"
"AmbientVariety"
"AmbiguousForms"
"AnalyticDrinfeldModule"
"AnalyticHomomorphisms"
"AnalyticInformation"
"AnalyticJacobian"
"AnalyticModule"
"AnalyticRank"
"AnalyticRankNumberOfTerms"
"AnalyticRankQuadraticTwist"
"And"
"Angle"
"AnisotropicSubdatum"
"Annihilator"
"AntiAutomorphismTau"
"Antipode"
"AntisymmetricForms"
"AntisymmetricHermitianForms"
"AntisymmetricMatrix"
"AntisymmetricQuaternionicForms"
"AnyDescription"
"ApparentCodimension"
"ApparentEquationDegrees"
"ApparentSyzygyDegrees"
"Append"
"AppendBasePoint"
"AppendModule"
"Apply"
"ApplyAutomorphism"
"ApplyForAutgCoerce"
"ApplyTransformation"
"ApplyWeylElement"
"Approx"
"ApproximateByTorsionGroup"
"ApproximateByTorsionPoint"
"ApproximateOrder"
"ApproximateStabiliser"
"Arccos"
"Arccosec"
"Arccot"
"Arcsec"
"Arcsin"
"Arctan"
"Arctan2"
"AreCohomologous"
"AreCollinear"
"AreEqualMorphisms"
"AreEqualObjects"
"AreGenerators"
"AreIdentical"
"AreInvolutionsConjugate"
"AreLinearlyEquivalent"
"AreProportional"
"ArfInvariant"
"Arg"
"Argcosech"
"Argcosh"
"Argcoth"
"Argsech"
"Argsinh"
"Argtanh"
"Argument"
"ArithmeticGenus"
"ArithmeticGenusOfDesingularization"
"ArithmeticGeometricMean"
"ArithmeticLSeries"
"ArithmeticTriangleGroup"
"ArithmeticVolume"
"ArrowWeights"
"Arrows"
"ArtRepCreate"
"ArtinMap"
"ArtinRepresentation"
"ArtinRepresentations"
"ArtinSchreierExtension"
"ArtinSchreierImage"
"ArtinSchreierMap"
"ArtinSchreierSymbol"
"ArtinTateFormula"
"AsExtensionOf"
"AssertAttribute"
"AssertEmbedding"
"AssignBase"
"AssignCapacities"
"AssignCapacity"
"AssignEdgeLabels"
"AssignLDPCMatrix"
"AssignLabel"
"AssignLabels"
"AssignNamePrefix"
"AssignNames"
"AssignNamesBase"
"AssignVertexLabels"
"AssignWeight"
"AssignWeights"
"AssociatedEllipticCurve"
"AssociatedHyperellipticCurve"
"AssociatedNewSpace"
"AssociatedPrimitiveCharacter"
"AssociatedPrimitiveGrossencharacter"
"AssociativeAlgebra"
"AssociativeArray"
"AtEof"
"AteTPairing"
"AteqPairing"
"AtkinLehner"
"AtkinLehnerDecomposition"
"AtkinLehnerEigenvalue"
"AtkinLehnerInvolution"
"AtkinLehnerNumberOfFixedPoints"
"AtkinLehnerOperator"
"AtkinLehnerOperatorOverQ"
"AtkinLehnerPrimes"
"AtkinLehnerSubspace"
"AtkinModularEquation"
"AtkinModularPolynomial"
"AtlasGroup"
"AtlasGroupNames"
"AtlasVersionInfo"
"Attach"
"AttachSpec"
"AugmentCode"
"Augmentation"
"AugmentationIdeal"
"AugmentationMap"
"Aut"
"AutGpSG"
"AutPSp"
"AutoCorrelation"
"AutoDD"
"AutoDR"
"AutoDW"
"AutomaticGroup"
"Automorphism"
"AutomorphismGroup"
"AutomorphismGroupAsMatrixGroup"
"AutomorphismGroupFF"
"AutomorphismGroupOverCyclotomicExtension"
"AutomorphismGroupOverExtension"
"AutomorphismGroupOverQ"
"AutomorphismGroupPGroup"
"AutomorphismGroupPGroup2"
"AutomorphismGroupStabilizer"
"AutomorphismOmega"
"AutomorphismSubgroup"
"AutomorphismTalpha"
"AutomorphismWorld"
"Automorphisms"
"AutomorphousClasses"
"AuxiliaryLevel"
"AxisMultiplicities"
"BBSModulus"
"BCHBound"
"BCHCode"
"BDLC"
"BDLCLowerBound"
"BDLCUpperBound"
"BFSTree"
"BKLC"
"BKLCLowerBound"
"BKLCUpperBound"
"BKQC"
"BKZ"
"BLLC"
"BLLCLowerBound"
"BLLCUpperBound"
"BQPlotkinSum"
"BSGS"
"BSGSProcess"
"BString"
"BachBound"
"BacherPolynomialInternal"
"BacherPolynomialTestInternal"
"BadPlaces"
"BadPrimes"
"BaerDerivation"
"BaerSubplane"
"Ball"
"Bang"
"BarAutomorphism"
"Barvinok"
"BarycentricSubdivision"
"Base"
"Base64Decode"
"Base64DecodeFile"
"Base64Encode"
"Base64EncodeFile"
"BaseBlowupContribution"
"BaseCategory"
"BaseChange"
"BaseChangeMatrix"
"BaseChangedDefiningEquations"
"BaseComponent"
"BaseCurve"
"BaseElement"
"BaseExtend"
"BaseExtension"
"BaseExtensionMorphisms"
"BaseField"
"BaseGerm"
"BaseImage"
"BaseImageWordStrip"
"BaseLocus"
"BaseMPolynomial"
"BaseModule"
"BaseObject"
"BasePoint"
"BasePoints"
"BaseRing"
"BaseScheme"
"BaseSize"
"BaseSpace"
"BasicAlgebra"
"BasicAlgebraGrpPToBasicAlgebra"
"BasicAlgebraOfEndomorphismAlgebra"
"BasicAlgebraOfExtAlgebra"
"BasicAlgebraOfGroupAlgebra"
"BasicAlgebraOfHeckeAlgebra"
"BasicAlgebraOfMatrixAlgebra"
"BasicAlgebraOfSchurAlgebra"
"BasicAlgebraPGroup"
"BasicCodegrees"
"BasicDegrees"
"BasicOrbit"
"BasicOrbitLength"
"BasicOrbitLengths"
"BasicOrbits"
"BasicParameters"
"BasicRootMatrices"
"BasicStabiliser"
"BasicStabiliserChain"
"BasicStabilizer"
"BasicStabilizerChain"
"Basis"
"BasisChange"
"BasisDenominator"
"BasisElement"
"BasisMatrix"
"BasisMinus"
"BasisOfDegree0CoxMonomials"
"BasisOfDifferentialsFirstKind"
"BasisOfHolomorphicDifferentials"
"BasisOfRationalFunctionField"
"BasisPlus"
"BasisProduct"
"BasisProducts"
"BasisReduction"
"Basket"
"Bell"
"BerlekampMassey"
"Bernoulli"
"BernoulliApproximation"
"BernoulliNumber"
"BernoulliPolynomial"
"BesselFunction"
"BesselFunctionSecondKind"
"BestApproximation"
"BestDimensionLinearCode"
"BestKnownLinearCode"
"BestKnownQuantumCode"
"BestLengthLinearCode"
"BestTranslation"
"BetaFunction"
"BettiNumber"
"BettiNumbers"
"BettiTable"
"BianchiCuspForms"
"Bicomponents"
"BigO"
"BigPeriodMatrix"
"BigTorus"
"BilinearFormSign"
"BilinearFormType"
"BinaryCodedForm"
"BinaryForms"
"BinaryQuadraticForms"
"BinaryResidueCode"
"BinaryString"
"BinaryToBytes"
"BinaryTorsionCode"
"Binomial"
"BinomialPolynomial"
"BinomialToricEmbedding"
"BipartiteGraph"
"Bipartition"
"BiquadraticResidueSymbol"
"BitFlip"
"BitPrecision"
"BitwiseAnd"
"BitwiseNot"
"BitwiseOr"
"BitwiseXor"
"BlackboxGroup"
"Block"
"BlockDegree"
"BlockDegrees"
"BlockDiagMat"
"BlockDiagScalarMat"
"BlockGraph"
"BlockGroup"
"BlockMatrix"
"BlockSet"
"BlockSize"
"BlockSizes"
"BlockTranspose"
"Blocks"
"BlocksAction"
"BlocksImage"
"BlocksKernel"
"Blowup"
"BlumBlumShub"
"BlumBlumShubModulus"
"BogomolovNumber"
"BooleanPolynomial"
"BooleanPolynomialRing"
"Booleans"
"BorderedDoublyCirculantQRCode"
"Borel"
"BorelSubgroup"
"Bottom"
"Bound"
"Boundary"
"BoundaryIntersection"
"BoundaryMap"
"BoundaryMapGrpP"
"BoundaryMaps"
"BoundaryMatrix"
"BoundaryPoints"
"BoundedFSubspace"
"BoundingBox"
"BoxElements"
"BraidGroup"
"Branch"
"BranchVertexPath"
"BrandtModule"
"BrandtModuleDimension"
"BrauerCharacter"
"BrauerCharacterTable"
"BrauerClass"
"BravaisGroup"
"BreadthFirstSearchTree"
"Bruhat"
"BruhatDescendants"
"BruhatLessOrEqual"
"BuildHom"
"BurauRepresentation"
"BurnsideMatrix"
"BytesToString"
"C6Action"
"C6Basis"
"C6Image"
"C6Kernel"
"C6Parameters"
"C9AlternatingElementToStandardWord"
"C9AlternatingElementToWord"
"C9RecogniseAlternating"
"CFP"
"CGO"
"CGOMinus"
"CGOPlus"
"CGSp"
"CGU"
"CMPoints"
"CMTwists"
"CO"
"COMinus"
"COPlus"
"CRT"
"CSO"
"CSOMinus"
"CSOPlus"
"CSSCode"
"CSU"
"CSp"
"CU"
"CacheClearToricLattice"
"CacheClearToricVariety"
"CalabiYau"
"CalculateCanonicalClass"
"CalculateMultiplicities"
"CalculateRegularSpliceDiagram"
"CalculateTransverseIntersections"
"CalderbankShorSteaneCode"
"CambridgeMatrix"
"CanChangeRing"
"CanChangeUniverse"
"CanContinueEnumeration"
"CanDetermineIsomorphism"
"CanIdentifyGroup"
"CanMakeIntegral"
"CanMakeIntegralGModule"
"CanNormalize"
"CanReallyMakeIntegral"
"CanRedoEnumeration"
"CanSignNormalize"
"CanWriteOver"
"CanonicalBasis"
"CanonicalClass"
"CanonicalDegree"
"CanonicalDissidentPoints"
"CanonicalDivisor"
"CanonicalElements"
"CanonicalEmbedding"
"CanonicalFactorRepresentation"
"CanonicalGenerators"
"CanonicalGraph"
"CanonicalHeight"
"CanonicalImage"
"CanonicalInvolution"
"CanonicalLength"
"CanonicalLinearSystem"
"CanonicalLinearSystemFromIdeal"
"CanonicalMap"
"CanonicalModularEquation"
"CanonicalModularPolynomial"
"CanonicalModule"
"CanonicalMultiplicity"
"CanonicalRepresentation"
"CanonicalSheaf"
"Canonicalisation"
"CanteautChabaudsAttack"
"CantorComposition1"
"CantorComposition2"
"Capacities"
"Capacity"
"CarlitzModule"
"CarmichaelLambda"
"CartanInteger"
"CartanMatrix"
"CartanName"
"CartanSubalgebra"
"CarterSubgroup"
"CartesianPower"
"CartesianProduct"
"Cartier"
"CartierRepresentation"
"CartierToWeilMap"
"CasimirValue"
"CasselsMap"
"CasselsTatePairing"
"Catalan"
"Category"
"CayleyGraph"
"Ceiling"
"Cell"
"CellNumber"
"CellSize"
"CellSizeByPoint"
"Center"
"CenterDensity"
"CenterPolynomials"
"CentralCharacter"
"CentralCollineationGroup"
"CentralEndomorphisms"
"CentralExtension"
"CentralExtensionProcess"
"CentralExtensions"
"CentralIdempotents"
"CentralOrder"
"CentralProductDecomposition"
"CentralSumDecomposition"
"CentralValue"
"Centraliser"
"CentraliserOfInvolution"
"CentralisingMatrix"
"CentralisingRoots"
"Centralizer"
"CentralizerGLZ"
"CentralizerOfNormalSubgroup"
"Centre"
"CentreDensity"
"CentreOfEndomorphismAlgebra"
"CentreOfEndomorphismRing"
"CentrePolynomials"
"CentredAffinePatch"
"Chabauty"
"Chabauty0"
"ChabautyEquations"
"ChainComplex"
"ChainMap"
"ChainmapToCohomology"
"ChangGraphs"
"ChangeAmbient"
"ChangeBase"
"ChangeBasis"
"ChangeBasisCSAlgebra"
"ChangeDerivation"
"ChangeDifferential"
"ChangeDirectory"
"ChangeExponentDenominator"
"ChangeField"
"ChangeModel"
"ChangeN"
"ChangeOfBasisMatrix"
"ChangeOrder"
"ChangePrecision"
"ChangeRepresentationType"
"ChangeRing"
"ChangeRingAlgLie"
"ChangeSign"
"ChangeSupport"
"ChangeUniverse"
"Char"
"Character"
"CharacterDegrees"
"CharacterDegreesPGroup"
"CharacterField"
"CharacterFromTraces"
"CharacterMultiset"
"CharacterOfImage"
"CharacterRing"
"CharacterTable"
"CharacterTableConlon"
"CharacterTableDS"
"CharacterToModular"
"CharacterToRepresentation"
"Characteristic"
"CharacteristicPolynomial"
"CharacteristicPolynomialFromTraces"
"CharacteristicSeries"
"CharacteristicVector"
"Characters"
"CharpolyOfFrobenius"
"ChebyshevFirst"
"ChebyshevSecond"
"ChebyshevT"
"ChebyshevU"
"CheckBasket"
"CheckCharacterTable"
"CheckCodimension"
"CheckEmbed"
"CheckFunctionalEquation"
"CheckIdeal"
"CheckOrder"
"CheckPoint"
"CheckPolynomial"
"CheckSparseRootDatum"
"CheckWeilPolynomial"
"ChevalleyBasis"
"ChevalleyBasisOld"
"ChevalleyGroup"
"ChevalleyGroupOrder"
"ChevalleyOrderPolynomial"
"ChiefFactors"
"ChiefFactorsToString"
"ChiefSeries"
"ChienChoyCode"
"ChineseRemainderTheorem"
"Cholesky"
"ChromaticIndex"
"ChromaticNumber"
"ChromaticPolynomial"
"ChtrLiftInternal"
"Class"
"ClassAction"
"ClassCentraliser"
"ClassCentralizer"
"ClassField"
"ClassFunctionSpace"
"ClassGroup"
"ClassGroupAbelianInvariants"
"ClassGroupChecks"
"ClassGroupCyclicFactorGenerators"
"ClassGroupExactSequence"
"ClassGroupGenerationBound"
"ClassGroupGetUseMemory"
"ClassGroupPRank"
"ClassGroupPrimeRepresentatives"
"ClassGroupSetUseMemory"
"ClassGroupStructure"
"ClassImage"
"ClassMap"
"ClassMatrix"
"ClassNumber"
"ClassNumberApproximation"
"ClassNumberApproximationBound"
"ClassPowerCharacter"
"ClassPowerGroup"
"ClassRepresentative"
"ClassRepresentativeFromInvariants"
"ClassTwo"
"ClassUnion"
"Classes"
"ClassesAHInternal"
"ClassesAlmostSimpleInternal"
"ClassesData"
"ClassesInductive"
"ClassesInductiveSetup"
"ClassesLiftCentPMSetup"
"ClassesTF"
"ClassesTFOrbitReps"
"ClassicalConstructiveRecognition"
"ClassicalElementToWord"
"ClassicalForms"
"ClassicalFormsCS"
"ClassicalGroupOrder"
"ClassicalGroupQuotient"
"ClassicalIntersection"
"ClassicalMaximals"
"ClassicalModularEquation"
"ClassicalModularPolynomial"
"ClassicalMultiplication"
"ClassicalMultiplierMap"
"ClassicalPeriod"
"ClassicalStandardGenerators"
"ClassicalStandardPresentation"
"ClassicalSylow"
"ClassicalSylowConjugation"
"ClassicalSylowNormaliser"
"ClassicalSylowToPC"
"ClassicalType"
"ClassifyProjectiveSurface"
"Clean"
"CleanCompositionTree"
"ClearDenominator"
"ClearDenominators"
"ClearIdentificationTree"
"ClearPrevious"
"ClearRowDenominators"
"ClearVerbose"
"ClebschGraph"
"ClebschInvariants"
"ClebschToIgusaClebsch"
"CliffordAlgebra"
"CliqueComplex"
"CliqueNumber"
"ClockCycles"
"CloseSmallGroupDatabase"
"CloseVectors"
"CloseVectorsMatrix"
"CloseVectorsProcess"
"ClosestUnit"
"ClosestVectors"
"ClosestVectorsMatrix"
"ClosureGraph"
"ClosureLiE"
"Cluster"
"CoblesRadicand"
"CoboundaryMapImage"
"Cocycle"
"CocycleMap"
"CodeComplement"
"CodeEntry"
"CodeEntryQECC"
"CodePermutationToMatrix"
"CodeToString"
"Codegree"
"Codifferent"
"Codimension"
"Codomain"
"Coefficient"
"CoefficientField"
"CoefficientHeight"
"CoefficientIdeal"
"CoefficientIdeals"
"CoefficientLength"
"CoefficientMap"
"CoefficientMorphism"
"CoefficientRing"
"CoefficientSpace"
"Coefficients"
"CoefficientsAndMonomials"
"CoefficientsNonSpiral"
"CoefficientsToElementarySymmetric"
"CoerceByClassAction"
"CoerceGrpLie"
"Coercion"
"CoercionGrpLie"
"Coercions"
"Cofactor"
"Cofactors"
"CohenCoxeterName"
"CohomologicalDimension"
"CohomologicalDimensions"
"Cohomology"
"CohomologyClass"
"CohomologyDimension"
"CohomologyElementToChainMap"
"CohomologyElementToCompactChainMap"
"CohomologyGeneratorToChainMap"
"CohomologyGroup"
"CohomologyLeftModuleGenerators"
"CohomologyModule"
"CohomologyRelations"
"CohomologyRightModuleGenerators"
"CohomologyRing"
"CohomologyRingGenerators"
"CohomologyRingQuotient"
"CohomologyToChainmap"
"Coincidence"
"CoisogenyGroup"
"Cokernel"
"ColinearPointsOnPlane"
"CollateWhiteSpace"
"Collect"
"CollectRelations"
"CollineationGroup"
"CollineationGroupStabilizer"
"CollineationSubgroup"
"Colon"
"ColonIdeal"
"ColonIdealEquivalent"
"ColonModule"
"Column"
"ColumnLength"
"ColumnMatrix"
"ColumnSkewLength"
"ColumnSubmatrix"
"ColumnSubmatrixRange"
"ColumnWeight"
"ColumnWeights"
"ColumnWord"
"Columns"
"CombineIdealFactorisation"
"CombineInvariants"
"CommonComplement"
"CommonComponent"
"CommonDenominator"
"CommonEigenspaces"
"CommonModularStructure"
"CommonOverfield"
"CommonZeros"
"Commutator"
"CommutatorGraph"
"CommutatorGroup"
"CommutatorIdeal"
"CommutatorModule"
"CommutatorSubgroup"
"CompactDeletedProjectiveResolution"
"CompactInjectiveResolution"
"CompactPart"
"CompactPresentation"
"CompactProjectiveResolution"
"CompactProjectiveResolutionPGroup"
"CompactProjectiveResolutionsOfAllSimpleModules"
"CompactSystemOfEigenvalues"
"CompactSystemOfEigenvaluesOverQ"
"CompactSystemOfEigenvaluesVector"
"CompanionMatrix"
"Complement"
"ComplementBasis"
"ComplementDFA"
"ComplementEquationsMatrix"
"ComplementOfImage"
"ComplementVectors"
"ComplementaryDivisor"
"ComplementaryErrorFunction"
"Complements"
"Complete"
"CompleteClassGroup"
"CompleteDescription"
"CompleteDigraph"
"CompleteGraph"
"CompleteKArc"
"CompleteTheSquare"
"CompleteTupleList"
"CompleteUnion"
"CompleteWeightEnumerator"
"Completion"
"Complex"
"ComplexCartanMatrix"
"ComplexConjugate"
"ComplexEmbeddings"
"ComplexField"
"ComplexReflectionGroup"
"ComplexReflectionGroupOld"
"ComplexRootDatum"
"ComplexRootMatrices"
"ComplexToPolar"
"ComplexValue"
"Component"
"ComponentGroup"
"ComponentGroupOfIntersection"
"ComponentGroupOfKernel"
"ComponentGroupOrder"
"ComponentProduct"
"Components"
"ComposeQuotients"
"ComposeTransformations"
"Composite"
"CompositeFields"
"Composition"
"CompositionFactors"
"CompositionSequence"
"CompositionSeries"
"CompositionSeriesMatrix"
"CompositionTree"
"CompositionTreeCBM"
"CompositionTreeElementToWord"
"CompositionTreeFactorNumber"
"CompositionTreeFastVerification"
"CompositionTreeNiceGroup"
"CompositionTreeNiceToUser"
"CompositionTreeOrder"
"CompositionTreeReductionInfo"
"CompositionTreeSLPGroup"
"CompositionTreeSeries"
"CompositionTreeVerify"
"Compositum"
"ComputePreImageRule"
"ComputePrimeFactorisation"
"ComputeReducedFactorisation"
"ComputeSubgroupLattice"
"Comultiplication"
"ConcatenateProcesses"
"ConcatenatedCode"
"CondensationMatrices"
"CondensedAlgebra"
"CondensedAlgebraSimpleModules"
"CondensedModule"
"ConditionNumber"
"ConditionalClassGroup"
"ConditionedGroup"
"Conductor"
"ConductorOfCharacterField"
"ConductorRange"
"Cone"
"ConeInSublattice"
"ConeIndices"
"ConeIntersection"
"ConeQuotientByLinearSubspace"
"ConeToPolyhedron"
"ConeWithInequalities"
"Cones"
"ConesOfCodimension"
"ConformalClassicalGroup"
"ConformalHamiltonianLieAlgebra"
"ConformalOrthogonalGroup"
"ConformalOrthogonalGroupMinus"
"ConformalOrthogonalGroupPlus"
"ConformalSpecialLieAlgebra"
"ConformalSymplecticGroup"
"ConformalUnitaryGroup"
"CongruenceGroup"
"CongruenceGroupAnemic"
"CongruenceImage"
"CongruenceIndices"
"CongruenceModulus"
"CongruenceSubgroup"
"Conic"
"ConicOverSubfield"
"ConjecturalRegulator"
"ConjecturalSha"
"ConjugacyClasses"
"Conjugate"
"ConjugateComplementSubspace"
"ConjugateIntoBorel"
"ConjugateIntoTorus"
"ConjugatePartition"
"ConjugateTranspose"
"Conjugates"
"ConjugatesToPowerSums"
"ConjugatingElement"
"ConjugationClassLength"
"Connect"
"ConnectedCenter"
"ConnectedCentre"
"ConnectedComponents"
"ConnectedKernel"
"ConnectingHomomorphism"
"ConnectionNumber"
"ConnectionPolynomial"
"Conorm"
"Consistency"
"ConstaCyclicCode"
"ConstantCoefficient"
"ConstantField"
"ConstantFieldExtension"
"ConstantMap"
"ConstantRing"
"ConstantTerm"
"ConstantWords"
"Constituent"
"Constituents"
"ConstituentsWithMultiplicities"
"Constraint"
"ConstructBasicOrbit"
"ConstructBasicOrbits"
"ConstructOneOrbitInternal"
"ConstructOrbitsInternal"
"ConstructPermsInternal"
"ConstructTable"
"Construction"
"ConstructionX"
"ConstructionX3"
"ConstructionX3u"
"ConstructionXChain"
"ConstructionXX"
"ConstructionXXu"
"ConstructionY1"
"ContactLieAlgebra"
"ContainsQuadrangle"
"ContainsZero"
"Content"
"ContentAndPrimitivePart"
"Continuations"
"ContinueEnumeration"
"ContinuedFraction"
"Contpp"
"Contract"
"Contraction"
"Contravariants"
"ControlledNot"
"Convergents"
"ConvergentsSequence"
"Converse"
"ConvertFromManinSymbol"
"ConvertToCWIFormat"
"Convolution"
"ConwayPolynomial"
"Coordelt"
"Coordinate"
"CoordinateLattice"
"CoordinateMatrix"
"CoordinateRing"
"CoordinateSpace"
"CoordinateSubvariety"
"CoordinateVector"
"Coordinates"
"CoordinatesToElement"
"Coppersmith"
"CoprimeBasis"
"CoprimeBasisInsert"
"CoprimeRepresentative"
"Copy"
"CopyNames"
"CopyRepresentation"
"CordaroWagnerCode"
"Core"
"CoreflectionGroup"
"CoreflectionMatrices"
"CoreflectionMatrix"
"CorestrictCocycle"
"CorestrictionMapImage"
"Coroot"
"CorootAction"
"CorootGSet"
"CorootHeight"
"CorootLattice"
"CorootNorm"
"CorootNorms"
"CorootPosition"
"CorootSpace"
"Coroots"
"CorrectForm"
"Correlation"
"CorrelationGroup"
"CorrespondingResolutionGraph"
"CorrespondingVertices"
"Cos"
"Cosec"
"Cosech"
"CosetAction"
"CosetDistanceDistribution"
"CosetEnumerationProcess"
"CosetGeometry"
"CosetGraphIntersect"
"CosetImage"
"CosetIntersection"
"CosetKernel"
"CosetLeaders"
"CosetNumber"
"CosetRepresentatives"
"CosetSatisfying"
"CosetSpace"
"CosetTable"
"CosetTableToDFA"
"CosetTableToPermutationGroup"
"CosetTableToRepresentation"
"CosetsSatisfying"
"Cosh"
"Cot"
"Coth"
"Cotrace"
"Counit"
"CountEntriesEqual"
"CountPGroups"
"Covalence"
"Covariant"
"CoveringCovariants"
"CoveringMap"
"CoveringRadius"
"CoveringStructure"
"CoveringSubgroup"
"Coverlattice"
"CoweightLattice"
"CoxMonomialLattice"
"CoxRing"
"CoxeterDiagram"
"CoxeterElement"
"CoxeterForm"
"CoxeterGraph"
"CoxeterGroup"
"CoxeterGroupFactoredOrder"
"CoxeterGroupOrder"
"CoxeterLength"
"CoxeterMatrix"
"CoxeterNumber"
"Cputime"
"CreateCharacterFile"
"CreateCycleFile"
"CreateElement"
"CreateFanoData"
"CreateK3Data"
"CreateLieGroup"
"CreateLieGroupElement"
"CreateNilpOrbAlgLie"
"CreateRootVectorSpace"
"CreateVirtualRays"
"Create_SmallCrvMod_Structure"
"CremonaDatabase"
"CremonaReference"
"CremonaReferenceData"
"CriticalStrip"
"CrossCorrelation"
"CrossPolytope"
"CrvGenericGroup"
"CryptographicCurve"
"CrystalGraph"
"CubicFromPoint"
"CubicModel"
"CubicModelSearch"
"CubicSurfaceByHexahedralCoefficients"
"Cunningham"
"Current"
"CurrentLabel"
"Curve"
"CurveDifferential"
"CurveDivisor"
"CurvePlace"
"CurveQuotient"
"Curves"
"Cusp"
"CuspForms"
"CuspIsSingular"
"CuspPlaces"
"CuspWidth"
"CuspidalInducingDatum"
"CuspidalProjection"
"CuspidalSubgroup"
"CuspidalSubspace"
"Cusps"
"CutVertices"
"Cycle"
"CycleCount"
"CycleDecomposition"
"CycleIndexPolynomial"
"CycleStructure"
"CycleStructureToSeq"
"CyclicCode"
"CyclicGroup"
"CyclicPolytope"
"CyclicShiftsMatrix"
"CyclicSubgroups"
"CyclicToRadical"
"CyclotomicAutomorphismGroup"
"CyclotomicClassNumbers"
"CyclotomicEmbedding"
"CyclotomicFactors"
"CyclotomicField"
"CyclotomicOrder"
"CyclotomicPolynomial"
"CyclotomicQuadraticExtensions"
"CyclotomicRelativeField"
"CyclotomicUnitGroup"
"CyclotomicUnits"
"CyclotomicUnramifiedExtension"
"Cylinder"
"DFSTree"
"Darstellungsgruppe"
"Data"
"DataAutLie"
"DatabaseID"
"DatabaseType"
"DawsonIntegral"
"DecimalToBitPrecision"
"Decimation"
"Decode"
"DecodeML"
"DecodingAttack"
"DecomposeAutomorphism"
"DecomposeCharacter"
"DecomposeExteriorPower"
"DecomposeKronecker"
"DecomposeSymmetricPower"
"DecomposeTensorProduct"
"DecomposeUsing"
"DecomposeVector"
"Decomposition"
"DecompositionField"
"DecompositionGroup"
"DecompositionMatrix"
"DecompositionMultiset"
"DecompositionOldAndNew"
"DecompositionType"
"DecompositionTypeFrequency"
"Decycle"
"DedekindEta"
"DedekindTest"
"DeepHoles"
"DefRing"
"Defect"
"DefectGroup"
"DefinedInDegrees"
"DefinesAbelianSubvariety"
"DefinesHomomorphism"
"DefinesTableau"
"DefiningConstantField"
"DefiningEquation"
"DefiningEquations"
"DefiningIdeal"
"DefiningMap"
"DefiningMatrix"
"DefiningModularSymbolsSpace"
"DefiningModulusIsConductor"
"DefiningMonomial"
"DefiningPoints"
"DefiningPolynomial"
"DefiningPolynomials"
"DefiningSubschemePolynomial"
"DefiniteClassNumber"
"DefiniteGramMatrix"
"DefiniteNorm"
"DefinitionSets"
"DegeneracyCosetRepsInner"
"DegeneracyMap"
"DegeneracyMatrix"
"Degree"
"Degree2Subcovers"
"Degree3Subcovers"
"Degree6DelPezzoType2_1"
"Degree6DelPezzoType2_2"
"Degree6DelPezzoType2_3"
"Degree6DelPezzoType3"
"Degree6DelPezzoType4"
"Degree6DelPezzoType6"
"DegreeMap"
"DegreeOfCharacterField"
"DegreeOfExactConstantField"
"DegreeOfFieldExtension"
"DegreeOnePrimeIdeals"
"DegreeRange"
"DegreeReduction"
"DegreeSequence"
"Degrees"
"DegreesOfCohomologyGenerators"
"DegreesOfGenerators"
"DelPezzoSurface"
"Delaunay"
"DelaunayMesh"
"DeleteAllAssociatedData"
"DeleteAttributes"
"DeleteCapacities"
"DeleteCapacity"
"DeleteCollector"
"DeleteData"
"DeleteEdgeLabels"
"DeleteGenerator"
"DeleteGlobalModularFormsData"
"DeleteHeckePrecomputation"
"DeleteLabel"
"DeleteLabels"
"DeleteNonsplitCollector"
"DeleteNonsplitSolutionspace"
"DeleteProcess"
"DeleteProcessComplete"
"DeleteProcessDown"
"DeleteRelation"
"DeleteSplitCollector"
"DeleteSplitSolutionspace"
"DeleteStoredWords"
"DeleteVertexLabels"
"DeleteWeight"
"DeleteWeights"
"DeletedProjectiveResolution"
"DelsarteGoethalsCode"
"Delta"
"DeltaPreimage"
"Demazure"
"Denominator"
"Density"
"DensityEvolutionBinarySymmetric"
"DensityEvolutionGaussian"
"Depth"
"DepthFirstSearchTree"
"Derivation"
"Derivative"
"DerivedGroup"
"DerivedGroupMonteCarlo"
"DerivedLength"
"DerivedSeries"
"DerivedSubgroup"
"DerksenIdeal"
"Descendants"
"DescentInformation"
"DescentMaps"
"Design"
"Detach"
"DetachSpec"
"Determinant"
"Development"
"Diagonal"
"DiagonalAutomorphism"
"DiagonalBlockDecomposition"
"DiagonalBlockStructure"
"DiagonalBlocks"
"DiagonalBlocksStructure"
"DiagonalForm"
"DiagonalJoin"
"DiagonalMatrix"
"DiagonalModel"
"DiagonalSparseMatrix"
"DiagonalSum"
"Diagonalisation"
"DiagonalisingMatrix"
"Diagonalization"
"Diagram"
"DiagramAutomorphism"
"Diameter"
"DiameterPath"
"DickmanRho"
"DicksonFirst"
"DicksonInvariant"
"DicksonSecond"
"DicyclicGroup"
"Difference"
"DifferenceSet"
"Different"
"DifferentDivisor"
"Differential"
"DifferentialBasis"
"DifferentialField"
"DifferentialFieldExtension"
"DifferentialIdeal"
"DifferentialLaurentSeriesRing"
"DifferentialOperator"
"DifferentialOperatorRing"
"DifferentialRing"
"DifferentialRingExtension"
"DifferentialSpace"
"Differentiation"
"DifferentiationSequence"
"DihedralForms"
"DihedralGroup"
"Dilog"
"Dimension"
"DimensionBoundTest"
"DimensionByFormula"
"DimensionComplexTorus"
"DimensionCuspForms"
"DimensionCuspFormsGamma0"
"DimensionCuspFormsGamma1"
"DimensionNewCuspForms"
"DimensionNewCuspFormsGamma0"
"DimensionNewCuspFormsGamma1"
"DimensionOfAlgebra"
"DimensionOfCentreOfEndomorphismRing"
"DimensionOfEndomorphismRing"
"DimensionOfExactConstantField"
"DimensionOfFieldOfGeometricIrreducibility"
"DimensionOfHighestWeightModule"
"DimensionOfHom"
"DimensionOfHomology"
"DimensionOfKernelZ2"
"DimensionOfNonQFactorialLocus"
"DimensionOfSpanZ2"
"DimensionsEstimate"
"DimensionsOfHomology"
"DimensionsOfInjectiveModules"
"DimensionsOfProjectiveModules"
"DimensionsOfTerms"
"DirectProduct"
"DirectSum"
"DirectSumDecomposition"
"DirectSumRestrictionOfScalarsToQ"
"DirichletCharacter"
"DirichletCharacterFromValuesOnUnitGenerators"
"DirichletCharacters"
"DirichletGroup"
"DirichletGroupCopy"
"DirichletGroupFull"
"DirichletRestriction"
"DiscToPlane"
"Disconnect"
"DiscreteLogMapSmooth"
"Discriminant"
"DiscriminantDivisor"
"DiscriminantOfHeckeAlgebra"
"DiscriminantRange"
"DisownChildren"
"Display"
"DisplayBurnsideMatrix"
"DisplayCompTreeNodes"
"DisplayFareySymbolDomain"
"DisplayPolyMap"
"DisplayPolygons"
"Distance"
"DistanceMatrix"
"DistancePartition"
"Distances"
"DistinctDegreeFactorization"
"DistinctExtensions"
"DistinguishedOrbitsOnSimples"
"DistinguishedRoot"
"DivideOutIntegers"
"DivisionFunction"
"DivisionPoints"
"DivisionPolynomial"
"DivisionPsi"
"Divisor"
"DivisorBasis"
"DivisorClassGroup"
"DivisorClassLattice"
"DivisorGroup"
"DivisorIdeal"
"DivisorMap"
"DivisorOfDegreeOne"
"DivisorSigma"
"DivisorToPoint"
"DivisorToSheaf"
"Divisors"
"Dodecacode"
"DoesDefineFan"
"Domain"
"DominantCharacter"
"DominantDiagonalForm"
"DominantLSPath"
"DominantWeight"
"DotProduct"
"Double"
"DoubleCoset"
"DoubleCosetRepresentatives"
"DoubleCosets"
"DoubleDual"
"DoubleGenusOneModel"
"DoublePlotkinSum"
"DoubleSpaceQuartic"
"DoublyCirculantQRCode"
"DoublyCirculantQRCodeGF4"
"Dual"
"DualAtkinLehner"
"DualAtkinLehnerOperator"
"DualBasisLattice"
"DualCoxeterForm"
"DualEuclideanWeightDistribution"
"DualFaceInDualFan"
"DualFan"
"DualGraphCanonical"
"DualGraphMultiplicities"
"DualHeckeOperator"
"DualIsogeny"
"DualKroneckerZ4"
"DualLattice"
"DualLeeWeightDistribution"
"DualMatrixToPerm"
"DualMatrixToWord"
"DualModularSymbol"
"DualMorphism"
"DualPartition"
"DualPrintName"
"DualQuotient"
"DualRepresentation"
"DualStarInvolution"
"DualVectorSpace"
"DualWeightDistribution"
"DualityAutomorphism"
"DumpVerbose"
"DuvalPuiseuxExpansion"
"DynkinDiagram"
"DynkinDigraph"
"E2NForm"
"E4Form"
"E6Form"
"E8gens"
"EARNS"
"ECCanonicalLiftTraceGen"
"ECCanonicalLiftTraceGenus0"
"ECCanonicalLiftTraceHyp"
"ECDeformationTrace"
"ECM"
"ECMFactoredOrder"
"ECMOrder"
"ECMSteps"
"ECPCShanks"
"EFAModuleMaps"
"EFAModules"
"EFASeries"
"EIS"
"EISDatabase"
"EModule"
"EVALInternal"
"Ealpha"
"EasyBasis"
"EasyClean"
"EasyIdeal"
"EchRat"
"EchelonForm"
"EchelonNullspace"
"EcheloniseWord"
"Echelonize"
"EchelonizeWord"
"EckardtPoints"
"EdgeCapacities"
"EdgeConnectivity"
"EdgeDeterminant"
"EdgeGroup"
"EdgeIndices"
"EdgeLabel"
"EdgeLabels"
"EdgeMultiplicity"
"EdgeSeparator"
"EdgeSet"
"EdgeUnion"
"EdgeWeights"
"Edges"
"EffectivePossibilities"
"EffectiveSubcanonicalCurves"
"EhrhartCoefficient"
"EhrhartCoefficients"
"EhrhartDeltaVector"
"EhrhartPolynomial"
"EhrhartSeries"
"EichlerInvariant"
"Eigenform"
"Eigenforms"
"Eigenspace"
"Eigenvalues"
"EigenvectorInTermsOfExpansionBasis"
"EightCoverings"
"EightDescent"
"Eisenstein"
"EisensteinData"
"EisensteinIntegerRing"
"EisensteinIntegers"
"EisensteinProjection"
"EisensteinSeries"
"EisensteinSubspace"
"EisensteinTwo"
"Element"
"ElementOfNorm"
"ElementOfOrder"
"ElementOffset"
"ElementSequence"
"ElementSet"
"ElementToMonoidSequence"
"ElementToSequence"
"ElementToSequencePad"
"ElementToTuple"
"ElementType"
"ElementaryAbelianGroup"
"ElementaryAbelianNormalSubgroup"
"ElementaryAbelianQuotient"
"ElementaryAbelianSection"
"ElementaryAbelianSeries"
"ElementaryAbelianSeriesCanonical"
"ElementaryAbelianSubgroups"
"ElementaryDivisors"
"ElementaryDivisorsMultiset"
"ElementarySymmetricPolynomial"
"ElementarySymmetricToCoefficients"
"ElementarySymmetricToPowerSums"
"ElementaryToHomogeneousMatrix"
"ElementaryToMonomialMatrix"
"ElementaryToPowerSumMatrix"
"ElementaryToSchurMatrix"
"Elements"
"EliasAsymptoticBound"
"EliasBound"
"Eliminate"
"EliminateGenerators"
"EliminateRedundancy"
"EliminateRedundantBasePoints"
"Elimination"
"EliminationIdeal"
"EllipticCurve"
"EllipticCurveDatabase"
"EllipticCurveDatabaseLarge"
"EllipticCurveFromjInvariant"
"EllipticCurveSearch"
"EllipticCurveWithGoodReductionSearch"
"EllipticCurveWithjInvariant"
"EllipticCurves"
"EllipticExponential"
"EllipticFactors"
"EllipticInvariants"
"EllipticLogarithm"
"EllipticPeriods"
"EllipticPoints"
"EltTup"
"Eltlist"
"Eltnum"
"Eltseq"
"EltseqPad"
"Embed"
"EmbedIntoMinimalCyclotomicField"
"EmbedPlaneCurveInP3"
"Embedding"
"EmbeddingMap"
"EmbeddingMatrix"
"EmbeddingSpace"
"Embeddings"
"EmptyBasket"
"EmptyCohomologyModule"
"EmptyDigraph"
"EmptyGraph"
"EmptyMultiDigraph"
"EmptyMultiGraph"
"EmptyNetwork"
"EmptyPolyhedron"
"EmptyScheme"
"EmptySubscheme"
"End"
"EndVertices"
"EndomorphismAlgebra"
"EndomorphismRing"
"Endomorphisms"
"EndpointWeight"
"EnterStauduhar"
"EntriesInterpolation"
"EntriesInterpolationExpansion"
"Entropy"
"Entry"
"Enumerate"
"EnumerationCost"
"EnumerationCostArray"
"Eof"
"EqualDFA"
"EqualDegreeFactorization"
"Equality"
"EqualizeDegrees"
"Equation"
"EquationOrder"
"EquationOrderFinite"
"EquationOrderInfinite"
"Equations"
"EquiDecomposition"
"EquidimensionalDecomposition"
"EquidimensionalPart"
"EquidimensionalRadical"
"EquitablePartition"
"EquivalentPoint"
"EquivalentQuotients"
"Erf"
"Erfc"
"Error"
"ErrorFunction"
"EstimateOrbit"
"Eta"
"EtaTPairing"
"EtaqPairing"
"EuclideanLeftDivision"
"EuclideanNorm"
"EuclideanRightDivision"
"EuclideanWeight"
"EuclideanWeightDistribution"
"EuclideanWeightEnumerator"
"EulerCharacteristic"
"EulerFactor"
"EulerFactorModChar"
"EulerFactorsByDeformation"
"EulerGamma"
"EulerGraphDatabase"
"EulerPhi"
"EulerPhiInverse"
"EulerProduct"
"EulerianGraphDatabase"
"EulerianNumber"
"Evaluate"
"EvaluateAt"
"EvaluateByPowerSeries"
"EvaluateClassGroup"
"EvaluateDerivatives"
"EvaluatePolynomial"
"EvaluationPowerSeries"
"EvenOrderElement"
"EvenSublattice"
"EvenWeightCode"
"EvenWeightSubcode"
"ExactConstantField"
"ExactExtension"
"ExactLattice"
"ExactLength"
"ExactQuotient"
"ExactScalarProduct"
"ExactValue"
"ExceptionalCurveIntersection"
"ExceptionalSelfIntersection"
"ExceptionalUnitOrbit"
"ExceptionalUnits"
"ExchangeElement"
"Exclude"
"ExcludedConjugate"
"ExcludedConjugates"
"ExistsConwayPolynomial"
"ExistsCosetSatisfying"
"ExistsCoveringStructure"
"ExistsExcludedConjugate"
"ExistsGroupData"
"ExistsModularCurveDatabase"
"ExistsNormalisingCoset"
"ExistsNormalizingCoset"
"Exp"
"Expand"
"ExpandBasis"
"ExpandQuaternionicBasis"
"ExpandToPrecision"
"ExpandZ"
"Experimental_InnerTwistOperator"
"ExplicitCoset"
"Exponent"
"ExponentDenominator"
"ExponentLattice"
"ExponentLaw"
"ExponentSum"
"ExponentialFieldExtension"
"ExponentialIntegral"
"ExponentialIntegralE1"
"Exponents"
"ExpurgateCode"
"ExpurgateWeightCode"
"Ext"
"ExtAlgebra"
"ExtGenerators"
"Extcont"
"Extend"
"ExtendBasicOrbit"
"ExtendBasicOrbits"
"ExtendBasis"
"ExtendCode"
"ExtendDynkinDiagramPermutation"
"ExtendEchelonForm"
"ExtendField"
"ExtendFieldCode"
"ExtendGaloisCocycle"
"ExtendGeodesic"
"ExtendIsometry"
"ExtendMultiplicativeGroup"
"ExtendPrimaryInvariants"
"ExtendedCategory"
"ExtendedCohomologyClass"
"ExtendedGreatestCommonDivisor"
"ExtendedGreatestCommonLeftDivisor"
"ExtendedGreatestCommonRightDivisor"
"ExtendedLeastCommonLeftMultiple"
"ExtendedOneCocycle"
"ExtendedPerfectCodeZ4"
"ExtendedReals"
"ExtendedRing"
"ExtendedSL"
"ExtendedSp"
"ExtendedType"
"ExtendedValuationRing"
"Extends"
"Extension"
"ExtensionCategory"
"ExtensionClasses"
"ExtensionExponents"
"ExtensionMorphism"
"ExtensionNumbers"
"ExtensionPrimes"
"ExtensionProcess"
"ExtensionsOfElementaryAbelianGroup"
"ExtensionsOfSolubleGroup"
"Exterior"
"ExteriorAlgebra"
"ExteriorPower"
"ExteriorPowerNaturalModule"
"ExteriorSquare"
"ExternalLines"
"ExtraAutomorphism"
"ExtraSpecialAction"
"ExtraSpecialBasis"
"ExtraSpecialGroup"
"ExtraSpecialNormaliser"
"ExtraSpecialParameters"
"ExtractBlock"
"ExtractBlockRange"
"ExtractDiagonalBlocks"
"ExtractGenerators"
"ExtractGroup"
"ExtractRep"
"ExtraspecialPair"
"ExtraspecialPairs"
"ExtraspecialSigns"
"ExtremalLieAlgebra"
"ExtremalRayContraction"
"ExtremalRayContractionDivisor"
"ExtremalRayContractions"
"ExtremalRays"
"F4O"
"FFPatchIndex"
"FGIntersect"
"FPGroup"
"FPGroupColouring"
"FPGroupStrong"
"FPQuotient"
"Face"
"FaceFunction"
"FaceIndices"
"FaceSupportedBy"
"Faces"
"FacesContaining"
"FacetIndices"
"Facets"
"Facint"
"Facpol"
"Factor"
"FactorBasis"
"FactorBasisCreate"
"FactorBasisVerify"
"FactoredCarmichaelLambda"
"FactoredCharacteristicPolynomial"
"FactoredChevalleyGroupOrder"
"FactoredClassicalGroupOrder"
"FactoredDefiningPolynomials"
"FactoredDiscriminant"
"FactoredEulerPhi"
"FactoredEulerPhiInverse"
"FactoredHeckePolynomial"
"FactoredIndex"
"FactoredInverseDefiningPolynomials"
"FactoredMCPolynomials"
"FactoredMinimalAndCharacteristicPolynomials"
"FactoredMinimalPolynomial"
"FactoredModulus"
"FactoredOrder"
"FactoredOrderGL"
"FactoredProjectiveOrder"
"Factorial"
"FactorialValuation"
"Factorisation"
"FactorisationOverSplittingField"
"FactorisationToInteger"
"FactorisationToPolynomial"
"Factorization"
"FactorizationOfQuotient"
"FactorizationOverSplittingField"
"FactorizationToInteger"
"FaithfulModule"
"FakeIsogenySelmerSet"
"FakeProjectiveSpace"
"Falpha"
"FaltingsHeight"
"FamilyOfMultivaluedSections"
"Fan"
"FanOfAffineSpace"
"FanOfFakeProjectiveSpace"
"FanOfWPS"
"Fano"
"FanoBaseGenus"
"FanoBaskets"
"FanoDatabase"
"FanoGenus"
"FanoIndex"
"FanoIsolatedBaskets"
"FanoToRecord"
"FareySymbol"
"FastRoots"
"FewGenerators"
"Fibonacci"
"FibonacciGroup"
"Field"
"FieldAutomorphism"
"FieldCategory"
"FieldCharacteristic"
"FieldExponent"
"FieldMorphism"
"FieldOfDefinition"
"FieldOfFractions"
"FieldOfGeometricIrreducibility"
"FieldSize"
"FileProcess"
"FilterProcess"
"FilterVector"
"FindAsocAlgebraRep"
"FindChevalleyBasis"
"FindChevalleyBasisDiagonal"
"FindChevalleyBasisQuad"
"FindCommonEmbeddings"
"FindDependencies"
"FindEntries"
"FindFirstGenerators"
"FindGenerators"
"FindIndexes"
"FindLieAlgebra"
"FindN"
"FindPowerSeries"
"FindPowerSeriesForChabauty"
"FindRelations"
"FindRelationsInCWIFormat"
"FindSplitElement"
"FindWord"
"FindXYH"
"FineEquidimensionalDecomposition"
"FiniteAffinePlane"
"FiniteDivisor"
"FiniteField"
"FiniteLieAlgebra"
"FiniteProjectivePlane"
"FiniteSplit"
"FireCode"
"FirstCohomology"
"FirstIndexOfColumn"
"FirstIndexOfRow"
"FirstPoleElement"
"FirstWeights"
"FischerSubgroup"
"FittingGroup"
"FittingIdeal"
"FittingIdeals"
"FittingLength"
"FittingSeries"
"FittingSubgroup"
"Fix"
"FixedArc"
"FixedField"
"FixedGroup"
"FixedPoints"
"FixedSubspaceToPolyhedron"
"FlagComplex"
"Flat"
"FlatProduct"
"FlatsNullMatrix"
"Flexes"
"Flip"
"Floor"
"Flow"
"Flush"
"Form"
"FormType"
"FormalChain"
"FormalGroupHomomorphism"
"FormalGroupLaw"
"FormalLog"
"FormalPoint"
"FormalSet"
"Format"
"FourCoverPullback"
"FourDescent"
"FourToTwoCovering"
"FourierMotzkin"
"FractionalPart"
"FrattiniQuotientRank"
"FrattiniSubgroup"
"FreeAbelianGroup"
"FreeAbelianQuotient"
"FreeAlgebra"
"FreeGenerators"
"FreeGroup"
"FreeGroupIndex"
"FreeGroupIsIn"
"FreeLieAlgebra"
"FreeMonoid"
"FreeNilpotentGroup"
"FreeProduct"
"FreeResolution"
"FreeSemigroup"
"FreefValues"
"Frobenius"
"FrobeniusActionOnPoints"
"FrobeniusActionOnReducibleFiber"
"FrobeniusActionOnTrivialLattice"
"FrobeniusAutomorphism"
"FrobeniusAutomorphisms"
"FrobeniusElement"
"FrobeniusEndomorphism"
"FrobeniusForm"
"FrobeniusFormAlternating"
"FrobeniusImage"
"FrobeniusMap"
"FrobeniusPolynomial"
"FrobeniusTraceDirect"
"FrobeniusTracesToWeilPolynomials"
"FromAnalyticJacobian"
"FromLiE"
"FuchsianGroup"
"FuchsianMatrixRepresentation"
"FullCharacteristicPolynomial"
"FullCone"
"FullCorootLattice"
"FullDimension"
"FullDirichletGroup"
"FullMinimalPolynomialTest"
"FullModule"
"FullPrimaryInvariantSpaces"
"FullRootLattice"
"Function"
"FunctionDegree"
"FunctionField"
"FunctionFieldCategory"
"FunctionFieldDatabase"
"FunctionFieldDifferential"
"FunctionFieldDivisor"
"FunctionFieldPlace"
"FunctionFields"
"Functor"
"FundamentalClassGroup"
"FundamentalClassGroupStructure"
"FundamentalClassNumber"
"FundamentalClosure"
"FundamentalCoweights"
"FundamentalDiscriminant"
"FundamentalDomain"
"FundamentalElement"
"FundamentalGroup"
"FundamentalInvariants"
"FundamentalInvariantsKing"
"FundamentalKernel"
"FundamentalQuotient"
"FundamentalUnit"
"FundamentalUnits"
"FundamentalVolume"
"FundamentalWeights"
"G2"
"G2Invariants"
"G2Reduced"
"G2ToIgusaInvariants"
"GCD"
"GCDSup"
"GCLD"
"GCRD"
"GF"
"GHom"
"GHomOverCentralizingField"
"GL"
"GLB"
"GLNormaliser"
"GModule"
"GModuleAction"
"GModuleConductorOfCoefficientField"
"GModuleLinear"
"GModulePrimes"
"GO"
"GOMinus"
"GOPlus"
"GPCGroup"
"GR"
"GRBsktToRec"
"GRCrvSToRec"
"GRHBound"
"GRPtSToRec"
"GRSCode"
"GRSchToRec"
"GSShortOrbitSubset"
"GSShortSubset"
"GSet"
"GSetFromIndexed"
"GU"
"GabidulinCode"
"GallagerCode"
"GaloisActionOnLines"
"GaloisCohomology"
"GaloisConjugacyRepresentatives"
"GaloisConjugate"
"GaloisData"
"GaloisField"
"GaloisGroup"
"GaloisGroupInvariant"
"GaloisImage"
"GaloisMultiplicities"
"GaloisOrbit"
"GaloisProof"
"GaloisQuotient"
"GaloisRepresentation"
"GaloisRing"
"GaloisRoot"
"GaloisSplittingField"
"GaloisSubfieldTower"
"GaloisSubgroup"
"Gamma"
"Gamma0"
"Gamma1"
"GammaAction"
"GammaActionOnSimples"
"GammaCorootSpace"
"GammaD"
"GammaFactors"
"GammaGroup"
"GammaOrbitOnRoots"
"GammaOrbitsOnRoots"
"GammaOrbitsRepresentatives"
"GammaRootSpace"
"GammaUpper0"
"GammaUpper1"
"GapNumbers"
"GaussNumber"
"GaussReduce"
"GaussReduceGram"
"GaussSum"
"GaussianBinomial"
"GaussianFactorial"
"GaussianIntegerRing"
"GaussianIntegers"
"Gcd"
"GcdSup"
"GcdWithLoss"
"GegenbauerPolynomial"
"GenCrvGrpData"
"GenModuleProject"
"GeneralLinearGroup"
"GeneralOrthogonalGroup"
"GeneralOrthogonalGroupMinus"
"GeneralOrthogonalGroupPlus"
"GeneralReeTorusElement"
"GeneralUnitaryGroup"
"GeneralisedEquationOrder"
"GeneralisedNorm"
"GeneralisedRowReduction"
"GeneralisedWallForm"
"GeneralizedAGCode"
"GeneralizedAlgebraicGeometricCode"
"GeneralizedFibonacciNumber"
"GeneralizedNorm"
"GeneralizedSrivastavaCode"
"GenerateGraphs"
"GeneratepGroups"
"GeneratingPolynomial"
"GeneratingSet"
"GeneratingSubfields"
"GeneratingSubfieldsLattice"
"GeneratingWords"
"Generator"
"GeneratorMatrix"
"GeneratorNumber"
"GeneratorOrder"
"GeneratorPolynomial"
"GeneratorStructure"
"Generators"
"GeneratorsOverBaseRing"
"GeneratorsSequence"
"GeneratorsSequenceOverBaseRing"
"Generic"
"GenericAbelianGroup"
"GenericDatabase"
"GenericGenus"
"GenericGroup"
"GenericModel"
"GenericPoint"
"GenericPolynomial"
"Genus"
"GenusContribution"
"GenusDistribution"
"GenusField"
"GenusOneModel"
"GenusRepresentatives"
"GenusX0N"
"GenusX0NQuotient"
"GenusX1N"
"Geodesic"
"GeodesicExists"
"Geodesics"
"GeodesicsIntersection"
"GeometricAutomorphismGroup"
"GeometricAutomorphismGroupClassification"
"GeometricGenus"
"GeometricGenusOfDesingularization"
"GeometricGenusUsingToricGeometry"
"GeometricMordellWeilLattice"
"GeometricPicardGroup"
"GeometricSupport"
"GeometricTorsionBound"
"Germ"
"GetAssertions"
"GetAttributes"
"GetAutoColumns"
"GetAutoCompact"
"GetBeep"
"GetBraidRelations"
"GetCells"
"GetChild"
"GetChildren"
"GetClassGroupBoundFactorBasis"
"GetClassGroupBoundGenerators"
"GetColumns"
"GetConicSubfieldMethodDegreeBound"
"GetCurrentDirectory"
"GetDefaultRealField"
"GetEchoInput"
"GetElementPrintFormat"
"GetEnv"
"GetEnvironmentValue"
"GetEvaluationComparison"
"GetForceCFP"
"GetHelpExternalBrowser"
"GetHelpExternalSystem"
"GetHelpUseExternal"
"GetHistorySize"
"GetIgnoreEof"
"GetIgnorePrompt"
"GetIgnoreSpaces"
"GetIloadAllowEsc"
"GetIndent"
"GetIntegerNewtonPolygon"
"GetIntrinsicName"
"GetKantPrecision"
"GetKaratsubaThreshold"
"GetLibraries"
"GetLibraryRoot"
"GetLineEditor"
"GetMPCVersion"
"GetMPFRVersion"
"GetMS"
"GetMaximumMemoryUsage"
"GetMemoryExtensionSize"
"GetMemoryLimit"
"GetMemoryUsage"
"GetModule"
"GetModules"
"GetMonoidNewtonPolygon"
"GetNthreads"
"GetParent"
"GetPath"
"GetPrecision"
"GetPresentation"
"GetPreviousSize"
"GetPrimes"
"GetPrintLevel"
"GetPrompt"
"GetQuotient"
"GetRep"
"GetRows"
"GetSeed"
"GetShellCompletion"
"GetShowPromptAlways"
"GetStoredFactors"
"GetTempDir"
"GetTraceback"
"GetTransGroupIDMany"
"GetUserProcessData"
"GetVerbose"
"GetVersion"
"GetViMode"
"Getc"
"Getpid"
"Gets"
"Getuid"
"Getvecs"
"GewirtzGraph"
"GilbertVarshamovAsymptoticBound"
"GilbertVarshamovBound"
"GilbertVarshamovLinearBound"
"Girth"
"GirthCycle"
"GlobalSectionSubmodule"
"GlobalUnitGroup"
"Glue"
"GoethalsCode"
"GoethalsDelsarteCode"
"GolayCode"
"GolayCodeZ4"
"GoodBasePoints"
"GoodDescription"
"GoodLDPCEnsemble"
"GoppaCode"
"GoppaDesignedDistance"
"GorensteinClosure"
"GorensteinIndex"
"Graded"
"GradedBettiTable"
"GradedCokernel"
"GradedCommutativeRing"
"GradedCone"
"GradedDirectSum"
"GradedDual"
"GradedDualComplex"
"GradedDualWithHoms"
"GradedFreeModule"
"GradedHoms"
"GradedIdentityMap"
"GradedImage"
"GradedKernel"
"GradedMinimalFreeResolution"
"GradedModule"
"GradedRingData"
"GradedRingDatabase"
"GradedToricLattice"
"GradientVector"
"GradientVectors"
"Grading"
"Gradings"
"GramIsomorphismInvariants"
"GramLength"
"GramMatrix"
"GramReduction"
"GramSchmidtProcess"
"GramSchmidtReduce"
"GramSchmidtReduction"
"Graph"
"GraphAutomorphism"
"GraphInBytes"
"GraphSizeInBytes"
"Graphs"
"GrayMap"
"GrayMapImage"
"GreatestCommonDivisor"
"GreatestCommonLeftDivisor"
"GreatestCommonRightDivisor"
"GreatestLowerBound"
"GriesmerBound"
"GriesmerLengthBound"
"GriesmerMinimumWeightBound"
"Groebner"
"GroebnerBasis"
"GroebnerBasisUnreduced"
"GroebnerWalk"
"GrossenCheck"
"Grossencharacter"
"GroundField"
"Group"
"GroupAlgebra"
"GroupAlgebraAsStarAlgebra"
"GroupData"
"GroupGenerators"
"GroupIdeal"
"GroupOfLieType"
"GroupOfLieTypeFactoredOrder"
"GroupOfLieTypeHomomorphism"
"GroupOfLieTypeOrder"
"GroupType"
"Groupsp7"
"GrowthFunction"
"GrowthFunctionDFA"
"GrowthFunctionOld"
"GrpFPToCox"
"GrpPermToCox"
"GuessAltsymDegree"
"H2_G_A"
"H2_G_QmodZ"
"HBChevalleyGroupOrder"
"HBClassicalGroupOrder"
"HBinomial"
"HKZ"
"HKZGram"
"HadamardAutomorphismGroup"
"HadamardCanonicalForm"
"HadamardCodeZ4"
"HadamardColumnDesign"
"HadamardDatabase"
"HadamardDatabaseInformation"
"HadamardDatabaseInformationEmpty"
"HadamardEltseq"
"HadamardGraph"
"HadamardInvariant"
"HadamardMatrixFromInteger"
"HadamardMatrixToInteger"
"HadamardNormalize"
"HadamardRowDesign"
"HadamardTransformation"
"HalfIntegralWeightForms"
"HalfspaceToPolyhedron"
"HallSubgroup"
"HamiltonianLieAlgebra"
"HammingAsymptoticBound"
"HammingCode"
"HammingWeightEnumerator"
"HarmonicNumber"
"HasAdditionAlgorithm"
"HasAffinePatch"
"HasAllPQuotientsMetacyclic"
"HasAllRootsOnUnitCircle"
"HasAlmostUniqueLocalParametrization"
"HasAlmostUniqueLocalUniformizer"
"HasAssociatedNewSpace"
"HasAttribute"
"HasAutomorphisms"
"HasBSGS"
"HasBaseExtension"
"HasBaseExtensionMorphisms"
"HasBlockDiagMat"
"HasC6Decomposition"
"HasCM"
"HasClique"
"HasClosedCosetTable"
"HasCoercion"
"HasComplement"
"HasCompleteCosetTable"
"HasComplexConjugate"
"HasComplexMultiplication"
"HasComposition"
"HasCompositionSequence"
"HasCompositionTree"
"HasComputableAbelianQuotient"
"HasComputableLCS"
"HasComputableSubgroups"
"HasConic"
"HasCoordinates"
"HasDecomposition"
"HasDefinedModuleMap"
"HasDefinedTerm"
"HasDefiningMap"
"HasDenseAndSparseRep"
"HasDenseRep"
"HasDenseRepOnly"
"HasEasyIdeal"
"HasEchelonForm"
"HasElementaryBasis"
"HasEmbedding"
"HasExtension"
"HasFactorisation"
"HasFactorization"
"HasFiniteAQ"
"HasFiniteAbelianQuotient"
"HasFiniteDimension"
"HasFiniteKernel"
"HasFiniteOrder"
"HasFixedBaseObject"
"HasFrobeniusEndomorphism"
"HasFunctionField"
"HasGCD"
"HasGNB"
"HasGrevlexOrder"
"HasGroebnerBasis"
"HasHomogeneousBasis"
"HasIdentity"
"HasImage"
"HasInclusion"
"HasIndexOne"
"HasIndexOneEverywhereLocally"
"HasInfiniteComputableAbelianQuotient"
"HasInfinitePSL2Quotient"
"HasIntegralPoint"
"HasIntersectionProperty"
"HasIntersectionPropertyN"
"HasInverse"
"HasIrregularFibres"
"HasIsomorphismExtension"
"HasIsomorphismExtensions"
"HasIsomorphisms"
"HasIsotropicVector"
"HasKnownInverse"
"HasLeviSubalgebra"
"HasLine"
"HasLinearGrayMapImage"
"HasMatrix"
"HasMonomialBasis"
"HasMorphism"
"HasMorphismAutomorphism"
"HasMorphismAutomorphisms"
"HasMorphismFromImages"
"HasMorphismFromImagesAndBaseMorphism"
"HasMultiplicityOne"
"HasNegativeWeightCycle"
"HasNonSingularFibres"
"HasNonsingularPoint"
"HasOddDegreeModel"
"HasOne"
"HasOnlyOrdinarySingularities"
"HasOnlyOrdinarySingularitiesMonteCarlo"
"HasOrder"
"HasOutputFile"
"HasPRoot"
"HasParallelClass"
"HasParallelism"
"HasPlace"
"HasPoint"
"HasPointsEverywhereLocally"
"HasPointsOverExtension"
"HasPolynomial"
"HasPolynomialFactorization"
"HasPolynomialGroebnerBasis"
"HasPolynomialResultant"
"HasPowerSumBasis"
"HasPreimage"
"HasPreimageFunction"
"HasProjectiveDerivation"
"HasRandomPlace"
"HasRationalPoint"
"HasRationalPointUsingSubfield"
"HasRationalSolutions"
"HasReducedFibres"
"HasResolution"
"HasRestriction"
"HasResultant"
"HasRightCancellation"
"HasRoot"
"HasRootOfUnity"
"HasSchurBasis"
"HasSignature"
"HasSingularPointsOverExtension"
"HasSingularVector"
"HasSparseRep"
"HasSparseRepOnly"
"HasSquareSha"
"HasSupplement"
"HasTwistedHopfStructure"
"HasValidCosetTable"
"HasValidIndex"
"HasWeakIntersectionProperty"
"HasZeroDerivation"
"Hash"
"HasseMinkowskiInvariant"
"HasseMinkowskiInvariants"
"HasseWittInvariant"
"HeckeAlgebra"
"HeckeAlgebraFields"
"HeckeAlgebraZBasis"
"HeckeBound"
"HeckeCharacter"
"HeckeCharacterGroup"
"HeckeCharacteristicPolynomial"
"HeckeCorrespondence"
"HeckeEigenvalue"
"HeckeEigenvalueBound"
"HeckeEigenvalueField"
"HeckeEigenvalueRing"
"HeckeFieldSpan"
"HeckeImages"
"HeckeImagesAll"
"HeckeLift"
"HeckeMatrix"
"HeckeMatrixBianchi"
"HeckeOperator"
"HeckeOperatorModSym"
"HeckePolynomial"
"HeckeSpan"
"HeckeTrace"
"HeegnerDiscriminants"
"HeegnerForms"
"HeegnerIndex"
"HeegnerPoint"
"HeegnerPointNumberOfTerms"
"HeegnerPoints"
"HeegnerTorsionElement"
"Height"
"HeightConstant"
"HeightOnAmbient"
"HeightPairing"
"HeightPairingLattice"
"HeightPairingMatrix"
"HeightZeroSublattice"
"HeilbronnCremona"
"HeilbronnMerel"
"HenselLift"
"HenselProcess"
"HermiteConstant"
"HermiteForm"
"HermiteNormalFormProcess"
"HermiteNormalForms"
"HermiteNumber"
"HermitePolynomial"
"HermitianAutomorphismGroup"
"HermitianCartanMatrix"
"HermitianCode"
"HermitianCurve"
"HermitianDual"
"HermitianFunctionField"
"HermitianTranspose"
"HesseCovariants"
"HesseModel"
"HessePolynomials"
"HessenbergForm"
"Hessian"
"HessianMatrix"
"Hexacode"
"HighMap"
"HighProduct"
"HighRankExceptionalStdGens"
"HighestCoroot"
"HighestLongCoroot"
"HighestLongRoot"
"HighestRoot"
"HighestShortCoroot"
"HighestShortRoot"
"HighestWeightModule"
"HighestWeightRepresentation"
"HighestWeightSpace"
"HighestWeightVectors"
"HighestWeights"
"HighestWeightsAndVectors"
"Hilbert90"
"HilbertBasis"
"HilbertCharacterSubgroup"
"HilbertClassField"
"HilbertClassPolynomial"
"HilbertCoefficient"
"HilbertCoefficients"
"HilbertCuspForms"
"HilbertDeltaVector"
"HilbertDenominator"
"HilbertFunction"
"HilbertGroebnerBasis"
"HilbertIdeal"
"HilbertMatrix"
"HilbertNumerator"
"HilbertNumeratorBettiNumbers"
"HilbertPolynomial"
"HilbertPolynomialOfCurve"
"HilbertSeries"
"HilbertSeriesApproximation"
"HilbertSeriesMultipliedByMinimalDenominator"
"HilbertSpace"
"HilbertSymbol"
"HirschNumber"
"HirzebruchSurface"
"Holes"
"Holomorph"
"Hom"
"HomAdjoints"
"HomGenerators"
"HomogeneousBlock"
"HomogeneousComponent"
"HomogeneousComponents"
"HomogeneousModuleTest"
"HomogeneousModuleTestBasis"
"HomogeneousRadical"
"HomogeneousToElementaryMatrix"
"HomogeneousToMonomialMatrix"
"HomogeneousToPowerSumMatrix"
"HomogeneousToSchurMatrix"
"Homogenization"
"HomologicalDimension"
"Homology"
"HomologyBasis"
"HomologyData"
"HomologyGenerators"
"HomologyGroup"
"HomologyOfChainComplex"
"Homomorphism"
"Homomorphisms"
"HomomorphismsLM"
"HomomorphismsProcess"
"HookLength"
"HorizontalFunction"
"HorizontalJoin"
"HorizontalVertices"
"Hull"
"HyperbolicBasis"
"HyperbolicCoxeterGraph"
"HyperbolicCoxeterMatrix"
"HyperbolicPair"
"HyperbolicSplitting"
"Hypercenter"
"Hypercentre"
"HyperellipticCurve"
"HyperellipticCurveFromG2Invariants"
"HyperellipticCurveFromIgusaClebsch"
"HyperellipticCurveOfGenus"
"HyperellipticInfiniteIntegral0"
"HyperellipticIntegral"
"HyperellipticInvolution"
"HyperellipticPolynomial"
"HyperellipticPolynomials"
"HypergeometricSeries"
"HypergeometricSeries2F1"
"HypergeometricU"
"Hyperplane"
"HyperplaneAtInfinity"
"HyperplaneSectionDivisor"
"HyperplaneToPolyhedron"
"ISA"
"ISABaseField"
"Id"
"IdDataNLAC"
"IdDataSLAC"
"Ideal"
"IdealFactorisation"
"IdealOfSupport"
"IdealQuotient"
"IdealWithFixedBasis"
"Idealiser"
"Idealizer"
"Ideals"
"IdealsAreEqual"
"IdealsUpTo"
"Idempotent"
"IdempotentActionGenerators"
"IdempotentGenerators"
"IdempotentPositions"
"Idempotents"
"IdenticalAmbientSpace"
"IdentificationNumber"
"IdentifyAlmostSimpleGroup"
"IdentifyGroup"
"IdentifyOneCocycle"
"IdentifyTwoCocycle"
"IdentifyZeroCocycle"
"Identity"
"IdentityAutomorphism"
"IdentityFieldMorphism"
"IdentityHomomorphism"
"IdentityIsogeny"
"IdentityMap"
"IdentityMatrix"
"IdentityMorphism"
"IdentitySparseMatrix"
"IdentityTransformation"
"IgusaClebschInvariants"
"IgusaClebschToClebsch"
"IgusaInvariants"
"IgusaToG2Invariants"
"IharaBound"
"Ilog"
"Ilog2"
"Im"
"Image"
"ImageBasis"
"ImageFan"
"ImageFromMat"
"ImageFunction"
"ImageOfComponentGroupOfJ0N"
"ImageSystem"
"ImageWithBasis"
"Imaginary"
"ImplicitFunction"
"Implicitization"
"ImportExternalMorphism"
"ImprimitiveAction"
"ImprimitiveBasis"
"ImprimitiveReflectionGroup"
"ImprimitiveReflectionGroupOld"
"ImproveAutomorphismGroup"
"InDegree"
"InEdge"
"InNeighbors"
"InNeighbours"
"IncidenceDigraph"
"IncidenceGeometry"
"IncidenceGraph"
"IncidenceMatrix"
"IncidenceStructure"
"IncidentEdges"
"Include"
"IncludeAutomorphism"
"IncludeWeight"
"InclusionMap"
"Inclusions"
"IndCond"
"IndecomposableSummands"
"IndentPop"
"IndentPush"
"IndependenceNumber"
"IndependentGenerators"
"IndependentUnits"
"IndeterminacyLocus"
"Index"
"IndexCalculus"
"IndexCalculusMatrix"
"IndexFormEquation"
"IndexOfFirstWhiteSpace"
"IndexOfNonWhiteSpace"
"IndexOfPartition"
"IndexOfSpeciality"
"IndexToElement"
"IndexedCoset"
"IndexedSet"
"IndexedSetToSequence"
"IndexedSetToSet"
"Indicator"
"Indices"
"IndicialPolynomial"
"IndivisibleSubdatum"
"IndivisibleSubsystem"
"InduceWG"
"InduceWGtable"
"InducedAutomorphism"
"InducedDivisorMap"
"InducedDivisorMap_old"
"InducedGammaGroup"
"InducedMap"
"InducedMapOnHomology"
"InducedOneCocycle"
"InducedPermutation"
"Induction"
"InductionCondensation"
"InductionSpin"
"IneffectiveDivisorToSheaf"
"IneffectivePossibilities"
"IneffectiveRiemannRochBasis"
"IneffectiveSubcanonicalCurves"
"Inequalities"
"InertiaDegree"
"InertiaField"
"InertiaGroup"
"InertialElement"
"Infimum"
"InfiniteDivisor"
"InfiniteOrderTest"
"InfinitePart"
"InfinitePlaces"
"InfinitePolynomial"
"InfiniteSum"
"Infinity"
"InflationMap"
"InflationMapImage"
"InflectionPoints"
"InformationRate"
"InformationSet"
"InformationSpace"
"InitProspector"
"InitialCoefficients"
"InitialVertex"
"InitialiseProspector"
"Initialize"
"InitializeBase"
"InitializeEvaluation"
"InitializeGaussianQuadrature"
"Injection"
"Injections"
"InjectiveHull"
"InjectiveModule"
"InjectiveResolution"
"InjectiveSyzygyModule"
"InnerAutomorphism"
"InnerAutomorphismGroup"
"InnerFaces"
"InnerGenerators"
"InnerNormal"
"InnerNormals"
"InnerProduct"
"InnerProductMatrix"
"InnerShape"
"InnerSlopes"
"InnerTwistOperator"
"InnerTwists"
"InnerVertices"
"InseparableDegree"
"Insert"
"InsertBasePoint"
"InsertBlock"
"InsertVertex"
"InstallInverseConstructor"
"Instance"
"InstancesForDimensions"
"IntegerMatrixEntryBound"
"IntegerRelation"
"IntegerRing"
"IntegerSolutionVariables"
"IntegerToSequence"
"IntegerToString"
"Integers"
"Integral"
"IntegralBasis"
"IntegralBasisLattice"
"IntegralBasisMinus"
"IntegralBasisPlus"
"IntegralClosure"
"IntegralDecomposition"
"IntegralGramMatrix"
"IntegralGroup"
"IntegralHeckeOperator"
"IntegralHomology"
"IntegralMapping"
"IntegralMatrix"
"IntegralMatrixByRows"
"IntegralMatrixGroupDatabase"
"IntegralMatrixOverQ"
"IntegralModel"
"IntegralModule"
"IntegralMultiple"
"IntegralNormEquation"
"IntegralPart"
"IntegralPoints"
"IntegralQuarticPoints"
"IntegralRepresentation"
"IntegralSplit"
"IntegralUEA"
"IntegralUEAlgebra"
"IntegralUniversalEnvelopingAlgebra"
"IntegralVector"
"Interior"
"InteriorPoints"
"Interpolation"
"IntersectKernels"
"Intersection"
"IntersectionArray"
"IntersectionCardinality"
"IntersectionForm"
"IntersectionForms"
"IntersectionGroup"
"IntersectionMatrix"
"IntersectionNumber"
"IntersectionOfImages"
"IntersectionPairing"
"IntersectionPairingIntegral"
"IntersectionPoints"
"IntersectionWithNormalSubgroup"
"IntersectionZBasis"
"Intseq"
"InvHom"
"InvariantBasis"
"InvariantFactors"
"InvariantField"
"InvariantForm"
"InvariantForms"
"InvariantHermitianForms"
"InvariantModule"
"InvariantQuaternionicForms"
"InvariantRepresentation"
"InvariantRing"
"Invariants"
"InvariantsMetacyclicPGroup"
"InvariantsOfDegree"
"Inverse"
"InverseDefiningPolynomials"
"InverseErf"
"InverseJeuDeTaquin"
"InverseKrawchouk"
"InverseMattsonSolomonTransform"
"InverseMod"
"InverseRSKCorrespondenceDoubleWord"
"InverseRSKCorrespondenceMatrix"
"InverseRSKCorrespondenceSingleWord"
"InverseRoot"
"InverseRowInsert"
"InverseSqrt"
"InverseSquareRoot"
"InverseTransformation"
"InverseWordMap"
"Involution"
"InvolutionClassicalGroupEven"
"Iroot"
"IrrationalPart"
"IrreducibleCartanMatrix"
"IrreducibleComponents"
"IrreducibleCoxeterGraph"
"IrreducibleCoxeterGroup"
"IrreducibleCoxeterMatrix"
"IrreducibleDynkinDigraph"
"IrreducibleFiniteStandardParabolicSubgroups"
"IrreducibleLowTermGF2Polynomial"
"IrreducibleMatrix"
"IrreducibleMatrixGroup"
"IrreducibleModule"
"IrreducibleModules"
"IrreducibleModulesBurnside"
"IrreducibleModulesInit"
"IrreducibleModulesSchur"
"IrreduciblePolynomial"
"IrreducibleReflectionGroup"
"IrreducibleRepresentationsInit"
"IrreducibleRepresentationsSchur"
"IrreducibleRootDatum"
"IrreducibleRootSystem"
"IrreducibleSecondaryInvariants"
"IrreducibleSimpleSubalgebraTreeSU"
"IrreducibleSimpleSubalgebrasOfSU"
"IrreducibleSolubleSubgroups"
"IrreducibleSparseGF2Polynomial"
"IrreducibleSubgroups"
"IrreducibleTrinomialsDatabase"
"IrreducibleWord"
"IrregularLDPCEnsemble"
"IrregularValues"
"IrregularVertices"
"Irregularity"
"IrrelevantComponents"
"IrrelevantGenerators"
"IrrelevantIdeal"
"Is2T1"
"IsAModule"
"IsAPN"
"IsAbelian"
"IsAbelianByFinite"
"IsAbelianVariety"
"IsAbsoluteField"
"IsAbsoluteOrder"
"IsAbsolutelyIrreducible"
"IsAbstractCartanMatrix"
"IsAcceptedWordDFA"
"IsAdditive"
"IsAdditiveOrder"
"IsAdditiveProjective"
"IsAdjoint"
"IsAffine"
"IsAffineLinear"
"IsAlgebraic"
"IsAlgebraicDifferentialField"
"IsAlgebraicField"
"IsAlgebraicGeometric"
"IsAlgebraicallyDependent"
"IsAlgebraicallyIsomorphic"
"IsAlmostIntegral"
"IsAlternating"
"IsAltsym"
"IsAmbient"
"IsAmbientSpace"
"IsAmple"
"IsAnalyticallyIrreducible"
"IsAnisotropic"
"IsAnticanonical"
"IsAntisymmetric"
"IsArc"
"IsArithmeticWeight"
"IsArithmeticallyCohenMacaulay"
"IsAssociative"
"IsAttachedToModularSymbols"
"IsAttachedToNewform"
"IsAutomatic"
"IsAutomaticGroup"
"IsAutomorphism"
"IsBalanced"
"IsBase64Encoded"
"IsBasePointFree"
"IsBiconnected"
"IsBig"
"IsBijective"
"IsBipartite"
"IsBlock"
"IsBlockTransitive"
"IsBogomolovUnstable"
"IsBoundary"
"IsBravaisEquivalent"
"IsCM"
"IsCalabiYauNumericalSeries"
"IsCanonical"
"IsCanonicalWithTwist"
"IsCapacitated"
"IsCartanEquivalent"
"IsCartanMatrix"
"IsCartanSubalgebra"
"IsCartier"
"IsCategory"
"IsCentral"
"IsCentralByFinite"
"IsCentralCollineation"
"IsChainMap"
"IsCharacter"
"IsChevalleyBasis"
"IsClassicalType"
"IsCluster"
"IsCoercible"
"IsCoercibleGrpLie"
"IsCohenMacaulay"
"IsCokernelTorsionFree"
"IsCollinear"
"IsCommutative"
"IsCompactHyperbolic"
"IsCompatible"
"IsComplete"
"IsCompletelyReducible"
"IsComplex"
"IsComponent"
"IsConcurrent"
"IsConditioned"
"IsConfluent"
"IsCongruence"
"IsCongruent"
"IsConic"
"IsConjugate"
"IsConjugateSubgroup"
"IsConnected"
"IsConnectedFibre"
"IsConsistent"
"IsConstaCyclic"
"IsConstant"
"IsConstantCurve"
"IsConway"
"IsCoprime"
"IsCorootSpace"
"IsCoxeterAffine"
"IsCoxeterCompactHyperbolic"
"IsCoxeterFinite"
"IsCoxeterGraph"
"IsCoxeterHyperbolic"
"IsCoxeterIrreducible"
"IsCoxeterIsomorphic"
"IsCoxeterMatrix"
"IsCrystallographic"
"IsCubeHeuristically"
"IsCubicModel"
"IsCurve"
"IsCusp"
"IsCuspidal"
"IsCuspidalNewform"
"IsCyclic"
"IsCyclotomic"
"IsCyclotomicPolynomial"
"IsDecomposable"
"IsDefault"
"IsDeficient"
"IsDefined"
"IsDefinedByQuadric"
"IsDefinedByQuadrics"
"IsDefinite"
"IsDegenerate"
"IsDelPezzo"
"IsDenselyRepresented"
"IsDesarguesian"
"IsDesign"
"IsDiagonal"
"IsDifferenceSet"
"IsDifferentialField"
"IsDifferentialIdeal"
"IsDifferentialLaurentSeriesRing"
"IsDifferentialOperatorRing"
"IsDifferentialRing"
"IsDifferentialRingElement"
"IsDifferentialSeriesRing"
"IsDirectSum"
"IsDirectSummand"
"IsDirected"
"IsDiscriminant"
"IsDisjoint"
"IsDistanceRegular"
"IsDistanceTransitive"
"IsDivisible"
"IsDivisibleBy"
"IsDivisionAlgebra"
"IsDivisionRing"
"IsDivisorialContraction"
"IsDomain"
"IsDominant"
"IsDoublePoint"
"IsDoublyEven"
"IsDualComputable"
"IsDynkinDigraph"
"IsEdgeCapacitated"
"IsEdgeLabelled"
"IsEdgeTransitive"
"IsEdgeWeighted"
"IsEffective"
"IsEichler"
"IsEigenform"
"IsEisenstein"
"IsEisensteinSeries"
"IsElementaryAbelian"
"IsEllipticCurve"
"IsEllipticWeierstrass"
"IsEmbedded"
"IsEmpty"
"IsEmptySimpleQuotientProcess"
"IsEmptyWord"
"IsEndomorphism"
"IsEof"
"IsEqual"
"IsEquationOrder"
"IsEquidistant"
"IsEquitable"
"IsEquivalent"
"IsEuclideanDomain"
"IsEuclideanRing"
"IsEulerian"
"IsEven"
"IsExact"
"IsExactlyDivisible"
"IsExceptionalUnit"
"IsExport"
"IsExtensionCategory"
"IsExtensionOf"
"IsExtraSpecial"
"IsExtraSpecialNormaliser"
"IsFTGeometry"
"IsFace"
"IsFactorial"
"IsFactorisationPrime"
"IsFaithful"
"IsFakeWeightedProjectiveSpace"
"IsFanMap"
"IsFano"
"IsField"
"IsFieldCategory"
"IsFinite"
"IsFiniteOrder"
"IsFirm"
"IsFixedAtLevel"
"IsFlag"
"IsFlex"
"IsFlexFast"
"IsFlipping"
"IsForest"
"IsFree"
"IsFrobenius"
"IsFuchsianOperator"
"IsFull"
"IsFunctionFieldCategory"
"IsFunctor"
"IsFundamental"
"IsFundamentalDiscriminant"
"IsGE"
"IsGHom"
"IsGL2Equivalent"
"IsGLConjugate"
"IsGLConjugateBigClassical"
"IsGLConjugateClassical"
"IsGLConjugateExtraspecial"
"IsGLConjugateImprimitive"
"IsGLConjugateReducible"
"IsGLConjugateSemilinear"
"IsGLConjugateSubfield"
"IsGLConjugateTensor"
"IsGLConjugateTensorInduced"
"IsGLQConjugate"
"IsGLZConjugate"
"IsGLattice"
"IsGamma"
"IsGamma0"
"IsGamma1"
"IsGammaUpper0"
"IsGammaUpper1"
"IsGe"
"IsGeneralizedCartanMatrix"
"IsGeneralizedCharacter"
"IsGenuineWeightedDynkinDiagram"
"IsGenus"
"IsGenusComputable"
"IsGenusOneModel"
"IsGeometricallyHyperelliptic"
"IsGerm"
"IsGlobal"
"IsGlobalUnit"
"IsGlobalUnitWithPreimage"
"IsGloballySplit"
"IsGorenstein"
"IsGorensteinSurface"
"IsGraded"
"IsGraph"
"IsGroebner"
"IsHadamard"
"IsHadamardEquivalent"
"IsHadamardEquivalentLeon"
"IsHeckeAlgebra"
"IsHeckeOperator"
"IsHereditary"
"IsHilbertNumerator"
"IsHolzerReduced"
"IsHomeomorphic"
"IsHomogeneous"
"IsHomomorphism"
"IsHyperbolic"
"IsHyperelliptic"
"IsHyperellipticCurve"
"IsHyperellipticCurveOfGenus"
"IsHyperellipticWeierstrass"
"IsHyperplane"
"IsHypersurface"
"IsHypersurfaceDivisor"
"IsId"
"IsIdeal"
"IsIdempotent"
"IsIdentical"
"IsIdenticalPresentation"
"IsIdentity"
"IsIdentityProduct"
"IsInArtinSchreierRepresentation"
"IsInBasicOrbit"
"IsInCorootSpace"
"IsInDual"
"IsInImage"
"IsInInterior"
"IsInKummerRepresentation"
"IsInRadical"
"IsInRootSpace"
"IsInSecantVariety"
"IsInSmallGroupDatabase"
"IsInSmallModularCurveDatabase"
"IsInSupport"
"IsInTangentVariety"
"IsInTwistedForm"
"IsIndecomposable"
"IsIndefinite"
"IsIndependent"
"IsIndivisibleRoot"
"IsInduced"
"IsInert"
"IsInertial"
"IsInfinite"
"IsInflectionPoint"
"IsInjective"
"IsInner"
"IsInnerAutomorphism"
"IsInt"
"IsInteger"
"IsIntegral"
"IsIntegralDomain"
"IsIntegralModel"
"IsIntegrallyClosed"
"IsInterior"
"IsIntrinsic"
"IsInvariant"
"IsInvertible"
"IsIrreducible"
"IsIrreducibleFiniteNilpotent"
"IsIrregularSingularPlace"
"IsIsogenous"
"IsIsogenousPeriodMatrices"
"IsIsogeny"
"IsIsolated"
"IsIsometric"
"IsIsometry"
"IsIsomorphic"
"IsIsomorphicBigPeriodMatrices"
"IsIsomorphicCubicSurface"
"IsIsomorphicFF"
"IsIsomorphicOverBase"
"IsIsomorphicOverQt"
"IsIsomorphicPGroups"
"IsIsomorphicSmallPeriodMatrices"
"IsIsomorphicWithTwist"
"IsIsomorphism"
"IsJacobianPencil"
"IsKEdgeConnected"
"IsKVertexConnected"
"IsKnownIsomorphic"
"IsKnuthEquivalent"
"IsLDPC"
"IsLE"
"IsLabelled"
"IsLabelledEdge"
"IsLabelledVertex"
"IsLargeReeGroup"
"IsLaurent"
"IsLe"
"IsLeaf"
"IsLeftIdeal"
"IsLeftIsomorphic"
"IsLeftModule"
"IsLehmerCode"
"IsLexicographicallyOrdered"
"IsLie"
"IsLineRegular"
"IsLineTransitive"
"IsLinear"
"IsLinearGroup"
"IsLinearScheme"
"IsLinearSpace"
"IsLinearSystemNonEmpty"
"IsLinearlyDependent"
"IsLinearlyEquivalent"
"IsLinearlyEquivalentToCartier"
"IsLinearlyIndependent"
"IsLittleWoodRichardson"
"IsLocal"
"IsLocalNorm"
"IsLocallyFree"
"IsLocallySoluble"
"IsLocallySolvable"
"IsLocallyTwoTransitive"
"IsLongRoot"
"IsLowerTriangular"
"IsMDS"
"IsMagmaEuclideanRing"
"IsMatrixRing"
"IsMaximal"
"IsMaximalAtRamifiedPrimes"
"IsMaximisingFunction"
"IsMaximumDimensional"
"IsMaximumDistanceSeparable"
"IsMemberBasicOrbit"
"IsMetacyclicPGroup"
"IsMinimal"
"IsMinimalModel"
"IsMinimalTwist"
"IsMinusOne"
"IsMinusQuotient"
"IsMixed"
"IsMobile"
"IsModular"
"IsModularCurve"
"IsModuleHomomorphism"
"IsMonic"
"IsMonomial"
"IsMonomialIsomorphic"
"IsMonomialRepresentation"
"IsMoriFibreSpace"
"IsMorphism"
"IsMorphismCategory"
"IsMultiChar"
"IsNarrowlyPrincipal"
"IsNearLinearSpace"
"IsNearlyPerfect"
"IsNeat"
"IsNef"
"IsNefAndBig"
"IsNegative"
"IsNegativeDefinite"
"IsNegativeSemiDefinite"
"IsNew"
"IsNewform"
"IsNewtonPolygonOf"
"IsNilpotent"
"IsNilpotentByFinite"
"IsNodalCurve"
"IsNode"
"IsNonSingular"
"IsNonsingular"
"IsNorm"
"IsNormal"
"IsNormalised"
"IsNormalising"
"IsNormalized"
"IsNull"
"IsNullHomotopy"
"IsNumberField"
"IsObject"
"IsOdd"
"IsOddDegree"
"IsOnBoundary"
"IsOne"
"IsOneCoboundary"
"IsOneCocycle"
"IsOnlyMotivic"
"IsOptimal"
"IsOrbit"
"IsOrder"
"IsOrderTerm"
"IsOrdered"
"IsOrdinary"
"IsOrdinaryProjective"
"IsOrdinarySingularity"
"IsOrthogonalGroup"
"IsOuter"
"IsOverQ"
"IsOverSmallerField"
"IsPID"
"IsPIR"
"IsPRI"
"IsPSaturated"
"IsParabolic"
"IsParallel"
"IsParallelClass"
"IsParallelWeight"
"IsParallelism"
"IsPartialRoot"
"IsPartition"
"IsPartitionRefined"
"IsPath"
"IsPerfect"
"IsPerfectlyCentered"
"IsPermutationModule"
"IsPlanar"
"IsPlaneCurve"
"IsPlusQuotient"
"IsPoint"
"IsPointRegular"
"IsPointTransitive"
"IsPointed"
"IsPolycyclic"
"IsPolycyclicByFinite"
"IsPolygon"
"IsPolynomial"
"IsPolytope"
"IsPositive"
"IsPositiveDefinite"
"IsPositiveSemiDefinite"
"IsPower"
"IsPowerOf"
"IsPrimary"
"IsPrime"
"IsPrimeCertificate"
"IsPrimeField"
"IsPrimePower"
"IsPrimitive"
"IsPrimitiveFiniteNilpotent"
"IsPrincipal"
"IsPrincipalIdealDomain"
"IsPrincipalIdealRing"
"IsPrincipalSeries"
"IsProbablePrime"
"IsProbablyMaximal"
"IsProbablyPerfect"
"IsProbablyPermutationPolynomial"
"IsProbablyPrime"
"IsProbablySupersingular"
"IsProductOfParallelDescendingCycles"
"IsProjective"
"IsProjectivelyIrreducible"
"IsProper"
"IsProperChainMap"
"IsProportional"
"IsPseudoReflection"
"IsPure"
"IsPureOrder"
"IsPureQuantumCode"
"IsPyramid"
"IsQCartier"
"IsQFactorial"
"IsQGorenstein"
"IsQPrincipal"
"IsQuadratic"
"IsQuadraticSpace"
"IsQuadraticTwist"
"IsQuadricIntersection"
"IsQuasiCyclic"
"IsQuasiSimpleTwistedCyclic"
"IsQuasiTwistedCyclic"
"IsQuasisplit"
"IsQuaternionAlgebra"
"IsQuaternionic"
"IsQuotient"
"IsRC"
"IsRPRI"
"IsRWP"
"IsRWPRI"
"IsRadical"
"IsRamified"
"IsRational"
"IsRationalCurve"
"IsRationalFunctionField"
"IsRationalPoint"
"IsRawCurve"
"IsReal"
"IsRealReflectionGroup"
"IsRealisableOverSmallerField"
"IsRealisableOverSubfield"
"IsReduced"
"IsReductive"
"IsReeGroup"
"IsReflection"
"IsReflectionGroup"
"IsReflectionSubgroup"
"IsReflexive"
"IsRegular"
"IsRegularLDPC"
"IsRegularPlace"
"IsRegularSingularOperator"
"IsRegularSingularPlace"
"IsRepresentation"
"IsResiduallyConnected"
"IsResiduallyPrimitive"
"IsResiduallyWeaklyPrimitive"
"IsResolution"
"IsRestrictable"
"IsRestricted"
"IsRestrictedLieAlgebra"
"IsRestrictedSubalgebra"
"IsReverseLatticeWord"
"IsRightIdeal"
"IsRightIsomorphic"
"IsRightModule"
"IsRing"
"IsRingHomomorphism"
"IsRingOfAllModularForms"
"IsRoot"
"IsRootOfUnity"
"IsRootSpace"
"IsRootedTree"
"IsSIntegral"
"IsSPrincipal"
"IsSUnit"
"IsSUnitWithPreimage"
"IsSatisfied"
"IsSaturated"
"IsScalar"
"IsScalarGroup"
"IsSelfDual"
"IsSelfNormalising"
"IsSelfNormalizing"
"IsSelfOrthogonal"
"IsSemiLinear"
"IsSemiregular"
"IsSemisimple"
"IsSeparable"
"IsSeparating"
"IsServerSocket"
"IsSharplyTransitive"
"IsShortExactSequence"
"IsShortRoot"
"IsSimilar"
"IsSimple"
"IsSimpleOrder"
"IsSimpleStarAlgebra"
"IsSimplex"
"IsSimplicial"
"IsSimplifiedModel"
"IsSimplyConnected"
"IsSimplyLaced"
"IsSinglePrecision"
"IsSingular"
"IsSkew"
"IsSmooth"
"IsSmoothHyperSurface"
"IsSoluble"
"IsSolubleByFinite"
"IsSolvable"
"IsSpecial"
"IsSpinorGenus"
"IsSpinorNorm"
"IsSplit"
"IsSplitAsIdealAt"
"IsSplitToralSubalgebra"
"IsSplittingCartanSubalgebra"
"IsSplittingField"
"IsSquare"
"IsSquarefree"
"IsStandard"
"IsStandardAffinePatch"
"IsStandardGF"
"IsStandardParabolicSubgroup"
"IsStarAlgebra"
"IsSteiner"
"IsStratum"
"IsStrictlyConvex"
"IsStrictlyNef"
"IsStronglyAG"
"IsStronglyConnected"
"IsSubcanonicalCurve"
"IsSubfield"
"IsSubgraph"
"IsSubgroup"
"IsSublattice"
"IsSubmodule"
"IsSubnormal"
"IsSubscheme"
"IsSubsequence"
"IsSuitableQuaternionOrder"
"IsSuperSummitRepresentative"
"IsSupercuspidal"
"IsSuperlattice"
"IsSupersingular"
"IsSupersoluble"
"IsSupportingHyperplane"
"IsSurjective"
"IsSuzukiGroup"
"IsSymmetric"
"IsSymplecticGroup"
"IsSymplecticMatrix"
"IsSymplecticSelfDual"
"IsSymplecticSelfOrthogonal"
"IsSymplecticSpace"
"IsTIrreducible"
"IsTSelfdual"
"IsTamelyRamified"
"IsTangent"
"IsTensor"
"IsTensorInduced"
"IsTerminal"
"IsTerminalThreefold"
"IsThick"
"IsThin"
"IsToralSubalgebra"
"IsTorsionUnit"
"IsTorusInvariant"
"IsTotallyComplex"
"IsTotallyEven"
"IsTotallyPositive"
"IsTotallyRamified"
"IsTotallyReal"
"IsTotallySingular"
"IsTotallySplit"
"IsTransformation"
"IsTransitive"
"IsTransvection"
"IsTransverse"
"IsTree"
"IsTriangleGroup"
"IsTriangulable"
"IsTriconnected"
"IsTrivial"
"IsTrivialOnUnits"
"IsTwist"
"IsTwisted"
"IsTwoCoboundary"
"IsTwoSidedIdeal"
"IsUFD"
"IsUltraSummitRepresentative"
"IsUndirected"
"IsUniform"
"IsUnipotent"
"IsUniqueFactorisationDomain"
"IsUniqueFactorizationDomain"
"IsUniquePartialRoot"
"IsUnit"
"IsUnitWithPreimage"
"IsUnital"
"IsUnitary"
"IsUnitaryGroup"
"IsUnitaryRepresentation"
"IsUnitarySpace"
"IsUnivariate"
"IsUnramified"
"IsUpperTriangular"
"IsValid"
"IsValidLargeReeOrder"
"IsVerbose"
"IsVertex"
"IsVertexLabelled"
"IsVertexTransitive"
"IsWGsymmetric"
"IsWP"
"IsWPRI"
"IsWeaklyAG"
"IsWeaklyAGDual"
"IsWeaklyAdjoint"
"IsWeaklyConnected"
"IsWeaklyEqual"
"IsWeaklyMonic"
"IsWeaklyPrimitive"
"IsWeaklySimplyConnected"
"IsWeaklyZero"
"IsWeierstrassModel"
"IsWeierstrassPlace"
"IsWeight"
"IsWeightVector"
"IsWeighted"
"IsWeightedProjectiveSpace"
"IsWeil"
"IsWildlyRamified"
"IsWindows"
"IsWreathProduct"
"IsZero"
"IsZeroAt"
"IsZeroComplex"
"IsZeroDimensional"
"IsZeroDivisor"
"IsZeroMap"
"IsZeroTerm"
"Isetseq"
"Isetset"
"Iso"
"IsogeniesAreEqual"
"IsogenousCurves"
"Isogeny"
"IsogenyFromKernel"
"IsogenyFromKernelFactored"
"IsogenyGroup"
"IsogenyMapOmega"
"IsogenyMapPhi"
"IsogenyMapPhiMulti"
"IsogenyMapPsi"
"IsogenyMapPsiMulti"
"IsogenyMapPsiSquared"
"IsogenyMu"
"IsolGroup"
"IsolGroupDatabase"
"IsolGroupOfDegreeFieldSatisfying"
"IsolGroupOfDegreeSatisfying"
"IsolGroupSatisfying"
"IsolGroupsOfDegreeFieldSatisfying"
"IsolGroupsOfDegreeSatisfying"
"IsolGroupsSatisfying"
"IsolGuardian"
"IsolInfo"
"IsolIsPrimitive"
"IsolMinBlockSize"
"IsolNumberOfDegreeField"
"IsolOrder"
"IsolProcess"
"IsolProcessGroup"
"IsolProcessInfo"
"IsolProcessIsEmpty"
"IsolProcessLabel"
"IsolProcessNext"
"IsolProcessOfDegree"
"IsolProcessOfDegreeField"
"IsolProcessOfField"
"IsolateRoots"
"IsolatedGorensteinSingularitiesOfIndex"
"IsolatedPointsFinder"
"IsolatedPointsLiftToMinimalPolynomials"
"IsolatedPointsLifter"
"IsometricCircle"
"IsometryGroup"
"IsomorphicCopy"
"IsomorphicMatrixLieAlgebra"
"IsomorphicProjectionToSubspace"
"IsomorphicSubmodules"
"Isomorphism"
"IsomorphismData"
"IsomorphismExtension"
"IsomorphismExtensions"
"IsomorphismToIsogeny"
"IsomorphismToStandardCopy"
"IsomorphismToStandardSCDtm"
"Isomorphisms"
"IsomorphismsOverBase"
"IsotropicSubspace"
"IsotropicVector"
"IspGroup"
"IspIntegral"
"IspLieAlgebra"
"IspMaximal"
"IspMinimal"
"IspNormal"
"IspSubalgebra"
"Isqrt"
"Itest"
"JBessel"
"JH"
"JInvariants"
"JOne"
"JZero"
"Jacobi"
"JacobiSymbol"
"JacobiTheta"
"JacobiThetaNullK"
"Jacobian"
"JacobianIdeal"
"JacobianMatrix"
"JacobianOrdersByDeformation"
"JacobianPoint"
"JacobianSequence"
"JacobianSubrankScheme"
"JacobsonRadical"
"JacobsonRadicalAlgBas"
"JacobsonRadicalOverFiniteField"
"JellyfishConstruction"
"JellyfishImage"
"JellyfishPreimage"
"JenningsLieAlgebra"
"JenningsSeries"
"JeuDeTaquin"
"JohnsonBound"
"Join"
"JoinDFA"
"JordanBlock"
"JordanDecomposition"
"JordanForm"
"Js"
"JustesenCode"
"Juxtaposition"
"K3Baskets"
"K3Copy"
"K3Database"
"K3Surface"
"K3SurfaceFromRawData"
"K3SurfaceRawData"
"K3SurfaceToRecord"
"K3SurfaceWithCodimension"
"KBessel"
"KBessel2"
"KBinomial"
"KCubeGraph"
"KDegree"
"KLPolynomial"
"KMatrixSpace"
"KMatrixSpaceWithBasis"
"KModule"
"KModuleWithBasis"
"KSpace"
"KSpaceWithBasis"
"KacMoodyClass"
"KacMoodyClasses"
"KappaLattice"
"KaratsubaMultiplication"
"KeepAbelian"
"KeepElementary"
"KeepElementaryAbelian"
"KeepGeneratorAction"
"KeepGeneratorOrder"
"KeepGroupAction"
"KeepPGroupWeights"
"KeepPrimePower"
"KeepSplit"
"KeepSplitAbelian"
"KeepSplitElementaryAbelian"
"KerdockCode"
"Kernel"
"KernelBasis"
"KernelEmbedding"
"KernelMatrix"
"KernelOrder"
"KernelZ2CodeZ4"
"Kernels"
"Keys"
"KillingDifferentialModp"
"KillingForm"
"KillingMatrix"
"KissingNumber"
"KleinBottle"
"KleinQuartic"
"KnapsackSolutions"
"Knot"
"KnownAutomorphismSubgroup"
"KnownCoefficient"
"KnownFactors"
"KnownFactorsAndCoefficient"
"KnownIrreducibles"
"KodairaSymbol"
"KodairaSymbols"
"KostkaNumber"
"KrawchoukPolynomial"
"KrawchoukTransform"
"KroneckerCharacter"
"KroneckerDelta"
"KroneckerProduct"
"KroneckerSymbol"
"KummerSurface"
"KummerSurfacePointRaw"
"KummerSurfaceRaw"
"LCLM"
"LCM"
"LCT"
"LCfRequired"
"LDPCBinarySymmetricThreshold"
"LDPCCode"
"LDPCDecode"
"LDPCDensity"
"LDPCEnsembleRate"
"LDPCGaussianThreshold"
"LDPCGirth"
"LDPCMatrix"
"LDPCSimulate"
"LFSRSequence"
"LFSRStep"
"LFunction"
"LGetCoefficients"
"LHS"
"LLL"
"LLLBasis"
"LLLBasisMatrix"
"LLLBlock"
"LLLGram"
"LLLGramMatrix"
"LLLReducedModel"
"LMGCenter"
"LMGCentre"
"LMGChiefFactors"
"LMGChiefSeries"
"LMGCommutatorSubgroup"
"LMGCompositionFactors"
"LMGCompositionSeries"
"LMGDerivedGroup"
"LMGEqual"
"LMGFactoredOrder"
"LMGFittingSubgroup"
"LMGIndex"
"LMGIsIn"
"LMGIsNilpotent"
"LMGIsNormal"
"LMGIsSoluble"
"LMGIsSolvable"
"LMGIsSubgroup"
"LMGNormalClosure"
"LMGOrder"
"LMGSocleStar"
"LMGSocleStarAction"
"LMGSocleStarActionKernel"
"LMGSocleStarFactors"
"LMGSocleStarQuotient"
"LMGSolubleRadical"
"LMGSolvableRadical"
"LMGSylow"
"LMGUnipotentRadical"
"LPProcess"
"LPolynomial"
"LProduct"
"LRatio"
"LRatioOddPart"
"LSeries"
"LSeriesData"
"LSeriesLeadingCoefficient"
"LSetCoefficients"
"LSetPrecision"
"LStar"
"LTaylor"
"LUB"
"Label"
"LabelToMatrixInternal"
"Labelling"
"Labels"
"LaguerrePolynomial"
"LaminatedLattice"
"Lang"
"LanguageCountInternal"
"LanguageDFA"
"Laplace"
"LargeRee"
"LargeReeBNpair"
"LargeReeConjugacy"
"LargeReeDiagonalisation"
"LargeReeElementToWord"
"LargeReeGeneralRecogniser"
"LargeReeGroup"
"LargeReeInvolutionCentraliser"
"LargeReeInvolutionClass"
"LargeReeIrreducibleRepresentation"
"LargeReeMaximalSubgroups"
"LargeReeRecognition"
"LargeReeReduction"
"LargeReeRedundantSLPGenerators"
"LargeReeResetRandomProcess"
"LargeReeSLPCoercion"
"LargeReeStandardConstructiveMembership"
"LargeReeStandardMaximalSubgroups"
"LargeReeStandardMembership"
"LargeReeStandardRecogniser"
"LargeReeSylow"
"LargeReeSzInvolution"
"LargestConductor"
"LargestDimension"
"LargestOrder"
"LastColumnEntry"
"LastIndexOfRow"
"Lattice"
"LatticeBasisInCone"
"LatticeBasisMatrix"
"LatticeCoordinates"
"LatticeData"
"LatticeDatabase"
"LatticeElementToMonomial"
"LatticeMap"
"LatticeMinkowskiDecomposition"
"LatticeName"
"LatticeToZGram"
"LatticeVector"
"LatticeVectorsInBox"
"LatticeWithBasis"
"LatticeWithGram"
"LaurentSeriesAlgebra"
"LaurentSeriesRing"
"LayerBoundary"
"LayerLength"
"LazyPowerSeriesRing"
"LazySeries"
"Lcm"
"LeadingCoefficient"
"LeadingExponent"
"LeadingGenerator"
"LeadingMonomial"
"LeadingMonomialIdeal"
"LeadingTerm"
"LeadingTotalDegree"
"LeadingWeightedDegree"
"LeastCommonLeftMultiple"
"LeastCommonMultiple"
"LeastUpperBound"
"LeeBrickellsAttack"
"LeeDistance"
"LeeDistance1"
"LeeWeight"
"LeeWeight1"
"LeeWeightDistribution"
"LeeWeightEnumerator"
"LeftAnnihilator"
"LeftComplex"
"LeftConjugate"
"LeftCosetSpace"
"LeftDescentSet"
"LeftDiv"
"LeftExactExtension"
"LeftGCD"
"LeftGcd"
"LeftGreatestCommonDivisor"
"LeftIdeal"
"LeftIdealClasses"
"LeftInverse"
"LeftInverseMorphism"
"LeftIsomorphism"
"LeftLCM"
"LeftLcm"
"LeftLeastCommonMultiple"
"LeftMixedCanonicalForm"
"LeftNormalForm"
"LeftOrder"
"LeftRepresentationMatrix"
"LeftString"
"LeftStringLength"
"LeftZeroExtension"
"LegendreEquation"
"LegendreModel"
"LegendrePolynomial"
"LegendreSymbol"
"LehmerCode"
"LehmerCodeToPerm"
"Length"
"LengthenCode"
"Lengths"
"LensSpace"
"LeonsAttack"
"LetterCreate"
"LetterDelete"
"LetterPreImage"
"LetterPrint"
"LetterVarAlgebra"
"LetterVarCalc"
"LetterVarCheck"
"LetterVarCocycles"
"LetterVarConsistency"
"LetterVarConsistencyProc"
"LetterVarCreate"
"LetterVarDelete"
"LetterVarEquations"
"LetterVarFpRelsProc"
"LetterVarGroup"
"LetterVarPreImage"
"LetterVarPrint"
"Level"
"Levels"
"LevenshteinBound"
"LexProduct"
"LexicographicalOrdering"
"LiEMaximalSubgroups"
"LiERootDatum"
"LiESymmetricCharacterValue"
"LibFileOpen"
"LieAlgebra"
"LieAlgebraHomomorphism"
"LieAlgebraOfDerivations"
"LieBracket"
"LieCharacteristic"
"LieConstant_C"
"LieConstant_M"
"LieConstant_N"
"LieConstant_epsilon"
"LieConstant_eta"
"LieConstant_p"
"LieConstant_q"
"LieRepresentationDecomposition"
"LieType"
"Lift"
"LiftCharacter"
"LiftCharacters"
"LiftCocycle"
"LiftDescendant"
"LiftHomomorphism"
"LiftHomomorphismGroupP"
"LiftIsogeny"
"LiftIsomorphism"
"LiftMap"
"LiftModule"
"LiftModules"
"LiftNonsplitExtension"
"LiftNonsplitExtensionRow"
"LiftPoint"
"LiftSplitExtension"
"LiftSplitExtensionRow"
"LiftToChainmap"
"Line"
"LineAtInfinity"
"LineGraph"
"LineGroup"
"LineOrbits"
"LineSet"
"LinearCharacters"
"LinearCode"
"LinearCombinationOfEigenformsOverC"
"LinearConeGenerators"
"LinearElimination"
"LinearGraph"
"LinearRelation"
"LinearRelations"
"LinearRepresentationSetup"
"LinearRepresentations"
"LinearShift"
"LinearSpace"
"LinearSpanEquations"
"LinearSpanGenerators"
"LinearSubspaceGenerators"
"LinearSystem"
"LinearSystemAtPhi"
"LinearSystemTrace"
"LinearlyEquivalentDivisorWithNoSupportOn"
"Lines"
"LinesInScheme"
"Linking"
"LinkingNumbers"
"ListAttributes"
"ListCategories"
"ListEntriesEqual"
"ListSignatures"
"ListTypes"
"ListVerbose"
"LittlewoodRichardsonTensor"
"LocalComponent"
"LocalCoxeterGroup"
"LocalDegree"
"LocalFactorization"
"LocalField"
"LocalGenera"
"LocalGlobalSelmerDiagram"
"LocalHeight"
"LocalInformation"
"LocalIntersectionData"
"LocalPolynomialRing"
"LocalRing"
"LocalRootNumber"
"LocalTwoSelmerMap"
"LocalUniformizer"
"Localisation"
"Localization"
"Log"
"LogCanonicalThreshold"
"LogCanonicalThresholdAtOrigin"
"LogCanonicalThresholdOverExtension"
"LogDerivative"
"LogGamma"
"LogIntegral"
"LogNorms"
"LogarithmicFieldExtension"
"Logs"
"LongBits"
"LongDivision"
"LongExactSequenceOnHomology"
"LongWords"
"LongestElement"
"LongestWeylWord"
"Lookup"
"LookupPrime"
"LowDimSubmodules"
"LowIndexNormalSubgroups"
"LowIndexProcess"
"LowIndexSubgroups"
"LowIndexSubgroupsSn"
"LowIndexSubmodules"
"LowerCentralSeries"
"LowerFaces"
"LowerSlopes"
"LowerTriangularMatrix"
"LowerVertices"
"Lucas"
"MAXSGPInternal"
"MCPolynomials"
"MCSplit"
"MDSCode"
"MEANS"
"MGCD"
"MMP"
"MPQS"
"MSQLetternonsplit"
"MSQLettersplit"
"MSQnonsplit"
"MSQnonsplitBase"
"MSQsplit"
"MSQsplitBase"
"MSetPolynomial"
"MSumPolynomial"
"MacWilliamsTransform"
"MagicNumber"
"Main"
"MainInvolution"
"MakeBasket"
"MakeCoprime"
"MakeCyclotomic"
"MakeDirected"
"MakeHomWithPreimageHandler"
"MakeIsSquare"
"MakeMapWithPreimageHandler"
"MakeModCubes"
"MakePCMap"
"MakeProjectiveClosureMap"
"MakeRepsDB"
"MakeRepsSmall"
"MakeResiduesSEA"
"MakeResolutionGraph"
"MakeSpliceDiagram"
"MakeType"
"Manifold"
"ManifoldDatabase"
"ManinConstant"
"ManinSymbol"
"MantissaExponent"
"MapToMatrix"
"Mapping"
"Maps"
"MargulisCode"
"MarkGroebner"
"Mass"
"MasseyProduct"
"MatRep"
"MatRepCharacteristics"
"MatRepDegrees"
"MatRepFieldSizes"
"MatRepKeys"
"Match"
"Matrices"
"Matrix"
"MatrixAlgebra"
"MatrixGroup"
"MatrixLieAlgebra"
"MatrixOfElement"
"MatrixOfInequalities"
"MatrixOfIsomorphism"
"MatrixQuotient"
"MatrixRepresentation"
"MatrixRing"
"MatrixToLabelInternal"
"MatrixToPerm"
"MatrixToWord"
"MatrixUnit"
"MatrixWithGivenCharacteristicPolynomial"
"MattsonSolomonTransform"
"Max"
"MaxCones"
"MaxNorm"
"MaxOrthPCheck"
"MaxParabolics"
"MaxSub"
"MaxSubKeys"
"MaxSubsTF2"
"MaxSubsTF4"
"Maxdeg"
"MaximalAbelianSubfield"
"MaximalCoefficientCode"
"MaximalExtension"
"MaximalIdeals"
"MaximalIncreasingSequence"
"MaximalIncreasingSequences"
"MaximalIntegerSolution"
"MaximalLeftIdeals"
"MaximalNormalSubgroup"
"MaximalNumberOfCosets"
"MaximalOddOrderNormalSubgroup"
"MaximalOrder"
"MaximalOrderBasis"
"MaximalOrderFinite"
"MaximalOrderInfinite"
"MaximalOvergroup"
"MaximalParabolics"
"MaximalPartition"
"MaximalRightIdeals"
"MaximalSingularSubspace"
"MaximalSolution"
"MaximalSubfields"
"MaximalSubgroups"
"MaximalSubgroupsAlt"
"MaximalSubgroupsAltSym"
"MaximalSubgroupsData"
"MaximalSubgroupsH"
"MaximalSubgroupsSym"
"MaximalSubgroupsTF"
"MaximalSublattices"
"MaximalSubmodules"
"MaximalTotallyIsotropicSubspace"
"MaximalVertexFacetHeightMatrix"
"MaximalZeroOneSolution"
"Maximum"
"MaximumBettiDegree"
"MaximumClique"
"MaximumDegree"
"MaximumFlow"
"MaximumInDegree"
"MaximumIndependentSet"
"MaximumMatching"
"MaximumNorm"
"MaximumOutDegree"
"MaximumStoredIrreducibleDegree"
"Maxindeg"
"Maxoutdeg"
"McElieceEtAlAsymptoticBound"
"McEliecesAttack"
"Meataxe"
"MeetDFA"
"MelikianLieAlgebra"
"MemCompact"
"MemProfile"
"Memory"
"MergeFields"
"MergeFiles"
"MergeUnits"
"MetacyclicPGroups"
"Mij2EltRootTable"
"Mike1"
"MilnorNumber"
"Min"
"MinParabolics"
"MinRowsGeneratorMatrix"
"Mindeg"
"MinimalAlgebraGenerators"
"MinimalAndCharacteristicPolynomials"
"MinimalBaseRingCharacter"
"MinimalBasis"
"MinimalBlocks"
"MinimalCoefficientDegree"
"MinimalCyclotomicField"
"MinimalDecomposition"
"MinimalDegreeModel"
"MinimalElementConjugatingToPositive"
"MinimalElementConjugatingToSuperSummit"
"MinimalElementConjugatingToUltraSummit"
"MinimalExtensionBasis"
"MinimalField"
"MinimalFreeResolution"
"MinimalHeckePolynomial"
"MinimalIdeals"
"MinimalInequalities"
"MinimalInteger"
"MinimalIntegerSolution"
"MinimalIsogeny"
"MinimalLeeWords"
"MinimalLeftIdeals"
"MinimalModel"
"MinimalNormalSubgroup"
"MinimalNormalSubgroups"
"MinimalOverfields"
"MinimalOvergroup"
"MinimalOvergroups"
"MinimalParabolics"
"MinimalPartition"
"MinimalPartitions"
"MinimalPolynomial"
"MinimalPolynomialFrobenius"
"MinimalPositiveGenerators"
"MinimalPrimeComponents"
"MinimalQuadraticTwist"
"MinimalRGenerators"
"MinimalRelations"
"MinimalRightIdeals"
"MinimalSolution"
"MinimalSubmodule"
"MinimalSubmodules"
"MinimalSuperlattices"
"MinimalSupermodules"
"MinimalSyzygyModule"
"MinimalTwist"
"MinimalVectorSequence"
"MinimalWeierstrassModel"
"MinimalWords"
"MinimalZeroOneSolution"
"MinimisationMatrix"
"Minimise"
"MinimiseConicToMatrix"
"MinimiseReduce"
"MinimiseWeights"
"Minimize"
"MinimizeCubicSurface"
"MinimizeDFA"
"MinimizeDeg4delPezzo"
"MinimizeGenerators"
"MinimizePlaneQuartic"
"MinimizeReduce"
"MinimizeReduceCubicSurface"
"MinimizeReduceDeg4delPezzo"
"MinimizeReducePlaneQuartic"
"Minimum"
"MinimumCut"
"MinimumDegree"
"MinimumDistance"
"MinimumDominatingSet"
"MinimumEuclideanDistance"
"MinimumEuclideanWeight"
"MinimumInDegree"
"MinimumLeeDistance"
"MinimumLeeWeight"
"MinimumLeeWeightBounds"
"MinimumLeeWords"
"MinimumOutDegree"
"MinimumWeight"
"MinimumWeightBounds"
"MinimumWeightTree"
"MinimumWord"
"MinimumWords"
"Minindeg"
"MinkowskiBound"
"MinkowskiDecomposition"
"MinkowskiGramReduction"
"MinkowskiLattice"
"MinkowskiReduction"
"MinkowskiSpace"
"Minor"
"MinorBoundary"
"MinorLength"
"Minors"
"Minoutdeg"
"Minus"
"MinusInfinity"
"MinusTamagawaNumber"
"MinusVolume"
"MixedCanonicalForm"
"ModByPowerOf2"
"ModelParent"
"ModelToSequence"
"ModelToString"
"ModelType"
"Modexp"
"ModifyProcess"
"ModifySelfintersection"
"ModifyTransverseIntersection"
"Modinv"
"Modorder"
"Modsqrt"
"ModularAbelianVariety"
"ModularComposition"
"ModularCompositionApply"
"ModularCompositionSetup"
"ModularCompositions"
"ModularCurve"
"ModularCurveDatabase"
"ModularCurveQuotient"
"ModularCurves"
"ModularDegree"
"ModularEmbedding"
"ModularEquation"
"ModularForm"
"ModularForms"
"ModularHyperellipticCurve"
"ModularKernel"
"ModularNonHyperellipticCurveGenus3"
"ModularParameterization"
"ModularParametrisation"
"ModularParametrization"
"ModularPolarization"
"ModularSolution"
"ModularSymbol"
"ModularSymbolApply"
"ModularSymbolEven"
"ModularSymbolOdd"
"ModularSymbolRepresentation"
"ModularSymbolToIntegralHomology"
"ModularSymbolToRationalHomology"
"ModularSymbols"
"ModularSymbolsH"
"ModularSymbolsModSmallPrime"
"Module"
"ModuleExtension"
"ModuleExtensionComplement"
"ModuleHomomorphism"
"ModuleMap"
"ModuleMaps"
"ModuleOverSmallerField"
"ModuleProject"
"ModuleProjectM"
"ModuleSaturation"
"ModuleToZModule"
"ModuleWithBasis"
"Modules"
"ModulesOverCommonField"
"ModulesOverSmallerField"
"Moduli"
"ModuliPoints"
"Modulus"
"ModulusIsFinite"
"MoebiusMu"
"MoebiusStrip"
"MolienSeries"
"MolienSeriesApproximation"
"MonicDifferentialOperator"
"MonicModel"
"MonodromyPairing"
"MonodromyWeights"
"Monoid"
"Monomial"
"MonomialAutomorphismGroup"
"MonomialBasis"
"MonomialCoefficient"
"MonomialDivisionList"
"MonomialGroup"
"MonomialGroupStabilizer"
"MonomialLattice"
"MonomialMatrix"
"MonomialOrder"
"MonomialOrderWeightVectors"
"MonomialSubgroup"
"MonomialToCoxMonomialsLattice"
"MonomialToElementaryMatrix"
"MonomialToHomogeneousMatrix"
"MonomialToPowerSumMatrix"
"MonomialToSchurMatrix"
"Monomials"
"MonomialsOfDegree"
"MonomialsOfDegreeZero"
"MonomialsOfWeightedDegree"
"MooreDeterminant"
"MordellWeilGroup"
"MordellWeilLattice"
"MordellWeilRank"
"MordellWeilRankBounds"
"MordellWeilShaInformation"
"MordellWeilSubgroup"
"MoriCone"
"Morphism"
"MorphismAutomorphism"
"MorphismAutomorphisms"
"MorphismCategory"
"MorphismFromImages"
"MorphismFromImagesAndBaseMorphism"
"MorphismMap"
"MorphismMapHasPreimage"
"MotivicWeight"
"MovablePart"
"Mult"
"MultiKnapsackSolutions"
"MultiQuotientMaps"
"MultiRank"
"MultiSpaces"
"MultiTuple"
"Multidegree"
"Multinomial"
"MultipartiteGraph"
"MultiplicationByMMap"
"MultiplicationTable"
"MultiplicativeGroup"
"MultiplicativeJordanDecomposition"
"MultiplicativeOrder"
"MultiplicatorRing"
"Multiplicities"
"Multiplicity"
"Multiplier"
"MultiplyByTranspose"
"MultiplyColumn"
"MultiplyDivisor"
"MultiplyFrobenius"
"MultiplyRow"
"MultiplyTransformations"
"Multiset"
"MultisetToSet"
"Multisets"
"MultivaluedSection"
"MultivariatePolynomial"
"MurphyAlphaApproximation"
"MyAbelianGroup"
"MyBasis"
"MyCompletion"
"MyDumbExpand"
"MyEval"
"MyExpand"
"MyExtOrder"
"MyFPGroup"
"MyGCD"
"MyGetLowPrecisionExpand"
"MyGetLowPrecisionExpandAS"
"MyGradedMap"
"MyInvars"
"MyIsConjugate"
"MyIsConjugateQuotient"
"MyIsConjugateSubgroup"
"MyIsMaximal"
"MyIsSquare"
"MyMaximalOrder"
"MyPrimitivePart"
"MyRationalPoints"
"MyRelativeInvariant"
"NFS"
"NFSProcess"
"NFaces"
"NGrad"
"NMS"
"NMatReps"
"NMaxSubs"
"NNZEntries"
"NP"
"NPCGenerators"
"NPCgens"
"NPermReps"
"NSpin"
"NagataAutomorphism"
"Nagens"
"NaiveHeight"
"Nalggens"
"Name"
"Name2Mij"
"NameSimple"
"Names"
"NarrowClassGroup"
"NarrowClassNumber"
"NaturalActionGenerator"
"NaturalBlackBoxGroup"
"NaturalFreeAlgebraCover"
"NaturalGroup"
"NaturalMap"
"NaturalMaps"
"Nclasses"
"Ncols"
"NearLinearSpace"
"NefCone"
"NegationMap"
"Negative"
"NegativeGammaOrbitsOnRoots"
"NegativePrimeDivisors"
"NegativeRelativeRoots"
"Neighbor"
"NeighborClosure"
"Neighbors"
"Neighbour"
"NeighbourClosure"
"NeighbouringGerms"
"Neighbours"
"NewAndOldSubspacesUsingHeckeAction"
"NewEnv"
"NewLLLBasis"
"NewLevel"
"NewModularHyperellipticCurve"
"NewModularHyperellipticCurves"
"NewModularNonHyperellipticCurveGenus3"
"NewModularNonHyperellipticCurvesGenus3"
"NewQuotient"
"NewSaturation"
"NewStore"
"NewSubspace"
"NewSubvariety"
"Newform"
"NewformDecomposition"
"Newforms"
"NewtonPolygon"
"NewtonPolynomial"
"NewtonPolynomials"
"NewtonPolytope"
"NextClass"
"NextElement"
"NextExtension"
"NextFactor"
"NextGraph"
"NextModule"
"NextPermutation"
"NextPrime"
"NextRepresentation"
"NextSimpleQuotient"
"NextSubgroup"
"NextVector"
"Ngens"
"Nice"
"NiceRepresentativeModuloPowers"
"NiceRepresentativesModuloPowers"
"NicerQuaternionAlgebra"
"NilRadical"
"NilpotencyClass"
"NilpotentBoundary"
"NilpotentLength"
"NilpotentLieAlgebra"
"NilpotentOrbit"
"NilpotentOrbits"
"NilpotentPresentation"
"NilpotentQuotient"
"NilpotentSection"
"NilpotentSubgroups"
"Nilradical"
"NineDescent"
"NineSelmerSet"
"NoCommonComponent"
"NoetherNormalisation"
"NoetherNormalization"
"NoetherNumerator"
"NoetherWeights"
"NonCuspidalQRationalPoints"
"NonIdempotentActionGenerators"
"NonIdempotentGenerators"
"NonNilpotentElement"
"NonNormalizedLcm"
"NonPrimitiveAlternant"
"NonPrincipalPrimesUpTo"
"NonQFactorialLocus"
"NonReducedFibres"
"NonSimplicialCones"
"NonSpecialDivisor"
"NonZeroCoordinates"
"NonsolvableSubgroups"
"NonsplitAbelianSection"
"NonsplitCollector"
"NonsplitElementaryAbelianSection"
"NonsplitExtensionSpace"
"NonsplitSection"
"Nonsquare"
"NonvanishingForm"
"Norm"
"NormAbs"
"NormEquation"
"NormGroup"
"NormGroupDiscriminant"
"NormKernel"
"NormModule"
"NormOneGroup"
"NormOneSubgroup"
"NormResidueSymbol"
"NormSpace"
"NormalBasisGenerator"
"NormalClosure"
"NormalClosureMonteCarlo"
"NormalComplements"
"NormalCone"
"NormalElement"
"NormalFan"
"NormalForm"
"NormalLattice"
"NormalNumber"
"NormalSubfields"
"NormalSubgroup"
"NormalSubgroupRandomElement"
"NormalSubgroups"
"Normalisation"
"NormalisationCoefficient"
"Normalise"
"NormalisedCone"
"Normaliser"
"NormaliserCode"
"NormaliserMatrix"
"Normalization"
"NormalizationCoefficient"
"Normalize"
"NormalizeIdeals"
"Normalizer"
"NormalizerCode"
"NormalizerGLZ"
"NormalizerMatrix"
"Norms"
"Not"
"Nqubits"
"Nrels"
"Nrows"
"Nsgens"
"NthPrime"
"NuclearRank"
"NullGraph"
"NullHomotopy"
"NullSpace"
"Nullity"
"Nullspace"
"NullspaceMatrix"
"NullspaceOfTranspose"
"NullspaceOfTransposeMatrix"
"NumExtraspecialPairs"
"NumPosRoots"
"Number"
"NumberField"
"NumberFieldDatabase"
"NumberFieldSieve"
"NumberFields"
"NumberOfActionGenerators"
"NumberOfAffinePatches"
"NumberOfAlgebraicGenerators"
"NumberOfAntisymmetricForms"
"NumberOfBlocks"
"NumberOfBoundaryPoints"
"NumberOfCells"
"NumberOfClasses"
"NumberOfColumns"
"NumberOfComponents"
"NumberOfConstantWords"
"NumberOfConstraints"
"NumberOfCoordinates"
"NumberOfCurves"
"NumberOfDivisors"
"NumberOfEGenerators"
"NumberOfEdges"
"NumberOfElements"
"NumberOfExtensions"
"NumberOfFGenerators"
"NumberOfFaces"
"NumberOfFacets"
"NumberOfFields"
"NumberOfFixedSpaces"
"NumberOfGenerators"
"NumberOfGradings"
"NumberOfGraphs"
"NumberOfGroups"
"NumberOfGroupsSF"
"NumberOfGroupsp7"
"NumberOfInclusions"
"NumberOfInteriorPoints"
"NumberOfInvariantForms"
"NumberOfIrreducibleMatrixGroups"
"NumberOfIsogenyClasses"
"NumberOfK3Surfaces"
"NumberOfKGenerators"
"NumberOfLabels"
"NumberOfLattices"
"NumberOfLevels"
"NumberOfLines"
"NumberOfMatrices"
"NumberOfMetacyclicPGroups"
"NumberOfNames"
"NumberOfNewformClasses"
"NumberOfNonZeroEntries"
"NumberOfOperations"
"NumberOfPCGenerators"
"NumberOfPartitions"
"NumberOfPermutations"
"NumberOfPlacesDegECF"
"NumberOfPlacesOfDegreeOne"
"NumberOfPlacesOfDegreeOneECF"
"NumberOfPlacesOfDegreeOneECFBound"
"NumberOfPlacesOfDegreeOneOverExactConstantField"
"NumberOfPlacesOfDegreeOneOverExactConstantFieldBound"
"NumberOfPlacesOfDegreeOverExactConstantField"
"NumberOfPoints"
"NumberOfPointsAtInfinity"
"NumberOfPointsOnCubicSurface"
"NumberOfPointsOnSurface"
"NumberOfPositiveRoots"
"NumberOfPrimePolynomials"
"NumberOfPrimitiveAffineGroups"
"NumberOfPrimitiveAlmostSimpleGroups"
"NumberOfPrimitiveDiagonalGroups"
"NumberOfPrimitiveGroups"
"NumberOfPrimitiveProductGroups"
"NumberOfPrimitiveSolubleGroups"
"NumberOfProjectives"
"NumberOfPunctures"
"NumberOfQubits"
"NumberOfQuotientGradings"
"NumberOfRationalPoints"
"NumberOfRelations"
"NumberOfRelationsRequired"
"NumberOfRepresentations"
"NumberOfRows"
"NumberOfSkewRows"
"NumberOfSmallGroups"
"NumberOfSmoothDivisors"
"NumberOfSolubleIrreducibleMatrixGroups"
"NumberOfStandardTableaux"
"NumberOfStandardTableauxOnWeight"
"NumberOfStrings"
"NumberOfStrongGenerators"
"NumberOfSubgroupsAbelianPGroup"
"NumberOfSymmetricForms"
"NumberOfTableauxOnAlphabet"
"NumberOfTerms"
"NumberOfTransitiveGroups"
"NumberOfTransverseIntersections"
"NumberOfVariables"
"NumberOfVertices"
"NumberOfWords"
"NumberOfhGenerators"
"NumberOfxGenerators"
"NumberOfyGenerators"
"NumberingMap"
"NumbersOfPointsOnSurface"
"Numelt"
"Numeration"
"Numerator"
"NumeratorData"
"NumeratorSequence"
"NumericalDerivative"
"NumericalEigenvectors"
"O"
"OECM"
"OEIS"
"OEISDatabase"
"ObjectMap"
"ObjectMapHasPreimage"
"ObjectiveFunction"
"Obstruction"
"ObstructionDescentBuildingBlock"
"OddGraph"
"Oddity"
"OldClassInvariants"
"OldDerksenIdeal"
"OldGOMinus"
"OldGeneralOrthogonalGroupMinus"
"OldIrreducibleModules"
"OldOmegaMinus"
"OldQuadraticSpace"
"OldQuotient"
"OldSOMinus"
"OldSpecialOrthogonalGroupMinus"
"OldSubspace"
"OldSubvariety"
"Omega"
"OmegaMinus"
"OmegaPlus"
"One"
"OneCocycle"
"OneCohomology"
"OneCohomologyAb"
"OneCohomologyFP"
"OneCohomologyFP_"
"OneParameterSubgroupsLattice"
"OneSkeleton"
"OnlyUpToIsogeny"
"Open"
"OpenGraphFile"
"OpenSmallGroupDatabase"
"OpenTest"
"Operands"
"Operation"
"Operator"
"OperatorNorm"
"OppositeAlgebra"
"OptimalEdgeColouring"
"OptimalSkewness"
"OptimalVertexColouring"
"OptimisedRepresentation"
"OptimizedRepresentation"
"Or"
"Orbit"
"OrbitAction"
"OrbitActionBounded"
"OrbitBounded"
"OrbitClosure"
"OrbitImage"
"OrbitImageBounded"
"OrbitKernel"
"OrbitKernelBounded"
"OrbitLensInternal"
"OrbitMinsInternal"
"OrbitNumbersInternal"
"OrbitPartitionIsConjugate"
"OrbitPartitionStabilizer"
"OrbitRepresentatives"
"OrbitStabilizer"
"OrbitSum"
"OrbitalGraph"
"Orbits"
"OrbitsOfSpaces"
"OrbitsOnSimples"
"OrbitsPartition"
"Order"
"OrderAutomorphismGroupAbelianPGroup"
"OrderGL"
"OrderOfImageOfComponentGroupOfJ0N"
"OrderOfRootOfUnity"
"OrderedGenerators"
"OrderedIntegerMonoid"
"OrderedMonoid"
"OrderedPartitionStack"
"OrderedPartitionStackZero"
"Ordering"
"OreConditions"
"OrientatedGraph"
"Origin"
"OriginalRing"
"OrthogonalComplement"
"OrthogonalComponent"
"OrthogonalComponents"
"OrthogonalDecomposition"
"OrthogonalDirectSum"
"OrthogonalForm"
"OrthogonalFormCS"
"OrthogonalFormMinus"
"OrthogonalFormPlus"
"OrthogonalReflection"
"OrthogonalSum"
"OrthogonalTensorProduct"
"Orthogonalize"
"OrthogonalizeGram"
"Orthonormalize"
"OutDegree"
"OutEdges"
"OutNeighbors"
"OutNeighbours"
"OuterFPGroup"
"OuterFaces"
"OuterNormal"
"OuterNormals"
"OuterOrder"
"OuterShape"
"OuterVertices"
"OvalDerivation"
"OverDimension"
"OverconvergentHeckeSeries"
"OverconvergentHeckeSeriesDegreeBound"
"Overdatum"
"Overgroup"
"P1"
"P1Action"
"P1Classes"
"P1Normalize"
"P1P2toA3Ac2over12"
"P1Reduce"
"P2"
"PALPNormalForm"
"PCAut"
"PCAutAction"
"PCAutDeriv"
"PCAutIsSol"
"PCAutPrint"
"PCBFConjByWord"
"PCBFEltNew"
"PCBFElteq"
"PCBFEltne"
"PCBFMult"
"PCBFNew"
"PCBFNormalForm"
"PCBFRevert"
"PCClass"
"PCExponents"
"PCGO"
"PCGOMinus"
"PCGOPlus"
"PCGSp"
"PCGU"
"PCGenerators"
"PCGroup"
"PCMap"
"PCPresentation"
"PCPrimes"
"PCSO"
"PCSOMinus"
"PCSOPlus"
"PCSU"
"PGL"
"PGO"
"PGOMinus"
"PGOPlus"
"PGU"
"PGammaL"
"PGammaU"
"PGroupSection"
"PGroupToForms"
"PHom"
"POmega"
"POmegaMinus"
"POmegaPlus"
"POpen"
"PSL"
"PSL2"
"PSO"
"PSOMinus"
"PSOPlus"
"PSU"
"PSigmaL"
"PSigmaSp"
"PSigmaSz"
"PSigmaU"
"PSp"
"PSz"
"PackingRadius"
"PadCode"
"PadeHermiteApproximant"
"PairReduce"
"PairReduceGram"
"PaleyGraph"
"PaleyTournament"
"ParallelClass"
"ParallelClasses"
"ParallelSort"
"ParamDeg4DPSingLie"
"Parameters"
"Parametrization"
"ParametrizationMatrix"
"ParametrizationToPuiseux"
"ParametrizeAnticanonicalP1xP1"
"ParametrizeAnticanonicalSphere"
"ParametrizeBlowup"
"ParametrizeDegree5DelPezzo"
"ParametrizeDegree6DelPezzo"
"ParametrizeDegree7DelPezzo"
"ParametrizeDegree8DelPezzo"
"ParametrizeDegree9DelPezzo"
"ParametrizeDelPezzo"
"ParametrizeDelPezzoDeg6"
"ParametrizeDelPezzoDeg9"
"ParametrizeOrdinaryCurve"
"ParametrizePencil"
"ParametrizeProjectiveHypersurface"
"ParametrizeProjectiveSurface"
"ParametrizeQuadric"
"ParametrizeRNC"
"ParametrizeRationalNormalCurve"
"ParametrizeScroll"
"ParametrizeSingularDegree3DelPezzo"
"ParametrizeSingularDegree4DelPezzo"
"Parent"
"ParentCategory"
"ParentCell"
"ParentGraph"
"ParentPlane"
"ParentRing"
"ParityCheckMatrix"
"PartialDual"
"PartialFactorization"
"PartialFractionDecomposition"
"PartialLeeWeightDistribution"
"PartialPrimaryInvariantSpaces"
"PartialWeightDistribution"
"Partition"
"Partition2WGtable"
"PartitionAction"
"PartitionCovers"
"PartitionToWeight"
"Partitions"
"PascalTriangle"
"PatchGerms"
"Path"
"PathExists"
"PathGraph"
"PathTree"
"PathTreeCyclicModule"
"Paths"
"Peakwords"
"PellEquation"
"Pencil"
"PerfectForms"
"PerfectGroupDatabase"
"PerfectSubgroups"
"PeriodMapping"
"Periods"
"PermCond"
"PermRep"
"PermRepDegrees"
"PermRepKeys"
"PermRestrict"
"PermToDualMatrix"
"PermToMatrix"
"PermToWord"
"Permutation"
"PermutationAutomorphism"
"PermutationCharacter"
"PermutationCode"
"PermutationCondensation"
"PermutationGroup"
"PermutationMatrix"
"PermutationModule"
"PermutationRepresentation"
"PermutationSupport"
"Permutations"
"PermuteSequence"
"PermuteWeights"
"Pfaffian"
"Pfaffians"
"PhaseFlip"
"Phi"
"PhiInverse"
"Pi"
"PicardClass"
"PicardGaloisModule"
"PicardGroup"
"PicardGroupGeometric"
"PicardIntersectionPairing"
"PicardLattice"
"PicardNumber"
"PicardToClassGroupsMap"
"PicardToClassLatticesMap"
"PicnDescent"
"Pipe"
"Place"
"PlaceEnumCopy"
"PlaceEnumCurrent"
"PlaceEnumInit"
"PlaceEnumNext"
"PlaceEnumPosition"
"Places"
"PlacticIntegerMonoid"
"PlacticMonoid"
"PlanarDual"
"PlanarGraphDatabase"
"PlaneCurve"
"PlaneToDisc"
"Plethysm"
"PlotkinAsymptoticBound"
"PlotkinBound"
"PlotkinSum"
"Plurigenus"
"Point"
"PointDegree"
"PointDegrees"
"PointGraph"
"PointGroup"
"PointInInterior"
"PointIndexes"
"PointOnRegularModel"
"PointSearch"
"PointSet"
"PointToBlowUp"
"Points"
"PointsAtInfinity"
"PointsCubicModel"
"PointsFiniteField"
"PointsInGeneralPosition"
"PointsKnown"
"PointsOverSplittingField"
"PointsQI"
"PointsToLaurent"
"Polar"
"PolarToComplex"
"Polarisation"
"PolarisedVariety"
"PoleDivisor"
"Poles"
"PollardRho"
"PolyMapKernel"
"PolyToSeries"
"PolycyclicByFiniteGroup"
"PolycyclicGenerators"
"PolygonGraph"
"Polyhedron"
"PolyhedronInSublattice"
"PolyhedronWithInequalities"
"Polylog"
"PolylogD"
"PolylogDold"
"PolylogP"
"Polynomial"
"PolynomialAlgebra"
"PolynomialCoefficient"
"PolynomialMap"
"PolynomialPair"
"PolynomialRing"
"PolynomialSieve"
"PolynomialToElementarySymmetric"
"PolynomialToPowerSums"
"Polynomials"
"Polytope"
"PolytopeCanonicalFanoDim2"
"PolytopeCanonicalFanoDim3"
"PolytopeLDP"
"PolytopeOfProjectiveSpace"
"PolytopeOfWPS"
"PolytopeReflexiveFanoDim2"
"PolytopeReflexiveFanoDim3"
"PolytopeSmoothFano"
"PolytopeSmoothFanoDim2"
"PolytopeSmoothFanoDim3"
"PolytopeSmoothFanoDim4"
"PolytopeSmoothFanoDim5"
"PolytopeSmoothFanoDim6"
"PolytopeSmoothFanoDim7"
"PolytopeSmoothFanoDim8"
"PolytopeTerminalFanoDim2"
"PolytopeTerminalFanoDim3"
"PolytopeToLaurent"
"PolytopelReflexiveDim2"
"Pop"
"PosRootsWeightBasis"
"Position"
"PositiveConjugates"
"PositiveConjugatesProcess"
"PositiveCoroots"
"PositiveDefiniteForm"
"PositiveGammaOrbitsOnRoots"
"PositiveQuadrant"
"PositiveRelativeRoots"
"PositiveRoots"
"PositiveRootsPerm"
"PositiveSum"
"PossibleCanonicalDissidentPoints"
"PossibleDiscriminants"
"PossibleSimpleCanonicalDissidentPoints"
"PowHom"
"Power"
"PowerFormalSet"
"PowerFreePart"
"PowerGroup"
"PowerIdeal"
"PowerIndexedSet"
"PowerMap"
"PowerMultiset"
"PowerPolynomial"
"PowerProduct"
"PowerProductSimplify"
"PowerRSpace"
"PowerRelation"
"PowerResidueCode"
"PowerSequence"
"PowerSeries"
"PowerSeriesAlgebra"
"PowerSeriesRing"
"PowerSet"
"PowerStructure"
"PowerSumToCoefficients"
"PowerSumToElementaryMatrix"
"PowerSumToElementarySymmetric"
"PowerSumToHomogeneousMatrix"
"PowerSumToMonomialMatrix"
"PowerSumToSchurMatrix"
"PrePatchMaps"
"Precision"
"PrecisionBound"
"Preimage"
"PreimageConstructorViaInverse"
"PreimageIdeal"
"PreimageRing"
"PreparataCode"
"Preprune"
"Presentation"
"PresentationIsSmall"
"PresentationLength"
"PresentationMatrix"
"PreviousPrime"
"PrimDecomp"
"PrimalityCertificate"
"Primary"
"PrimaryAbelianBasis"
"PrimaryAbelianInvariants"
"PrimaryAlgebra"
"PrimaryBasis"
"PrimaryComponents"
"PrimaryDecomposition"
"PrimaryIdeal"
"PrimaryInvariantFactors"
"PrimaryInvariants"
"PrimaryRationalForm"
"PrimaryRepresentation"
"Prime"
"PrimeBasis"
"PrimeComponents"
"PrimeDivisors"
"PrimeFactorisation"
"PrimeField"
"PrimeForm"
"PrimeIdeal"
"PrimeOrderElement"
"PrimePolynomials"
"PrimePowerKernelMatrix"
"PrimePowerNullspaceMatrix"
"PrimePowerOrderElement"
"PrimePowerRepresentation"
"PrimeRing"
"Primes"
"PrimesInInterval"
"PrimesUpTo"
"PrimitiveEisensteinSeries"
"PrimitiveElement"
"PrimitiveGroup"
"PrimitiveGroupDatabaseLimit"
"PrimitiveGroupDescription"
"PrimitiveGroupIdentification"
"PrimitiveGroupLabelFromSims"
"PrimitiveGroupLabelToSims"
"PrimitiveGroupProcess"
"PrimitiveGroupSims"
"PrimitiveGroups"
"PrimitiveIdempotentData"
"PrimitiveIdempotents"
"PrimitiveLatticeVector"
"PrimitivePart"
"PrimitivePolynomial"
"PrimitiveQuotient"
"PrimitiveRoot"
"PrimitiveWreathProduct"
"PrincipalCharacter"
"PrincipalDivisor"
"PrincipalDivisorMap"
"PrincipalIdealMap"
"PrincipalPolarisation"
"PrincipalPrimesUpTo"
"PrincipalSeriesParameters"
"PrincipalUnitGroup"
"PrincipalUnitGroupGenerators"
"PrintBase"
"PrintCategory"
"PrintCoding"
"PrintCollector"
"PrintExtensions"
"PrintFile"
"PrintFileMagma"
"PrintGenerators"
"PrintGrpLie"
"PrintGrpLieElt"
"PrintMagma"
"PrintMapping"
"PrintMatgMagma"
"PrintModuleMagma"
"PrintModules"
"PrintName"
"PrintPairs"
"PrintPrimes"
"PrintProbabilityDistribution"
"PrintProcess"
"PrintQuotient"
"PrintRelat"
"PrintRelatorLengths"
"PrintRelators"
"PrintSeries"
"PrintSortedProbabilityDistribution"
"PrintStatus"
"PrintSylowSubgroupStructure"
"PrintSymbols"
"PrintTermsOfDegree"
"PrintToPrecision"
"PrintTreesSU"
"PrintoutData"
"Probability"
"ProbabilityDistribution"
"ProbableAutomorphismGroup"
"ProbableRadicalDecomposition"
"Probit"
"ProcessLadder"
"Product"
"ProductCode"
"ProductProjectiveSpace"
"ProductRepresentation"
"ProfileGraph"
"ProfileHTMLOutput"
"ProfilePrintByTotalCount"
"ProfilePrintByTotalTime"
"ProfilePrintChildrenByCount"
"ProfilePrintChildrenByTime"
"ProfilePrintDescendantsByCount"
"ProfilePrintDescendantsByTime"
"ProfilePrintGraphByCount"
"ProfilePrintGraphByTime"
"ProfilePruneGraphByCount"
"ProfilePruneGraphByTime"
"ProfileReset"
"Proj"
"ProjKilling"
"Projection"
"ProjectionCentres"
"ProjectionCodimensions"
"ProjectionFromNonsingularPoint"
"ProjectionIndices"
"ProjectionMap"
"ProjectionMatrix"
"ProjectionOnto"
"ProjectionOntoImage"
"ProjectionSubtypes"
"ProjectionTypes"
"Projections"
"ProjectiveClosure"
"ProjectiveClosureMap"
"ProjectiveCover"
"ProjectiveDimension"
"ProjectiveEmbedding"
"ProjectiveFunction"
"ProjectiveGammaLinearGroup"
"ProjectiveGammaUnitaryGroup"
"ProjectiveGeneralLinearGroup"
"ProjectiveGeneralOrthogonalGroup"
"ProjectiveGeneralOrthogonalGroupMinus"
"ProjectiveGeneralOrthogonalGroupPlus"
"ProjectiveGeneralUnitaryGroup"
"ProjectiveIndecomposable"
"ProjectiveIndecomposableDimensions"
"ProjectiveIndecomposableModules"
"ProjectiveIndecomposables"
"ProjectiveLine"
"ProjectiveMap"
"ProjectiveModule"
"ProjectiveOmega"
"ProjectiveOmegaMinus"
"ProjectiveOmegaPlus"
"ProjectiveOrder"
"ProjectivePatchMap"
"ProjectivePlane"
"ProjectivePolynomial"
"ProjectiveRationalFunction"
"ProjectiveRepresentative"
"ProjectiveResolution"
"ProjectiveResolutionPGroup"
"ProjectiveSigmaLinearGroup"
"ProjectiveSigmaSuzukiGroup"
"ProjectiveSigmaSymplecticGroup"
"ProjectiveSigmaUnitaryGroup"
"ProjectiveSpace"
"ProjectiveSpaceAsToricVariety"
"ProjectiveSpecialLinearGroup"
"ProjectiveSpecialOrthogonalGroup"
"ProjectiveSpecialOrthogonalGroupMinus"
"ProjectiveSpecialOrthogonalGroupPlus"
"ProjectiveSpecialUnitaryGroup"
"ProjectiveSuzukiGroup"
"ProjectiveSymplecticGroup"
"Projectivity"
"Prospector"
"Prune"
"PseudoAdd"
"PseudoAddMultiple"
"PseudoBasis"
"PseudoCholeskyForm"
"PseudoCholeskyFormToCholesky"
"PseudoDimension"
"PseudoGenerators"
"PseudoInverse"
"PseudoMatrix"
"PseudoMordellWeilGroup"
"PseudoRandom"
"PseudoReflection"
"PseudoReflectionGroup"
"PseudoRemainder"
"Pseudoreflection"
"Psi"
"PthPowerMapping"
"PuiseuxExpansion"
"PuiseuxExponents"
"PuiseuxExponentsCommon"
"PuiseuxSeriesRing"
"PuiseuxToParametrization"
"Pullback"
"PunctureCode"
"PureBraidGroup"
"PureLattice"
"PureRayIndices"
"PureRays"
"PurelyRamifiedExtension"
"PushThroughIsogeny"
"Pushforward"
"Pushout"
"Put"
"PutInZ"
"Puts"
"Pyramid"
"QECC"
"QECCLowerBound"
"QECCUpperBound"
"QFactorialisation"
"QMatrix"
"QNF"
"QRCode"
"QRCodeZ4"
"QSpace"
"QUAToIntegralUEAMap"
"Qround"
"QuadeIdeal"
"QuadraticClassGroupTwoPart"
"QuadraticField"
"QuadraticForm"
"QuadraticFormCS"
"QuadraticFormMatrix"
"QuadraticFormMinus"
"QuadraticFormPlus"
"QuadraticFormPolynomial"
"QuadraticFormType"
"QuadraticForms"
"QuadraticNorm"
"QuadraticNormForm"
"QuadraticOrder"
"QuadraticSpace"
"QuadraticTransformation"
"QuadraticTwist"
"QuadraticTwists"
"QuadricIntersection"
"QuantizedUEA"
"QuantizedUEAlgebra"
"QuantizedUniversalEnvelopingAlgebra"
"QuantumBasisElement"
"QuantumBinaryErrorGroup"
"QuantumCode"
"QuantumCompactFormat"
"QuantumCyclicCode"
"QuantumDimension"
"QuantumErrorGroup"
"QuantumExtendedFormat"
"QuantumQuasiCyclicCode"
"QuantumState"
"QuantumTwistedCode"
"Quartic"
"QuarticG4Covariant"
"QuarticG6Covariant"
"QuarticHSeminvariant"
"QuarticIInvariant"
"QuarticJInvariant"
"QuarticMinimise"
"QuarticNumberOfRealRoots"
"QuarticPSeminvariant"
"QuarticQSeminvariant"
"QuarticRSeminvariant"
"QuarticReduce"
"QuasiCyclicCode"
"QuasiTwistedCyclicCode"
"QuaternaryPlotkinSum"
"Quaternion"
"QuaternionAlgebra"
"QuaternionOrder"
"QuaternionicAutomorphismGroup"
"QuaternionicComplement"
"QuaternionicDual"
"QuaternionicGModule"
"QuaternionicMatrixGroupDatabase"
"QuaternionicTranspose"
"QuickLLL"
"QuickLLLGram"
"Quotient"
"QuotientComplex"
"QuotientDimension"
"QuotientFactorization"
"QuotientGenerators"
"QuotientGradings"
"QuotientGroup"
"QuotientMap"
"QuotientModule"
"QuotientModuleAction"
"QuotientModuleImage"
"QuotientRepresentation"
"QuotientRing"
"QuotientWithPullback"
"Quotrem"
"RCLazySeries"
"RF"
"RGenerators"
"RHS"
"RMatrixSpace"
"RMatrixSpaceWithBasis"
"RModule"
"RModuleWithAction"
"RModuleWithBasis"
"RPolynomial"
"RSAModulus"
"RSKCorrespondence"
"RSpace"
"RSpaceWithBasis"
"RSpaceWithModuli"
"RWSGroup"
"RWSMonoid"
"Radical"
"RadicalDecomposition"
"RadicalExtension"
"RadicalQuotient"
"RaisePrecision"
"RamificationDegree"
"RamificationDivisor"
"RamificationField"
"RamificationGroup"
"RamificationIndex"
"RamificationPoints"
"RamifiedPlaces"
"RamifiedPrimes"
"RamifiedRepresentation"
"Ranbig"
"Random"
"RandomAdditiveCode"
"RandomAutomorphism"
"RandomBaseChange"
"RandomBits"
"RandomCFP"
"RandomCone"
"RandomConjugate"
"RandomConsecutiveBits"
"RandomCurveByGenus"
"RandomDigraph"
"RandomElementOfOrder"
"RandomExtension"
"RandomGLnZ"
"RandomGenusOneModel"
"RandomGraph"
"RandomHookWalk"
"RandomIrreduciblePolynomial"
"RandomLinearCode"
"RandomLowerTriangularMatrix"
"RandomMatrix"
"RandomModel"
"RandomNodalCurve"
"RandomPartition"
"RandomPlace"
"RandomPlaneCurve"
"RandomPlanePoints"
"RandomPolytope"
"RandomPositiveCone"
"RandomPrime"
"RandomPrimePolynomial"
"RandomProcess"
"RandomProcessWithValues"
"RandomProcessWithWords"
"RandomProcessWithWordsAndValues"
"RandomQuantumCode"
"RandomRightIdeal"
"RandomSLnZ"
"RandomSchreier"
"RandomSchreierBounded"
"RandomSchreierCoding"
"RandomSequence"
"RandomSequenceBlumBlumShub"
"RandomSequenceRSA"
"RandomSparseMatrix"
"RandomSubcomplex"
"RandomSubset"
"RandomSymplecticMatrix"
"RandomTableau"
"RandomTransformation"
"RandomTree"
"RandomUnit"
"RandomUpperTriangularMatrix"
"RandomWord"
"Rank"
"RankBound"
"RankBounds"
"RankZ2"
"RanksOfPrimitiveIdempotents"
"RationalCharacterDecomposition"
"RationalCharacterSchurIndex"
"RationalCharacterTable"
"RationalCharacterTableRSpace"
"RationalCurve"
"RationalCuspidalSubgroup"
"RationalCusps"
"RationalDifferentialField"
"RationalExtensionRepresentation"
"RationalField"
"RationalForm"
"RationalFunction"
"RationalFunctionField"
"RationalFunctions"
"RationalGCD"
"RationalHomology"
"RationalMap"
"RationalMapping"
"RationalMatrixGroupDatabase"
"RationalPart"
"RationalPoint"
"RationalPoints"
"RationalPointsByFibration"
"RationalPointsGeneric"
"RationalPuiseux"
"RationalReconstruction"
"RationalRootDecomposition"
"RationalRoundUp"
"RationalScroll"
"RationalSequence"
"RationalSolutions"
"RationalTensorSearch"
"Rationals"
"RationalsAsNumberField"
"Ratpoints"
"RawBasket"
"RawCurve"
"RawEval"
"Ray"
"RayClassField"
"RayClassGroup"
"RayClassGroupDiscLog"
"RayLattice"
"RayLatticeMap"
"RayResidueRing"
"Rays"
"Re"
"Reachable"
"Read"
"ReadAtlasMatrix"
"ReadBinary"
"ReadBytes"
"ReadEntry1"
"ReadEntryQECC"
"ReadIntegralMatrix"
"ReadTest"
"Real"
"RealEmbeddings"
"RealExtensions"
"RealField"
"RealHomology"
"RealInjection"
"RealMatrix"
"RealPeriod"
"RealPlaces"
"RealRoots"
"RealSigns"
"RealTamagawaNumber"
"RealToIntegerExponent"
"RealVectorSpace"
"RealVolume"
"RealWeakApproximation"
"Realtime"
"RecToGRBskt"
"RecToGRCrvS"
"RecToGRPtS"
"RecToGRSch"
"ReciprocalPolynomial"
"Recognise3D4"
"RecogniseAdjoint"
"RecogniseAlternating"
"RecogniseAlternatingOrSymmetric"
"RecogniseAlternatingSquare"
"RecogniseClassical"
"RecogniseClassicalSSA"
"RecogniseDelta"
"RecogniseExchangeSSA"
"RecogniseExtendedSL"
"RecogniseExtendedSp"
"RecogniseG2"
"RecogniseLargeRee"
"RecogniseRee"
"RecogniseSL"
"RecogniseSL2"
"RecogniseSL3"
"RecogniseSU3"
"RecogniseSU4"
"RecogniseSp4Even"
"RecogniseSpOdd"
"RecogniseStarAlgebra"
"RecogniseSymmetric"
"RecogniseSymmetricSquare"
"RecogniseSz"
"RecognizeClassical"
"RecognizeExtendedSL"
"RecognizeExtendedSp"
"RecognizeLargeRee"
"RecognizeRee"
"RecognizeSL"
"RecognizeSL2"
"RecognizeSpOdd"
"RecognizeStarAlgebra"
"RecognizeSz"
"Reconstruct"
"ReconstructBasis"
"ReconstructLatticeBasis"
"ReconstructionEnvironment"
"Rectify"
"RecursiveCoefficientLazySeries"
"RecursiveGrphRes"
"RedoEnumeration"
"Reduce"
"ReduceBasis"
"ReduceCharacters"
"ReduceCluster"
"ReduceCubicSurface"
"ReduceDefiningGenerators"
"ReduceGenerators"
"ReduceGroebnerBasis"
"ReducePlaneCurve"
"ReduceQuadrics"
"ReduceToTriangleVertices"
"ReduceVector"
"ReducedAteTPairing"
"ReducedBasis"
"ReducedDecomposition"
"ReducedDiscriminant"
"ReducedEtaTPairing"
"ReducedFactorisation"
"ReducedForm"
"ReducedForms"
"ReducedGramMatrix"
"ReducedLegendreEquation"
"ReducedLegendreModel"
"ReducedLegendrePolynomial"
"ReducedMinimalWeierstrassModel"
"ReducedModel"
"ReducedOrbits"
"ReducedPoint"
"ReducedSubscheme"
"ReducedTatePairing"
"ReducedWamelenModel"
"Reduction"
"ReductionOrbit"
"ReductionStep"
"ReductionType"
"Reductions"
"Reductions_Factor"
"ReductiveLieAlgebraOld"
"ReductiveMatrixLieAlgebraOld"
"ReductiveRank"
"ReductiveType"
"Reductum"
"Ree"
"ReeBNpair"
"ReeConjugacy"
"ReeConjugacyClasses"
"ReeConstructiveMembership"
"ReeCrossCharacteristicReduction"
"ReeDiagonalisation"
"ReeElementToWord"
"ReeFindOrbitPoint"
"ReeFixedPoints"
"ReeGeneralRecogniser"
"ReeGroup"
"ReeInvolutionCentraliser"
"ReeIrreducibleRepresentation"
"ReeMaximalSubgroups"
"ReeMaximalSubgroupsConjugacy"
"ReePermutationRepresentation"
"ReePointStabiliser"
"ReeRecognition"
"ReeReduction"
"ReeRedundantSLPGenerators"
"ReeResetRandomProcess"
"ReeSLPCoercion"
"ReeStabiliser"
"ReeStandardConstructiveMembership"
"ReeStandardCopy"
"ReeStandardGenerators"
"ReeStandardMaximalSubgroups"
"ReeStandardMembership"
"ReeStandardRecogniser"
"ReeSylow"
"ReeSylowConjugacy"
"ReeSymmetricSquareDecompose"
"ReeTensorDecompose"
"ReedMullerCode"
"ReedMullerCodeQRMZ4"
"ReedMullerCodeRMZ4"
"ReedMullerCodeZ4"
"ReedMullerCodesLRMZ4"
"ReedMullerCodesRMZ4"
"ReedSolomonCode"
"ReesIdeal"
"RefineSection"
"Reflection"
"ReflectionFactors"
"ReflectionGroup"
"ReflectionMatrices"
"ReflectionMatrix"
"ReflectionPermutation"
"ReflectionPermutations"
"ReflectionSubgroup"
"ReflectionTable"
"ReflectionWord"
"ReflectionWords"
"Reflections"
"Regexp"
"RegularLDPCEnsemble"
"RegularModel"
"RegularRepresentation"
"RegularSequence"
"RegularSpliceDiagram"
"RegularSubgroups"
"Regularity"
"Regulator"
"RegulatorLowerBound"
"RelationFromUnit"
"RelationIdeal"
"RelationMatrix"
"RelationModule"
"Relations"
"RelativeBasis"
"RelativeField"
"RelativeInvariant"
"RelativePrecision"
"RelativePrecisionOfDerivation"
"RelativeProj"
"RelativeRank"
"RelativeRootDatum"
"RelativeRootElement"
"RelativeRootSpace"
"RelativeRoots"
"RelativeSelmerElement"
"RelevantCosets"
"Remove"
"RemoveBasisElt"
"RemoveColumn"
"RemoveConstraint"
"RemoveCrossTerms"
"RemoveEdge"
"RemoveEdges"
"RemoveFactor"
"RemoveFiles"
"RemoveHypersurface"
"RemoveIrreducibles"
"RemovePowersInPlace"
"RemoveRow"
"RemoveRowColumn"
"RemoveRowContents"
"RemoveVertex"
"RemoveVertices"
"RemoveWeight"
"RemoveZeroRows"
"Rep"
"RepChevalleyBasis"
"RepetitionCode"
"ReplacePrimes"
"ReplaceRelation"
"ReplicationNumber"
"Representation"
"RepresentationDegree"
"RepresentationDimension"
"RepresentationMatrix"
"RepresentationMatrixOfMatrix"
"RepresentationNumber"
"RepresentationSum"
"RepresentationType"
"Representations"
"Representative"
"RepresentativeCocycles"
"RepresentativePoint"
"RepresentativePoints"
"Representatives"
"RepresentsFreeModule"
"RepsDBGet"
"RepsSmallGet"
"Res_H2_G_QmodZ"
"RescaledDual"
"ResetMaximumMemoryUsage"
"ResetMinimumWeightBounds"
"Residual"
"Residue"
"ResidueClassDegree"
"ResidueClassField"
"ResidueClassRing"
"ResidueCode"
"ResidueField"
"ResidueMatrixRing"
"ResidueSystem"
"Resolution"
"ResolutionData"
"ResolutionGraph"
"ResolutionGraphVertex"
"ResolutionSpine"
"ResolveAffineCurve"
"ResolveAffineMonicSurface"
"ResolveFanMap"
"ResolveLinearSystem"
"ResolveProjectiveCurve"
"ResolveProjectiveSurface"
"ResolvedDualFan"
"Restrict"
"RestrictDegree"
"RestrictEndomorphism"
"RestrictField"
"RestrictPartitionLength"
"RestrictParts"
"RestrictResolution"
"RestrictedPartitions"
"RestrictedSubalgebra"
"Restriction"
"RestrictionChainMap"
"RestrictionData"
"RestrictionMap"
"RestrictionMatrix"
"RestrictionOfGenerators"
"RestrictionOfScalars"
"RestrictionOfScalarsToQ"
"RestrictionToImage"
"RestrictionToPatch"
"RestrictionToSubtorus"
"Resultant"
"ResumeEnumeration"
"Retrieve"
"Reverse"
"ReverseColumns"
"ReverseRows"
"Reversion"
"RevertClass"
"Rewind"
"Rewrite"
"ReynoldsOperator"
"Rho"
"RichelotIsogenousSurface"
"RichelotIsogenousSurfaces"
"RiemannRochBasis"
"RiemannRochCoordinates"
"RiemannRochDimension"
"RiemannRochPolytope"
"RiemannRochSpace"
"RiemannZeta"
"RightAction"
"RightAdjointMatrix"
"RightAnnihilator"
"RightCancellation"
"RightCosetSpace"
"RightDescentSet"
"RightExactExtension"
"RightGCD"
"RightGcd"
"RightGreatestCommonDivisor"
"RightHandFactors"
"RightIdeal"
"RightIdealClasses"
"RightInverse"
"RightInverseMorphism"
"RightIsomorphism"
"RightLCM"
"RightLcm"
"RightLeastCommonMultiple"
"RightMixedCanonicalForm"
"RightNormalForm"
"RightOrder"
"RightRegularModule"
"RightRepresentationMatrix"
"RightRing"
"RightString"
"RightStringLength"
"RightTransversal"
"RightZeroExtension"
"Ring"
"RingClassField"
"RingClassGroup"
"RingGeneratedBy"
"RingMap"
"RingOfFractions"
"RingOfIntegers"
"RombergQuadrature"
"Root"
"RootAction"
"RootAutomorphism"
"RootClosure"
"RootDatum"
"RootDecomposition"
"RootGSet"
"RootHeight"
"RootImages"
"RootLattice"
"RootNorm"
"RootNorms"
"RootNumber"
"RootOfUnity"
"RootPermutation"
"RootPosition"
"RootSequence"
"RootSide"
"RootSpace"
"RootSystem"
"RootSystemMatrix"
"RootVertex"
"Roots"
"RootsAndCoroots"
"RootsInSplittingField"
"RootsNonExact"
"RosenhainInvariants"
"Rotate"
"RotateRows"
"RotateWord"
"Rotation"
"Round"
"RoundDownDivisor"
"RoundReal"
"RoundUpDivisor"
"Row"
"RowInsert"
"RowLength"
"RowNullSpace"
"RowReductionHomomorphism"
"RowSequence"
"RowSkewLength"
"RowSpace"
"RowSubmatrix"
"RowSubmatrixRange"
"RowWeight"
"RowWeights"
"RowWord"
"Rows"
"Rowspace"
"RowvColSplit"
"Rtest"
"RubinSilverbergPolynomials"
"RuledSurface"
"S1"
"S2"
"SAT"
"SClassGroup"
"SClassGroupAbelianInvariants"
"SClassGroupExactSequence"
"SClassNumber"
"SEA"
"SFA"
"SFAElementary"
"SFAHomogeneous"
"SFAMonomial"
"SFAPower"
"SFASchur"
"SHA1"
"SIntegralDesbovesPoints"
"SIntegralLjunggrenPoints"
"SIntegralPoints"
"SIntegralQuarticPoints"
"SL"
"SL2Characteristic"
"SL2ElementToWord"
"SL2Presentation"
"SL2Triple"
"SL3ElementToWord"
"SL4Covariants"
"SL4Invariants"
"SLPGroup"
"SLPolynomialRing"
"SO"
"SOMinus"
"SOPlus"
"SPolynomial"
"SPrimesUpTo"
"SPrincipalDivisorMap"
"SPrintCategory"
"SQUFOF"
"SQ_check"
"SQextSetup"
"SQsplitSetup"
"SRegulator"
"SU"
"SU3ElementToWord"
"SUnitAction"
"SUnitCohomologyProcess"
"SUnitDiscLog"
"SUnitGroup"
"SUnitSubGroup"
"SVMForLattAuto"
"SVMForLattIso"
"SVPermutation"
"SVWord"
"SafeInverseUniformiser"
"SafeUniformiser"
"SafeUniformizer"
"SatisfiesSL2Presentation"
"SatisfiesSzPresentation"
"Saturate"
"SaturateSheaf"
"Saturation"
"ScalarField"
"ScalarLattice"
"ScalarMatrix"
"ScalarProduct"
"ScalarSparseMatrix"
"ScaleGenerators"
"ScaleMatrix"
"ScaledIgusaInvariants"
"ScaledLattice"
"ScalingFactor"
"Scheme"
"SchemeGraphMap"
"SchemeGraphMapToSchemeMap"
"SchemeMap"
"SchemeThrough"
"SchreierGenerators"
"SchreierGraph"
"SchreierSystem"
"SchreierVector"
"SchreierVectors"
"Schur"
"SchurIndex"
"SchurIndexGroup"
"SchurIndices"
"SchurNorm"
"SchurToElementaryMatrix"
"SchurToHomogeneousMatrix"
"SchurToMonomialMatrix"
"SchurToPowerSumMatrix"
"Search"
"SearchEqual"
"SearchForDecomposition"
"SearchForIsomorphism"
"SearchPGroups"
"Sec"
"SecantVariety"
"Sech"
"SecondaryInvariants"
"SecondaryInvariantsNonModular"
"SectionCentraliser"
"SectionCentralizer"
"Sections"
"Seek"
"Self"
"SelfComplementaryGraphDatabase"
"SelfIntersection"
"Selfintersection"
"Selfintersections"
"SelmerGroup"
"SemiInvariantsOfDegree"
"SemiLinearGroup"
"SemiOrthogonalBasis"
"SemiOrthogonalBasis2"
"SemiSimpleCohomologyProcess"
"SemiSimpleType"
"Semidir"
"SemidirectProduct"
"SemisimpleEFAModuleMaps"
"SemisimpleEFAModules"
"SemisimpleEFASeries"
"SemisimpleGeneratorData"
"SemisimpleLieAlgebraOld"
"SemisimpleMatrixLieAlgebraOld"
"SemisimpleRank"
"SemisimpleSubLie"
"SemisimpleSubLieDatabase"
"SemisimpleType"
"SeparatingElement"
"SeparationVertices"
"Seq"
"SeqFact"
"Seqelt"
"Seqint"
"Seqlist"
"Seqset"
"SequenceOfRadicalGenerators"
"SequenceToCompositionFactors"
"SequenceToConjugacyClasses"
"SequenceToElement"
"SequenceToFactorization"
"SequenceToInteger"
"SequenceToList"
"SequenceToMultiset"
"SequenceToProcess"
"SequenceToSet"
"SequenceToSubgroups"
"SeriesFactors"
"SeriesProcess"
"SerreBound"
"Set"
"SetAlgorithm"
"SetAllInvariantsOfDegree"
"SetArrows"
"SetAssertions"
"SetAutoColumns"
"SetAutoCompact"
"SetAxisMultiplicities"
"SetBaseGerm"
"SetBeep"
"SetBufferSize"
"SetCanonicalClass"
"SetClassGroupBoundFactorBasis"
"SetClassGroupBoundGenerators"
"SetClassGroupBoundMaps"
"SetClassGroupBounds"
"SetColumns"
"SetConicSubfieldMethodDegreeBound"
"SetDebugOnError"
"SetDefaultRealField"
"SetDefaultRealFieldPrecision"
"SetDefining"
"SetDisplayLevel"
"SetEchoInput"
"SetElementPrintFormat"
"SetEntry"
"SetEvaluationComparison"
"SetForceCFP"
"SetFreezeAll"
"SetGaloisMultiplicities"
"SetGlobalTCParameters"
"SetHeckeBound"
"SetHelpExternalBrowser"
"SetHelpExternalSystem"
"SetHelpUseExternalBrowser"
"SetHelpUseExternalSystem"
"SetHistorySize"
"SetIgnoreEof"
"SetIgnorePrompt"
"SetIgnoreSpaces"
"SetIloadAllowEsc"
"SetIndent"
"SetIntegerSolutionVariables"
"SetKantLevel"
"SetKantPrecision"
"SetKantPrinting"
"SetKaratsubaThreshold"
"SetLMGSchreierBound"
"SetLibraries"
"SetLibraryRoot"
"SetLineEditor"
"SetLogFile"
"SetLowerBound"
"SetMS"
"SetMark"
"SetMaximiseFunction"
"SetMemoryExtensionSize"
"SetMemoryLimit"
"SetMultiplicities"
"SetNeighbouringGerms"
"SetNthreads"
"SetObjectiveFunction"
"SetOptions"
"SetOrderMaximal"
"SetOrderTorsionUnit"
"SetOrderUnitsAreFundamental"
"SetOutputFile"
"SetPath"
"SetPowerPrinting"
"SetPrePatchMaps"
"SetPrecision"
"SetPresentation"
"SetPreviousSize"
"SetPrimalityProof"
"SetPrimaryInvariants"
"SetPrimitiveElement"
"SetPrintKetsInteger"
"SetPrintLevel"
"SetProcessParameters"
"SetProfile"
"SetProjectivePatchMaps"
"SetPrompt"
"SetQuaternionOrder"
"SetQuitOnError"
"SetRationalBasis"
"SetRows"
"SetSeed"
"SetSelfintersections"
"SetShellCompletion"
"SetShowPromptAlways"
"SetSparseGCD"
"SetTargetRing"
"SetToIndexedSet"
"SetToMultiset"
"SetToSequence"
"SetTraceback"
"SetTransGroupIDMany"
"SetTransverseIntersections"
"SetUpperBound"
"SetUserProcessData"
"SetVerbose"
"SetVerboseMS"
"SetViMode"
"SetsOfSingularPlaces"
"Setseq"
"Seysen"
"SeysenGram"
"Shape"
"Sheaf"
"SheafHomomorphism"
"SheafHoms"
"SheafOfDifferentials"
"ShephardTodd"
"ShephardToddNumber"
"ShephardToddOld"
"Shift"
"ShiftLeft"
"ShiftRight"
"ShiftToDegreeZero"
"ShiftValuation"
"ShimuraConjugates"
"ShimuraReduceUnit"
"ShortBasis"
"ShortCosets"
"ShortLift"
"ShortSchreierVectorCoding"
"ShortSubset"
"ShortVectors"
"ShortVectorsMatrix"
"ShortVectorsProcess"
"ShortenCode"
"ShortenStabilizerCode"
"ShortestPath"
"ShortestPaths"
"ShortestVectors"
"ShortestVectorsMatrix"
"ShowDL"
"ShowIdentifiers"
"ShowMemoryUsage"
"ShowOptions"
"ShowPrevious"
"ShowValues"
"ShrikhandeGraph"
"ShrinkingGenerator"
"SiegelTransformation"
"Sieve"
"SieveFactorBaseBound"
"SigTable"
"Sign"
"SignDecomposition"
"Signature"
"Signatures relevant to Any:"
"SiksekBound"
"SilvermanBound"
"SimNEQ"
"SimilarityGroup"
"SimpleCanonicalDissidentPoints"
"SimpleCohomologyDimensions"
"SimpleCohomologyProcess"
"SimpleCoreflectionMatrices"
"SimpleCoroots"
"SimpleEpimorphisms"
"SimpleExtension"
"SimpleGraphDatabase"
"SimpleGroupName"
"SimpleGroupOfLieType"
"SimpleGroupOrder"
"SimpleGroupsWithOrder"
"SimpleGroupsWithOrderDividing"
"SimpleHomologyDimensions"
"SimpleLieAlgebraOld"
"SimpleMatrixLieAlgebraOld"
"SimpleModule"
"SimpleOrders"
"SimpleParameters"
"SimpleQuotientAlgebras"
"SimpleQuotientProcess"
"SimpleQuotients"
"SimpleReflectionMatrices"
"SimpleReflectionPermutations"
"SimpleReflections"
"SimpleRelativeRoots"
"SimpleRoots"
"SimpleStarAlgebra"
"SimpleSubgroups"
"Simplex"
"SimplexAlphaCodeZ4"
"SimplexBetaCodeZ4"
"SimplexCode"
"SimplicialComplex"
"SimplicialProjectivePlane"
"SimplicialSubcone"
"SimplicialSubdivision"
"SimplifiedModel"
"Simplify"
"SimplifyLength"
"SimplifyOrder"
"SimplifyPresentation"
"SimplifyRep"
"SimplyConnectedVersion"
"SimpsonQuadrature"
"SimsSchreier"
"SimsSchreierCoding"
"Sin"
"Sincos"
"SingerDifferenceSet"
"SingleSolutionTest"
"SingletonAsymptoticBound"
"SingletonBound"
"SingularCones"
"SingularFibres"
"SingularPoints"
"SingularPointsOverSplittingField"
"SingularRadical"
"SingularRank"
"SingularRankPerCodimension"
"SingularSubscheme"
"Sinh"
"SixDescent"
"Size"
"SizeDFA"
"Skeleton"
"SkewHadamardDatabase"
"SkewShape"
"SkewWeight"
"Slope"
"SlopeValuation"
"Slopes"
"SmallBasis"
"SmallGraphDatabase"
"SmallGroup"
"SmallGroup2Database"
"SmallGroupDatabase"
"SmallGroupDatabaseLimit"
"SmallGroupDecoding"
"SmallGroupEncoding"
"SmallGroupIsInsoluble"
"SmallGroupIsInsolvable"
"SmallGroupIsSoluble"
"SmallGroupIsSolvable"
"SmallGroupProcess"
"SmallGroupSF"
"SmallGroupSFId"
"SmallGroups"
"SmallModularCurve"
"SmallPeriodMatrix"
"SmallRoots"
"SmallerField"
"SmallerFieldBasis"
"SmallerFieldImage"
"SmithForm"
"Sn"
"Socket"
"SocketInformation"
"Socle"
"SocleAction"
"SocleFactor"
"SocleFactors"
"SocleImage"
"SocleKernel"
"SocleQuotient"
"SocleSeries"
"SolAutCompatible"
"SolAutDerivations"
"SolAutInducible"
"SolAutModule"
"SolubleNormalQuotient"
"SolubleQuotient"
"SolubleQuotientProcess"
"SolubleRadical"
"SolubleResidual"
"SolubleSchreier"
"SolubleSchreierCoding"
"SolubleSubgroups"
"Solution"
"SolutionSpace"
"Solutions"
"SolvableAlgebra"
"SolvableLieAlgebra"
"SolvableQuotient"
"SolvableRadical"
"SolvableResidual"
"SolvableSchreier"
"SolvableSchreierCoding"
"SolvableSubgroups"
"Solve"
"SolveByRadicals"
"SolveEquations"
"SolveForInvariants"
"SolveInProductSpace"
"SolveZeroDimIdeal"
"Sort"
"SortByMP"
"SortDecomposition"
"SortRows"
"Sp"
"SpaceOfDifferentialsFirstKind"
"SpaceOfHolomorphicDifferentials"
"Span"
"SpanZ2CodeZ4"
"SpanningFan"
"SpanningForest"
"SpanningTree"
"SparseHeckeOperator"
"SparseIrreducibleRootDatum"
"SparseMatrix"
"SparseMatrixGAP"
"SparseMatrixStructure"
"SparseRootDatum"
"SparseStandardRootDatum"
"Spec"
"SpecialEvaluate"
"SpecialLieAlgebra"
"SpecialLinearGroup"
"SpecialOrthogonalGroup"
"SpecialOrthogonalGroupMinus"
"SpecialOrthogonalGroupPlus"
"SpecialPresentation"
"SpecialUnitaryGroup"
"SpecialWeights"
"Specialization"
"SpecifyInverseMorphisms"
"SpectralRadius"
"Spectrum"
"Sphere"
"SpherePackingBound"
"SphereVolume"
"SpheresPackingBound"
"Spin"
"SpinAction"
"SpinMinus"
"SpinOrbit"
"SpinPlus"
"SpinWithImages"
"SpinorCharacters"
"SpinorGenera"
"SpinorGenerators"
"SpinorGenus"
"SpinorNorm"
"SpinorRepresentatives"
"Splice"
"SpliceDiagram"
"SpliceDiagramVertex"
"Split"
"SplitAbelianSection"
"SplitAllByValues"
"SplitCell"
"SplitCellsByValues"
"SplitCollector"
"SplitElementaryAbelianSection"
"SplitExtension"
"SplitExtensionSpace"
"SplitMaximalToralSubalgebra"
"SplitRealPlace"
"SplitRootDatum"
"SplitSection"
"SplitToralSubalgebra"
"SplitViaConic"
"SplitViaMinimalField"
"Splitcomponents"
"SplittingCartanSubalgebra"
"SplittingField"
"SplittingsOfCell"
"Sprint"
"Sqrt"
"SqrtDiscriminantPolynomial"
"SquareFree"
"SquareFreeFactorization"
"SquareLatticeGraph"
"SquareRoot"
"Squarefree"
"SquarefreeFactorization"
"SquarefreePart"
"SquarefreePartialFractionDecomposition"
"SquarefreeRoots"
"SrAutomorphism"
"SrivastavaCode"
"Stabiliser"
"StabiliserCode"
"StabiliserGroup"
"StabiliserMatrix"
"StabiliserOfSpaces"
"Stabilizer"
"StabilizerCode"
"StabilizerGroup"
"StabilizerLadder"
"StabilizerMatrix"
"StandardAction"
"StandardActionGroup"
"StandardBasis"
"StandardCusp"
"StandardForm"
"StandardFormConjugationMatrices"
"StandardFormDFA"
"StandardFormField"
"StandardFormInfo"
"StandardGenerators"
"StandardGeneratorsForLargeRee"
"StandardGraph"
"StandardGroup"
"StandardLattice"
"StandardLengthening"
"StandardMaximalTorus"
"StandardMetacyclicPGroup"
"StandardParabolicSubgroup"
"StandardPresentation"
"StandardRepresentation"
"StandardRootDatum"
"StandardRootSystem"
"StandardSimplex"
"StandardTableaux"
"StandardTableauxOfWeight"
"Star"
"StarInvolution"
"StarOnGroupAlgebra"
"StartEnumeration"
"StartNewClass"
"Stauduhar"
"SteenrodOperation"
"SteinWatkinsDatabase"
"SteinitzClass"
"SteinitzForm"
"Step1"
"Step2"
"SternsAttack"
"StirlingFirst"
"StirlingSecond"
"StitchProcesses"
"StoRModule"
"StoreClear"
"StoreFactor"
"StoreGet"
"StoreIsDefined"
"StoreKeys"
"StoreRemove"
"StoreSet"
"Stratum"
"StringToBytes"
"StringToCode"
"StringToInteger"
"StringToIntegerSequence"
"StringToLower"
"StringToRational"
"StringToUpper"
"Strings"
"Strip"
"StripWhiteSpace"
"StrippedCoding"
"StrongApproximation"
"StrongGeneratorLevel"
"StrongGenerators"
"StrongGeneratorsAtLevel"
"StronglyConnectedComponents"
"StronglyHorizontalVertices"
"StronglyIrregularValues"
"StronglyRegularGraphsDatabase"
"StructureConstant"
"StructureConstants"
"StructureSheaf"
"Sub"
"SubOrder"
"SubWeights"
"SubalgebraModule"
"SubalgebrasInclusionGraph"
"SubcanonicalCurve"
"Subcode"
"SubcodeBetweenCode"
"SubcodeWordsOfWeight"
"Subcomplex"
"SubfieldCode"
"SubfieldLattice"
"SubfieldRepresentationCode"
"SubfieldRepresentationParityCode"
"SubfieldSubcode"
"SubfieldSubplane"
"Subfields"
"Subgraph"
"Subgroup"
"SubgroupChain"
"SubgroupClasses"
"SubgroupElements"
"SubgroupElementsCT"
"SubgroupLattice"
"SubgroupLatticeOld"
"SubgroupOfTorus"
"SubgroupScheme"
"SubgroupToMatrix"
"Subgroups"
"SubgroupsData"
"SubgroupsLift"
"SubgroupsMeet"
"Sublattice"
"SublatticeClasses"
"SublatticeLattice"
"Sublattices"
"Submatrix"
"SubmatrixRange"
"Submodule"
"SubmoduleAction"
"SubmoduleClasses"
"SubmoduleImage"
"SubmoduleLattice"
"SubmoduleLatticeAbort"
"Submodules"
"SubnormalSeries"
"Subring"
"Subsequences"
"Subsets"
"Substitute"
"SubstituteCyclicJoins"
"SubstituteString"
"Substring"
"SubsystemSubgroup"
"Subword"
"SuccessiveMinima"
"SuggestedPrecision"
"Sum"
"SumNorm"
"SumOf"
"SumOfBettiNumbersOfSimpleModules"
"SumOfDivisors"
"SumOfImages"
"SumOfMorphismImages"
"Summands"
"SuperGroup"
"SuperScheme"
"SuperSummitCanonicalLength"
"SuperSummitInfimum"
"SuperSummitProcess"
"SuperSummitRepresentative"
"SuperSummitSet"
"SuperSummitSupremum"
"Superlattice"
"SupersingularEllipticCurve"
"SupersingularInvariants"
"SupersingularModule"
"SupersingularPoints"
"SupersingularPolynomial"
"Supplement"
"Supplements"
"Support"
"SupportOverSplittingField"
"SupportingCone"
"SupportingHyperplane"
"SupportsExtension"
"Supremum"
"SurjectivePart"
"Suspension"
"SuzukiBNpair"
"SuzukiConjugacy"
"SuzukiConjugateRecogniser"
"SuzukiConstructiveMembership"
"SuzukiCyclicEigenvalues"
"SuzukiFindOvoidPoints"
"SuzukiGeneralRecogniser"
"SuzukiGroup"
"SuzukiIrreducibleRepresentation"
"SuzukiMaximalSubgroups"
"SuzukiMaximalSubgroupsConjugacy"
"SuzukiNonSplit6Dim"
"SuzukiOddCharacteristicReduction"
"SuzukiPermutationRepresentation"
"SuzukiPointStabiliser"
"SuzukiRecognition"
"SuzukiReduction"
"SuzukiResetRandomProcess"
"SuzukiSmallFieldReduction"
"SuzukiStabiliser"
"SuzukiStandardConstructiveMembership"
"SuzukiStandardGeneratorsNaturalRep"
"SuzukiStandardMaximalSubgroups"
"SuzukiStandardMembership"
"SuzukiStandardRecogniser"
"SuzukiSylow"
"SuzukiSylowConjugacy"
"SuzukiTensorDecompose"
"SwapColumns"
"SwapElements"
"SwapExtension"
"SwapRows"
"SwinnertonDyerPolynomial"
"Switch"
"SwitchNullMatrix"
"Sylow"
"SylowBasis"
"SylowSubgroup"
"SylowSystem"
"SylvesterMatrix"
"Sym"
"SymmetricBilinearForm"
"SymmetricBilinearFormCS"
"SymmetricBilinearFormMinus"
"SymmetricBilinearFormPlus"
"SymmetricBilinearFormType"
"SymmetricCentralizer"
"SymmetricCharacter"
"SymmetricCharacterDegrees"
"SymmetricCharacterTable"
"SymmetricCharacterValue"
"SymmetricCharacterValues"
"SymmetricComponents"
"SymmetricElementToStandardWord"
"SymmetricElementToWord"
"SymmetricForms"
"SymmetricFunctionAlgebra"
"SymmetricFunctionAlgebraElementary"
"SymmetricFunctionAlgebraHomogeneous"
"SymmetricFunctionAlgebraMonomial"
"SymmetricFunctionAlgebraPower"
"SymmetricFunctionAlgebraSchur"
"SymmetricGroup"
"SymmetricHermitianForms"
"SymmetricMatrix"
"SymmetricNormaliser"
"SymmetricNormalizer"
"SymmetricPower"
"SymmetricPower2"
"SymmetricPowerK"
"SymmetricQuaternionicForms"
"SymmetricRepresentation"
"SymmetricRepresentationOrthogonal"
"SymmetricRepresentationSeminormal"
"SymmetricSquare"
"SymmetricSquarePreimage"
"SymmetricWeightEnumerator"
"Symmetrization"
"SymplecticComponent"
"SymplecticComponents"
"SymplecticDirectSum"
"SymplecticDual"
"SymplecticForm"
"SymplecticFormCS"
"SymplecticGroup"
"SymplecticInnerProduct"
"SymplecticMatrixGroupDatabase"
"SymplecticSpace"
"SymplecticTensorProduct"
"SymplecticTransvection"
"Syndrome"
"SyndromeSpace"
"SysAssignNamesNum"
"System"
"SystemNormaliser"
"SystemNormalizer"
"SystemOfEigenvalues"
"SyzygyMatrix"
"SyzygyModule"
"Sz"
"SzBlackBoxGenerators"
"SzBlackBoxMembership"
"SzClassMap"
"SzClassRepresentative"
"SzConjugacyClasses"
"SzElementToWord"
"SzIsConjugate"
"SzPresentation"
"SzRationalConjugacyClasses"
"SzRedundantSLPGenerators"
"SzSLPCoercion"
"TMPolyCharOdd"
"TMPolyCharOddCheck"
"Tableau"
"TableauIntegerMonoid"
"TableauMonoid"
"Tableaux"
"TableauxOfShape"
"TableauxOnShapeWithContent"
"TableauxWithContent"
"TaftDecomposition"
"TailVector"
"Tails"
"TamagawaNumber"
"TamagawaNumbers"
"TameOrder"
"Tan"
"Tangent"
"TangentAngle"
"TangentCone"
"TangentLine"
"TangentSheaf"
"TangentSpace"
"TangentVariety"
"Tanh"
"TannerGraph"
"TargetRestriction"
"TargetRing"
"TateLichtenbaumPairing"
"TatePairing"
"TeichmuellerLift"
"TeichmuellerSystem"
"Tell"
"Tempname"
"Tensor"
"TensorBasis"
"TensorCond"
"TensorCondensation"
"TensorFactors"
"TensorInducedAction"
"TensorInducedBasis"
"TensorInducedPermutations"
"TensorPower"
"TensorProduct"
"TensorProductAction"
"TensorWreathProduct"
"Term"
"TerminalIndex"
"TerminalPolarisation"
"TerminalVertex"
"Terminalisation"
"Terms"
"TestEquations"
"TestHeckeRep"
"TestHomomorphism"
"TestLists"
"TestPicnDesc"
"TestReeConjugacy"
"TestWG"
"Theta"
"ThetaOperator"
"ThetaSeries"
"ThetaSeriesIntegral"
"ThetaSeriesIntegralLimited"
"ThetaSeriesLimited"
"ThetaSeriesModularForm"
"ThetaSeriesModularFormSpace"
"ThreeDescent"
"ThreeDescentByIsogeny"
"ThreeDescentCubic"
"ThreeIsogenyDescent"
"ThreeIsogenyDescentCubic"
"ThreeIsogenySelmerGroups"
"ThreeSelmerElement"
"ThreeSelmerGroup"
"ThreeTorsionMatrices"
"ThreeTorsionOrbits"
"ThreeTorsionPoints"
"ThreeTorsionType"
"Thue"
"TietzeProcess"
"TitsGroup"
"TjurinaNumber"
"To2DUpperHalfSpaceFundamentalDomian"
"ToAnalyticJacobian"
"ToBianchiCone"
"ToLiE"
"PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI"
"ToddCoxeterSchreierCoding"
"Top"
"TopQuotients"
"Tor"
"ToralRootDatum"
"ToralRootSystem"
"ToricAffinePatch"
"ToricCode"
"ToricFunctionField"
"ToricIdentityMap"
"ToricIsAffine"
"ToricIsProjective"
"ToricLattice"
"ToricLiftRationalFunction"
"ToricRestrictRationalFunction"
"ToricVariety"
"ToricVarietyMap"
"ToroidalAutomorphism"
"TorsionBasis"
"TorsionBound"
"TorsionCoefficients"
"TorsionFreeRank"
"TorsionFreeSubgroup"
"TorsionInvariants"
"TorsionLowerBound"
"TorsionMultiple"
"TorsionSubgroup"
"TorsionSubgroupScheme"
"TorsionSubmodule"
"TorsionUnitGroup"
"Torus"
"TorusTerm"
"TotalDegree"
"TotalDegreeAbstract"
"TotalLinking"
"TotalNumberOfCosets"
"TotallyRamifiedExtension"
"TotallySingularComplement"
"TppMatrix"
"Trace"
"TraceAbs"
"TraceInnerProduct"
"TraceMatrix"
"TraceOfFrobenius"
"TraceOfProduct"
"TraceSortDecomposition"
"TraceZeroSubspace"
"Traceback"
"TracesOfFrobenius"
"TrailingCoefficient"
"TrailingTerm"
"Trans2"
"Trans32Identify"
"TransformBilinearForm"
"TransformForm"
"TransformRelations"
"Transformation"
"TransformationMatrix"
"TransitiveDirectProduct"
"TransitiveGroup"
"TransitiveGroupDatabase"
"TransitiveGroupDatabaseLimit"
"TransitiveGroupDescription"
"TransitiveGroupFundamentalInvariants"
"TransitiveGroupIdentification"
"TransitiveGroupProcess"
"TransitiveGroups"
"TransitiveQuotient"
"Transitivity"
"Translate"
"Translation"
"TranslationMap"
"TranslationOfSimplex"
"TranslationToInfinity"
"Transport"
"Transpose"
"TransposePartition"
"Transvection"
"TransvectionFactors"
"Transversal"
"TransversalElt"
"TransversalNonParabolic"
"TransversalParabolic"
"TransversalProcess"
"TransversalProcessNext"
"TransversalProcessRemaining"
"TransversalWords"
"TransverseIndex"
"TransverseIntersections"
"TransverseType"
"TrapezoidalQuadrature"
"TrialDivision"
"TriangularDecomposition"
"TriangularGraph"
"Triangulation"
"TriangulationOfBoundary"
"Trim"
"Trinomials"
"TrivialLieRepresentationDecomposition"
"TrivialModule"
"TrivialOneCocycle"
"TrivialRepresentation"
"TrivialRootDatum"
"TrivialRootSystem"
"TrivialSubgroup"
"Trivialize"
"TrivializeNew"
"Truncate"
"TruncateCoefficients"
"TruncatedHyperball"
"Truncation"
"Tuple"
"TupleToList"
"Tuplist"
"TwelveDescent"
"Twist"
"TwistedBasis"
"TwistedCartanName"
"TwistedGroup"
"TwistedGroupOfLieType"
"TwistedLieAlgebra"
"TwistedPolynomials"
"TwistedQRCode"
"TwistedRootDatum"
"TwistedTori"
"TwistedToriOrders"
"TwistedTorus"
"TwistedTorusOrder"
"TwistedWindingElement"
"TwistedWindingSubmodule"
"TwistingDegree"
"Twists"
"TwoCocycle"
"TwoCover"
"TwoCoverDescent"
"TwoCoverPullback"
"TwoDescendantsOverTwoIsogenyDescendant"
"TwoDescent"
"TwoElement"
"TwoElementNormal"
"TwoGenerators"
"TwoGenus"
"TwoIsogeny"
"TwoIsogenyDescent"
"TwoIsogenySelmerGroups"
"TwoSelmerElement"
"TwoSelmerGroup"
"TwoSelmerGroupData"
"TwoSelmerGroupNew"
"TwoSelmerGroupOld"
"TwoSelmerGroupTest"
"TwoSequencePolynomial"
"TwoSidedIdealClassGroup"
"TwoSidedIdealClasses"
"TwoTorsionMatrices"
"TwoTorsionOrbits"
"TwoTorsionPolynomial"
"TwoTorsionSubgroup"
"TwoTransitiveGroupIdentification"
"Type"
"TypeOfContraction"
"TypeOfSequence"
"Types"
"TypesOfContractions"
"UltraSummitProcess"
"UltraSummitRepresentative"
"UltraSummitSet"
"UncapacitatedGraph"
"Uncondense"
"Undefine"
"UnderlyingDigraph"
"UnderlyingElement"
"UnderlyingField"
"UnderlyingGraph"
"UnderlyingMultiDigraph"
"UnderlyingMultiGraph"
"UnderlyingNetwork"
"UnderlyingRing"
"UnderlyingSet"
"UnderlyingToriMap"
"UnderlyingVertex"
"Ungetc"
"UniformizingElement"
"UniformizingParameter"
"UnimodularExtension"
"Union"
"UnionOfLines"
"UnipotentBasis"
"UnipotentMatrixGroup"
"UnipotentStabiliser"
"UnitDisc"
"UnitEquation"
"UnitGenerators"
"UnitGroup"
"UnitGroupAsSubgroup"
"UnitGroupGenerators"
"UnitRank"
"UnitTrivialSubgroup"
"UnitVector"
"UnitalFeet"
"UnitaryDirectSum"
"UnitaryForm"
"UnitaryFormCS"
"UnitaryReflection"
"UnitarySpace"
"UnitaryTensorProduct"
"UnitaryTransvection"
"Units"
"Unity"
"UnivariateEliminationIdealGenerator"
"UnivariateEliminationIdealGenerators"
"UnivariatePolynomial"
"UniversalEnvelopingAlgebra"
"UniversalMap"
"UniversalPropertyOfCokernel"
"Universe"
"UniverseCode"
"UnlabelledCayleyGraph"
"UnlabelledGraph"
"UnlabelledSchreierGraph"
"Unnormalise"
"Unnormalize"
"UnprojectionCentres"
"UnprojectionCodimensions"
"UnprojectionIndices"
"UnprojectionSubtypes"
"UnprojectionTypes"
"Unprojections"
"UnramifiedExtension"
"UnramifiedQuotientRing"
"UnramifiedSquareSymbol"
"UnsetBounds"
"UnsetGlobalTCParameters"
"UnsetLogFile"
"UnsetOutputFile"
"UntwistedOvergroup"
"UntwistedRootDatum"
"UnweightedGraph"
"UpdateGraphLabels"
"UpdateHadamardDatabase"
"UpdateLevels"
"UpperCentralSeries"
"UpperHalfPlane"
"UpperHalfPlaneUnionCusps"
"UpperHalfPlaneWithCusps"
"UpperTriangularMatrix"
"UseFFT"
"UseFlag"
"UseIFFT"
"UseImult"
"UseSmod"
"UseTwistedHopfStructure"
"UserBasePoints"
"UserGenerators"
"UserMapCreateRaw"
"UserMapImageMapRootDtm"
"UserMapPreimageMapRootDtm"
"UserProcess"
"UserRepresentation"
"UsesBrandt"
"UsesMestre"
"VNullspace"
"Valence"
"Valency"
"ValidateCryptographicCurve"
"Valuation"
"ValuationRing"
"ValuationsOfRoots"
"ValueList"
"ValueMap"
"ValueRing"
"ValuesOnUnitGenerators"
"VanLintBound"
"VariableExtension"
"VariableWeights"
"Variant"
"Variety"
"VarietySequence"
"VarietySizeOverAlgebraicClosure"
"Vector"
"VectorSpace"
"VectorSpaceOverQ"
"VectorSpaceWithBasis"
"Verify"
"VerifyMinimumDistanceLowerBound"
"VerifyMinimumDistanceUpperBound"
"VerifyMinimumLeeDistanceLowerBound"
"VerifyMinimumLeeDistanceUpperBound"
"VerifyMinimumLeeWeightLowerBound"
"VerifyMinimumLeeWeightUpperBound"
"VerifyMinimumWeightLowerBound"
"VerifyMinimumWeightUpperBound"
"VerifyRelation"
"VerschiebungImage"
"VerschiebungMap"
"Vertex"
"VertexConnectivity"
"VertexFacetHeightMatrix"
"VertexFacetIncidenceMatrix"
"VertexLabel"
"VertexLabels"
"VertexPath"
"VertexSeparator"
"VertexSet"
"VerticalJoin"
"Vertices"
"ViewWithJavaview"
"ViewWithJmol"
"VirtualDecomposition"
"VirtualRayIndices"
"VirtualRays"
"Volume"
"VolumeOfBoundary"
"Voronoi"
"VoronoiCell"
"VoronoiData"
"VoronoiGraph"
"WG2GroupRep"
"WG2HeckeRep"
"WGelement2WGtable"
"WGidealgens2WGtable"
"WGtable2WG"
"WPS"
"WZWFusion"
"WaitForConnection"
"WaitForIO"
"WallDecomposition"
"WallForm"
"WallIsometry"
"WeakApproximation"
"WeakDegree"
"WeakOrder"
"WeakPopovForm"
"WeakValuation"
"WeberClassPolynomial"
"WeberF"
"WeberF1"
"WeberF2"
"WeberPolynomial"
"WeberToHilbertClassPolynomial"
"WedderburnDecomposition"
"WeierstrassModel"
"WeierstrassPlaces"
"WeierstrassPoints"
"WeierstrassSeries"
"Weight"
"WeightClass"
"WeightDistribution"
"WeightEnumerator"
"WeightLattice"
"WeightOneHalfData"
"WeightOrbit"
"WeightSequence"
"WeightSpace"
"WeightSpaces"
"WeightToPartition"
"WeightVectors"
"WeightedDegree"
"WeightedDynkinDiagram"
"WeightedProjectiveSpace"
"Weights"
"WeightsAndMultiplicities"
"WeightsAndVectors"
"WeightsOfFlip"
"Weil"
"WeilDescent"
"WeilDescentComposita"
"WeilDescentCompositaMap"
"WeilDescentDegree"
"WeilDescentDeltas"
"WeilDescentFrobeniusExtension"
"WeilDescentFrobeniusExtensions"
"WeilDescentGenus"
"WeilDescentPrimitiveReducedCompositum"
"WeilDescentRationalParametrization"
"WeilDescentReducedCompositum"
"WeilDescentReducedDelta_1"
"WeilHeight"
"WeilPairing"
"WeilPolynomialOverFieldExtension"
"WeilPolynomialToRankBound"
"WeilRepresentation"
"WeilRestriction"
"WeilToClassGroupsMap"
"WeilToClassLatticesMap"
"WeylGroup"
"WeylMatrix"
"WeylWord"
"WeylWordFromAction"
"WhiteheadReduction"
"Width"
"Widths"
"WindingElement"
"WindingElementProjection"
"WindingLattice"
"WindingSubmodule"
"WittDecomposition"
"WittDesign"
"WittIndex"
"WittInvariant"
"WittInvariants"
"WittLieAlgebra"
"WittRing"
"Word"
"WordAcceptor"
"WordAcceptorSize"
"WordAcceptorTable"
"WordCount"
"WordDifferenceAutomaton"
"WordDifferenceSize"
"WordDifferenceTable"
"WordDifferences"
"WordGroup"
"WordInStrongGenerators"
"WordMap"
"WordOnCorootSpace"
"WordOnRoot"
"WordOnRootSpace"
"WordProblem"
"WordProblemData"
"WordStrip"
"WordToDualMatrix"
"WordToMatrix"
"WordToPerm"
"WordToSequence"
"WordToTableau"
"WordWrap"
"Words"
"WordsGramMatrix"
"WordsMatrix"
"WordsOfBoundedLeeWeight"
"WordsOfBoundedWeight"
"WordsOfLeeWeight"
"WordsTransposedMatrix"
"WreathProduct"
"Write"
"WriteBinary"
"WriteBytes"
"WriteFanoData"
"WriteGModuleOver"
"WriteGModuleOverExtensionOf"
"WriteHadamardDatabase"
"WriteIntegralMatrix"
"WriteK3Data"
"WriteNewtonPolytopeToPSFile"
"WriteOver"
"WriteOverElement"
"WriteOverLargerField"
"WriteOverMatrix"
"WriteOverSmallerField"
"WritePolytopeToJVX"
"WritePolytopeToJmolFile"
"WritePolytopeToPALP"
"WritePolytopeToPSFile"
"WritePolytopeToSvgFile"
"WritePolytopesToJVX"
"WriteRawHadamardData"
"WriteRepresentationOver"
"WriteWG"
"WronskianDeterminant"
"WronskianMatrix"
"WronskianOrders"
"X0NQuotient"
"XGCD"
"XXX_VarietySequence"
"Xgcd"
"Xor"
"YYY_SupersingularInvariants"
"YoungSubgroup"
"YoungSubgroupLadder"
"Z4CodeFromBinaryChain"
"Z4Dimension"
"Z4Type"
"ZBasis"
"ZClasses"
"ZGenerators"
"ZSpace"
"ZariskiDecomposition"
"ZechLog"
"Zero"
"ZeroChainMap"
"ZeroCocycle"
"ZeroCode"
"ZeroComplex"
"ZeroCone"
"ZeroCoordinates"
"ZeroDivisor"
"ZeroExtension"
"ZeroFan"
"ZeroGammaOrbitsOnRoots"
"ZeroMap"
"ZeroMatrix"
"ZeroModularAbelianVariety"
"ZeroModule"
"ZeroRootLattice"
"ZeroRootSpace"
"ZeroSequence"
"ZeroSubgroup"
"ZeroSubspace"
"ZeroSubvariety"
"ZeroSumCode"
"Zeroes"
"Zeros"
"ZetaFunction"
"ZetaFunctionsByDeformation"
"ZimmertBound"
"ZinovievCode"
"aInvariants"
"all_ram_extensions_of_deg_p_m_j"
"bInvariants"
"c9LatticeRecord"
"cInvariants"
"calculateAlbertAlgebra"
"calculateBigReeTwistingMapCBMs"
"fPolynomial"
"fValue"
"fValueProof"
"fVector"
"hPolynomial"
"hVector"
"has_element_of_norm_sub"
"isValidSuzukiOrder"
"jFunction"
"jInvariant"
"jInvariantMap"
"jNInvariant"
"jParameter"
"jPoints"
"kArc"
"mainInvolution"
"mfdevel"
"myFindLieAlgebra"
"nCovering"
"nIsogeny"
"nTorsionSubgroup"
"pAdicDiagonalization"
"pAdicEllipticLogarithm"
"pAdicEllipticLogarithmOfCombination"
"pAdicEmbeddings"
"pAdicField"
"pAdicHeight"
"pAdicHeightPairingMatrix"
"pAdicLSeries"
"pAdicQuotientRing"
"pAdicRegulator"
"pAdicRing"
"pCentralSeries"
"pClass"
"pClosure"
"pCore"
"pCover"
"pCoveringGroup"
"pElementaryAbelianNormalSubgroup"
"pExcess"
"pFundamentalUnits"
"pIntegralGModule"
"pIntegralModel"
"pIsogenyDescent"
"pMap"
"pMatrixRing"
"pMaximalOrder"
"pMaximalSubmodules"
"pMinimalWeierstrassModel"
"pMinimise"
"pMinus1"
"pMultiplicator"
"pMultiplicatorRank"
"pNewModularDegree"
"pNormalModel"
"pPlus1"
"pPowerTorsion"
"pPrimaryComponent"
"pPrimaryInvariants"
"pQuotient"
"pQuotientProcess"
"pRadical"
"pRank"
"pRanks"
"pSelmerGroup"
"pSignature"
"pSubalgebra"
"pSubgroup"
"p_hom"
"qCoverDescent"
"qCoverPartialDescent"
"qEigenform"
"qEigenformReductions"
"qExpansion"
"qExpansionBasis"
"qExpansionExpressions"
"qExpansionsOfGenerators"
"qIntegralBasis"
]
commentStart: "// "
|
[
{
"context": "a: '{\"username\":\"' + username + '\",\"password\":\"' + password + '\", \"remember\":' + remember + '}',\n type",
"end": 1262,
"score": 0.87144535779953,
"start": 1254,
"tag": "PASSWORD",
"value": "password"
}
] | scripts/layout.coffee | KSP-SpaceDock/SpaceDock-Frontend | 2 | $(document).ready () ->
window.userContext (done) ->
if window.user != null
$('#user-login').show()
$('#user-noLogin').hide()
$('#usernameDisplay').html('Welcome, ' + window.user.username + ' <b class="caret"></b>')
$('#view-user-profile').attr('href', '/profile/' + window.user.username)
$('#edit-user-profile').attr('href', '/profile/' + window.user.username + '/edit')
else
$('#user-login').hide()
$('#user-noLogin').show()
$('#return_to').val(window.location.href)
$('#loginSubmit').click (e) ->
$.loadingBlockShow({
imgPath: '/static/default.svg',
text: '',
style: {
position: 'fixed',
width: '100%',
height: '100%',
background: 'rgba(0, 0, 0, .8)',
left: 0,
top: 0,
zIndex: 10000
}
});
window.loginUser($('input#username').val(), $('input#password').val(), $('input#remember-me').is(":checked"), window.location.href)
window.loginUser = (username, password, remember, returnto) ->
$.ajax(backend + "/api/login",{
data: '{"username":"' + username + '","password":"' + password + '", "remember":' + remember + '}',
type:"POST",
xhrFields: { withCredentials:true },
dataType:"json",
contentType: "application/json",
success: (data) ->
$.loadingBlockHide()
if data.error
if 3055 in data.codes
window.location.href = '/account-pending'
text = 'Something went wrong with your login request:<br><br>'
$.each(data.reasons, (index,element) ->
text = text + element + '<br>'
)
$.Zebra_Dialog(text, {
'type': 'error',
'title': 'Login failed!'
})
else
window.location.href = returnto
,error: (xhr,a,b) ->
$.loadingBlockHide()
data = $.parseJSON(xhr.responseText);
if 3055 in data.codes
window.location.href = '/account-pending'
text = 'Something went wrong with your login request:<br><br>'
$.each(data.reasons, (index,element) ->
text = text + element + '<br>'
)
$.Zebra_Dialog(text, {
'type': 'error',
'title': 'Login failed!'
})
}) | 175962 | $(document).ready () ->
window.userContext (done) ->
if window.user != null
$('#user-login').show()
$('#user-noLogin').hide()
$('#usernameDisplay').html('Welcome, ' + window.user.username + ' <b class="caret"></b>')
$('#view-user-profile').attr('href', '/profile/' + window.user.username)
$('#edit-user-profile').attr('href', '/profile/' + window.user.username + '/edit')
else
$('#user-login').hide()
$('#user-noLogin').show()
$('#return_to').val(window.location.href)
$('#loginSubmit').click (e) ->
$.loadingBlockShow({
imgPath: '/static/default.svg',
text: '',
style: {
position: 'fixed',
width: '100%',
height: '100%',
background: 'rgba(0, 0, 0, .8)',
left: 0,
top: 0,
zIndex: 10000
}
});
window.loginUser($('input#username').val(), $('input#password').val(), $('input#remember-me').is(":checked"), window.location.href)
window.loginUser = (username, password, remember, returnto) ->
$.ajax(backend + "/api/login",{
data: '{"username":"' + username + '","password":"' + <PASSWORD> + '", "remember":' + remember + '}',
type:"POST",
xhrFields: { withCredentials:true },
dataType:"json",
contentType: "application/json",
success: (data) ->
$.loadingBlockHide()
if data.error
if 3055 in data.codes
window.location.href = '/account-pending'
text = 'Something went wrong with your login request:<br><br>'
$.each(data.reasons, (index,element) ->
text = text + element + '<br>'
)
$.Zebra_Dialog(text, {
'type': 'error',
'title': 'Login failed!'
})
else
window.location.href = returnto
,error: (xhr,a,b) ->
$.loadingBlockHide()
data = $.parseJSON(xhr.responseText);
if 3055 in data.codes
window.location.href = '/account-pending'
text = 'Something went wrong with your login request:<br><br>'
$.each(data.reasons, (index,element) ->
text = text + element + '<br>'
)
$.Zebra_Dialog(text, {
'type': 'error',
'title': 'Login failed!'
})
}) | true | $(document).ready () ->
window.userContext (done) ->
if window.user != null
$('#user-login').show()
$('#user-noLogin').hide()
$('#usernameDisplay').html('Welcome, ' + window.user.username + ' <b class="caret"></b>')
$('#view-user-profile').attr('href', '/profile/' + window.user.username)
$('#edit-user-profile').attr('href', '/profile/' + window.user.username + '/edit')
else
$('#user-login').hide()
$('#user-noLogin').show()
$('#return_to').val(window.location.href)
$('#loginSubmit').click (e) ->
$.loadingBlockShow({
imgPath: '/static/default.svg',
text: '',
style: {
position: 'fixed',
width: '100%',
height: '100%',
background: 'rgba(0, 0, 0, .8)',
left: 0,
top: 0,
zIndex: 10000
}
});
window.loginUser($('input#username').val(), $('input#password').val(), $('input#remember-me').is(":checked"), window.location.href)
window.loginUser = (username, password, remember, returnto) ->
$.ajax(backend + "/api/login",{
data: '{"username":"' + username + '","password":"' + PI:PASSWORD:<PASSWORD>END_PI + '", "remember":' + remember + '}',
type:"POST",
xhrFields: { withCredentials:true },
dataType:"json",
contentType: "application/json",
success: (data) ->
$.loadingBlockHide()
if data.error
if 3055 in data.codes
window.location.href = '/account-pending'
text = 'Something went wrong with your login request:<br><br>'
$.each(data.reasons, (index,element) ->
text = text + element + '<br>'
)
$.Zebra_Dialog(text, {
'type': 'error',
'title': 'Login failed!'
})
else
window.location.href = returnto
,error: (xhr,a,b) ->
$.loadingBlockHide()
data = $.parseJSON(xhr.responseText);
if 3055 in data.codes
window.location.href = '/account-pending'
text = 'Something went wrong with your login request:<br><br>'
$.each(data.reasons, (index,element) ->
text = text + element + '<br>'
)
$.Zebra_Dialog(text, {
'type': 'error',
'title': 'Login failed!'
})
}) |
[
{
"context": "sterr = new Error 'test'\n testcontext = name: 'foo'\n\n add2 = (n) ->\n equal this, testcontext",
"end": 3031,
"score": 0.6259588003158569,
"start": 3028,
"tag": "NAME",
"value": "foo"
}
] | node_modules/async-q/test.coffee | HossamBadawy/miniProject | 0 | async = require './'
Q = require 'q'
require('mocha')
chai = require 'chai'
chai.use require 'chai-as-promised'
{ assert: { strictEqual: equal, deepEqual, isRejected, fail, becomes, ok }
} = chai
eachIterator = (args, x) -> Q.delay(x*25).then -> args.push x
mapIterator = (call_order, x) ->
Q.delay(x*25).then ->
call_order.push x
x*2
filterIterator = (x) -> Q.delay(x*25).thenResolve x % 2
detectIterator = (call_order, x) ->
Q.delay(x*25).then ->
call_order.push x
x is 2
eachNoCallbackIterator = (x) ->
equal x, 1
Q()
getFunctionsObject = (call_order) ->
one: -> Q.delay(125).then ->
call_order.push 1
1
two: -> Q.delay(200).then ->
call_order.push 2
2
three: -> Q.delay(50).then ->
call_order.push 3
[3, 3]
getFunctionsArray = (call_order) ->
[
-> Q.delay(50).then ->
call_order.push 1
1
-> Q.delay(100).then ->
call_order.push 2
2
-> Q.delay(25).then ->
call_order.push 3
[3, 3]
]
describe 'forever()', ->
it "runs until it doesn't", ->
counter = 0
isRejected async.forever(->
throw 'too big!' if ++counter is 50
Q(counter)
), /^too big!$/
it 'accepts a promise for a function', ->
counter = 0
isRejected async.forever(Q(->
throw 'too big!' if ++counter is 50
Q(counter)
)), /^too big!$/
applyOneTwoThree = (call_order) ->
[
(val) ->
equal val, 5
Q.delay(100).then ->
call_order.push 'one'
1
(val) ->
equal val, 5
Q.delay(50).then ->
call_order.push 'two'
2
(val) ->
equal val, 5
Q.delay(150).then ->
call_order.push 'three'
3
]
describe 'applyEach()', ->
it 'immediately applies', ->
async.applyEach(applyOneTwoThree(call_order = []), 5).then ->
deepEqual call_order, ['two', 'one', 'three']
it 'partially applies', ->
async.applyEach(applyOneTwoThree(call_order = []))(5).then ->
deepEqual call_order, ['two', 'one', 'three']
describe 'applyEachSeries()', ->
it 'runs serially', ->
async.applyEachSeries(applyOneTwoThree(call_order = []), 5).then ->
deepEqual call_order, ['one', 'two', 'three']
describe 'compose()', ->
it 'composes functions', ->
add2 = (n) ->
equal n, 3
Q.delay(50).thenResolve n+2
mul3 = (n) ->
equal n, 5
Q.delay(15).thenResolve n*3
add1 = (n) ->
equal n, 15
Q.delay(100).thenResolve n+1
add2mul3add1 = async.compose add1, mul3, add2
becomes add2mul3add1(3), 16
it 'handles errors', ->
testerr = new Error 'test'
add2 = (n) ->
equal n, 3
Q.delay(50).thenResolve n+2
mul3 = (n) ->
equal n, 5
Q.delay(15).thenReject testerr
add1 = (n) ->
fail 'add1 should not get called'
Q.delay(100).thenResolve n+1
add2mul3add1 = async.compose add1, mul3, add2
isRejected add2mul3add1(3), testerr
it 'binds properly', ->
testerr = new Error 'test'
testcontext = name: 'foo'
add2 = (n) ->
equal this, testcontext
Q.delay(50).thenResolve n+2
mul3 = (n) ->
equal this, testcontext
Q.delay(15).thenResolve n*3
add2mul3 = async.compose mul3, add2
becomes add2mul3.call(testcontext, 3), 15
# auto(): tasks declared with dependency lists run as soon as their
# dependencies resolve; completion order is asserted via delays.
describe 'auto()', ->
  it 'runs', ->
    callOrder = []
    async.auto(
      task1: ['task2', -> Q.delay(300).then -> callOrder.push 'task1']
      task2: -> Q.delay(50).then -> callOrder.push 'task2'
      task3: ['task2', -> callOrder.push 'task3']
      task4: ['task1', 'task2', -> callOrder.push 'task4']
      task5: ['task2', -> Q.delay(200).then -> callOrder.push 'task5']
      task6: ['task2', -> Q.delay(100).then -> callOrder.push 'task6']
    ).then ->
      deepEqual callOrder,
        ['task2', 'task3', 'task6', 'task5', 'task1', 'task4']
  it 'petrifies', ->
    callOrder = []
    async.auto(
      task1: ['task2', -> Q.delay(100).then -> callOrder.push 'task1']
      task2: -> Q.delay(200).then -> callOrder.push 'task2'
      task3: ['task2', -> callOrder.push 'task3']
      task4: ['task1', 'task2', -> callOrder.push 'task4']
    ).then ->
      deepEqual callOrder, ['task2', 'task3', 'task1', 'task4']
  # Each dependent task receives a `results` object keyed by task name.
  it 'has results', ->
    callOrder = []
    async.auto(
      task1: [
        'task2'
        (results) ->
          equal results.task2, 'task2'
          Q.delay(25).then ->
            callOrder.push 'task1'
            ['task1a', 'task1b']
      ]
      task2: -> Q.delay(50).then ->
        callOrder.push 'task2'
        'task2'
      task3: [
        'task2'
        (results) ->
          equal results.task2, 'task2'
          callOrder.push 'task3'
          undefined
      ]
      task4: [
        'task1'
        'task2'
        (results) ->
          deepEqual results.task1, ['task1a', 'task1b']
          equal results.task2, 'task2'
          callOrder.push 'task4'
          'task4'
      ]
    ).then (results) ->
      deepEqual callOrder, ['task2', 'task3', 'task1', 'task4']
      deepEqual results,
        task1: ['task1a', 'task1b']
        task2: 'task2'
        task3: undefined
        task4: 'task4'
  it 'runs with an empty object', -> async.auto {}
  # The first task error (task1 here) rejects the whole auto(); dependents of
  # the failed task must never run.
  it 'errors out properly', ->
    isRejected async.auto(
      task1: -> throw 'testerror'
      task2: ['task1', -> fail 'task2 should not be called']
      task3: -> throw 'testerror2'
    ), /^testerror$/
# waterfall(): runs tasks serially, feeding each result to the next task.
describe 'waterfall()', ->
  it 'runs in the proper order', ->
    call_order = []
    async.waterfall([
      ->
        call_order.push 'fn1'
        Q.delay(0).thenResolve ['one', 'two']
      ([arg1, arg2]) ->
        call_order.push 'fn2'
        equal arg1, 'one'
        equal arg2, 'two'
        Q.delay(25).thenResolve [arg1, arg2, 'three']
      ([arg1, arg2, arg3]) ->
        call_order.push 'fn3'
        deepEqual [arg1, arg2, arg3], ['one', 'two', 'three']
        'four'
      (arg4) ->
        call_order.push 'fn4'
        'test'
    ]).then (result) ->
      equal result, 'test'
      deepEqual call_order, ['fn1', 'fn2', 'fn3', 'fn4']
  it 'handles an empty array', -> async.waterfall []
  it 'handles errors', ->
    isRejected async.waterfall([
      -> throw 'errzor'
      -> fail 'next function should not be called'
    ]), /^errzor$/
  it 'accepts a promise for an array of tasks', ->
    becomes async.waterfall(Q([
      -> 10
      (n) -> n + 30
      (n) -> n + 2
    ])), 42
# parallel(): all tasks start at once; results keep input order (array or
# object), while call_order reflects the delay-driven completion order.
describe 'parallel()', ->
  it 'returns proper results', ->
    call_order = []
    async.parallel(getFunctionsArray call_order).then (results) ->
      deepEqual call_order, [3, 1, 2]
      deepEqual results, [1, 2, [3, 3]]
  it 'handles an empty array', ->
    becomes async.parallel([]), []
  it 'handles errors', ->
    isRejected(
      async.parallel([ (-> throw 'error1'), -> throw 'error2' ])
      /^error1$/
    )
  it 'accepts an object', ->
    call_order = []
    async.parallel(getFunctionsObject call_order).then (results) ->
      deepEqual call_order, [3, 1, 2]
      deepEqual results, one: 1, two: 2, three: [3, 3]
  it 'accepts a promise', ->
    becomes async.parallel(Q(getFunctionsArray [])), [1, 2, [3, 3]]
# parallelLimit(): like parallel() but at most `limit` tasks in flight, which
# changes the completion order asserted below.
describe 'parallelLimit()', ->
  it 'returns proper results', ->
    call_order = []
    async.parallelLimit(getFunctionsArray(call_order), 2).then (results) ->
      deepEqual call_order, [1, 3, 2]
      deepEqual results, [1, 2, [3, 3]]
  it 'handles an empty array', ->
    becomes async.parallelLimit([], 2), []
  it 'handles errors', ->
    isRejected(
      async.parallelLimit([(-> throw 'error1'), -> throw 'error2'], 1)
      /^error1$/
    )
  it 'accepts an object', ->
    call_order = []
    async.parallelLimit(getFunctionsObject(call_order), 2).then (results) ->
      deepEqual call_order, [1, 3, 2]
      deepEqual results, one: 1, two: 2, three: [3, 3]
  it 'accepts a promise', ->
    becomes async.parallelLimit(getFunctionsArray([]), 2), [1, 2, [3, 3]]
# series(): tasks run one at a time, so call_order matches input order.
describe 'series()', ->
  it 'returns proper results', ->
    call_order = []
    async.series(getFunctionsArray call_order).then (results) ->
      deepEqual results, [1, 2, [3, 3]]
      deepEqual call_order, [1, 2, 3]
  it 'handles an empty array', ->
    becomes async.series([]), []
  it 'handles errors', ->
    isRejected(
      async.series([
        -> throw 'error1'
        ->
          fail 'should not be called'
          'error2'
      ])
      /^error1$/
    )
  it 'accepts an object', ->
    call_order = []
    async.series(getFunctionsObject call_order).then (results) ->
      deepEqual results, one: 1, two: 2, three: [3,3]
      deepEqual call_order, [1,2,3]
  it 'accepts a promise', ->
    becomes async.series(getFunctionsArray []), [1, 2, [3, 3]]
# each(): parallel iteration; eachIterator's x*25ms delay makes args record
# completion order rather than input order.
describe 'each()', ->
  it 'runs in parallel', ->
    args = []
    async.each([1, 3, 2], eachIterator.bind(this, args)).then ->
      deepEqual args, [1, 2, 3]
  it 'accepts an empty array', ->
    async.each([], -> fail 'iterator should not be called')
  it 'handles errors', ->
    isRejected async.each([1, 2, 3], -> throw 'error1'), /^error1$/
  it 'is aliased to forEach', -> equal async.forEach, async.each
  it 'accepts promises', ->
    args = []
    async.each(Q([1, 3, 2]), Q(eachIterator.bind(this, args))).then ->
      deepEqual args, [1, 2, 3]
# eachSeries(): serial iteration, so args keeps input order and an error stops
# further iteration immediately.
describe 'eachSeries()', ->
  it 'returns proper results', ->
    args = []
    async.eachSeries([1, 3, 2], eachIterator.bind(this, args)).then ->
      deepEqual args, [1, 3, 2]
  it 'accepts an empty array', ->
    async.eachSeries([], -> fail 'iterator should not be called')
  it 'handles errors', ->
    call_order = []
    async.eachSeries([1, 2, 3], (x) ->
      call_order.push x
      throw 'error1'
    )
    .then(-> fail 'then() should not be invoked')
    .catch (err) ->
      equal err, 'error1'
      deepEqual call_order, [1]
  it 'is aliased to forEachSeries', ->
    equal async.forEachSeries, async.eachSeries
  it 'accepts promises', ->
    args = []
    async.eachSeries(Q([1, 3, 2]), Q(eachIterator.bind(this, args))).then ->
      deepEqual args, [1, 3, 2]
# eachLimit(): bounded-concurrency iteration, exercised at limits below, at,
# and above the input length (limit 0 processes nothing).
describe 'eachLimit()', ->
  it 'accepts an empty array', ->
    async.eachLimit([], 2, -> fail 'iterator should not be called')
  it 'can handle limit < input.length', ->
    args = []
    arr = [0..9]
    async.eachLimit(arr, 2, (x) -> Q.delay(x*5).then -> args.push x).then ->
      deepEqual args, arr
  it 'can handle limit = input.length', ->
    args = []
    arr = [0..9]
    async.eachLimit(arr, arr.length, eachIterator.bind(this, args)).then ->
      deepEqual args, arr
  it 'can handle limit > input.length', ->
    args = []
    arr = [0..9]
    async.eachLimit(arr, 20, eachIterator.bind(this, args)).then ->
      deepEqual args, arr
  it 'can handle limit = 0', ->
    async.eachLimit([0..5], 0, -> fail 'iterator should not be called')
  it 'can handle errors', ->
    isRejected(
      async.eachLimit [0,1,2], 3, (x) -> throw 'error1' if x is 2
      /^error1$/
    )
  it 'is aliased to forEachLimit', -> equal async.forEachLimit, async.eachLimit
  it 'accepts promises', ->
    args = []
    arr = [0..9]
    async.eachLimit(Q(arr), Q(2), Q((x) -> Q.delay(x*5).then -> args.push x))
    .then ->
      deepEqual args, arr
# map(): parallel transform; results keep input order even though iterators
# complete out of order (see mapIterator's delay).
describe 'map()', ->
  it 'returns proper results', ->
    call_order = []
    async.map([1, 3, 2], mapIterator.bind(this, call_order)).then (results) ->
      deepEqual call_order, [1, 2, 3]
      deepEqual results, [2, 6, 4]
  it 'does not modify original array', ->
    a = [1, 2, 3]
    async.map(a, (x) -> x*2).then (results) ->
      deepEqual results, [2, 4, 6]
      deepEqual a, [1, 2, 3]
  it 'handles errors', ->
    isRejected async.map([1, 2, 3], -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    becomes async.map(Q([1, 3, 2]), Q(mapIterator.bind(this, []))), [2, 6, 4]
# mapSeries(): serial transform — call_order matches input order.
describe 'mapSeries()', ->
  it 'returns proper results', ->
    call_order = []
    async.mapSeries([1, 3, 2], mapIterator.bind(this, call_order)).then (res) ->
      deepEqual call_order, [1, 3, 2]
      deepEqual res, [2, 6, 4]
  it 'handles errors', ->
    isRejected async.mapSeries([1, 2, 3], -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    becomes async.mapSeries(Q([1, 3, 2]), Q(mapIterator.bind(this, []))),
      [2, 6, 4]
# mapLimit(): bounded-concurrency transform over limits below/at/above length.
describe 'mapLimit()', ->
  it 'accepts an empty array', ->
    async.mapLimit [], 2, -> fail 'iterator should not be called'
  it 'can handle limit < input.length', ->
    call_order = []
    async.mapLimit([2,4,3], 2, mapIterator.bind(this, call_order)).then (res) ->
      deepEqual call_order, [2, 4, 3], 'proper order'
      deepEqual res, [4, 8, 6], 'right results'
  it 'can handle limit = input.length', ->
    args = []
    arr = [0..9]
    async.mapLimit(arr, arr.length, mapIterator.bind(this, args)).then (res) ->
      deepEqual args, arr
      deepEqual res, arr.map (n) -> n*2
  it 'can handle limit > input.length', ->
    call_order = []
    arr = [0..9]
    async.mapLimit(arr, 20, mapIterator.bind(this, call_order)).then (res) ->
      deepEqual call_order, arr
      deepEqual res, arr.map (n) -> n*2
  it 'can handle limit = 0', ->
    async.mapLimit([0..5], 0, -> fail 'iterator should not be called')
  it 'can handle errors', ->
    isRejected(
      async.mapLimit [0,1,2], 3, (x) -> throw 'error1' if x is 2
      /^error1$/
    )
  it 'accepts promises', ->
    becomes async.mapLimit(Q([2,4,3]), Q(2), Q(mapIterator.bind(this, []))),
      [4, 8, 6]
# reduce(): serial left fold with an initial accumulator value.
describe 'reduce()', ->
  it 'returns proper result', ->
    call_order = []
    async.reduce([1, 2, 3], 0, (a, x) ->
      call_order.push x
      a + x
    ).then (res) ->
      equal res, 6
      deepEqual call_order, [1, 2, 3]
  it 'works async', ->
    becomes async.reduce([1, 3, 2], 0, (a, x) ->
      Q.delay(Math.random()*100).thenResolve a+x
    ), 6
  it 'handles errors', ->
    isRejected async.reduce([1, 2, 3], 0, -> throw 'error1'), /^error1$/
  it 'is aliased to inject', -> equal async.inject, async.reduce
  it 'is aliased to foldl', -> equal async.foldl, async.reduce
  it 'accepts promises', ->
    becomes async.reduce(Q([1, 3, 2]), Q(0), Q((a, x) -> a+x)), 6
# reduceRight(): right fold; must not mutate the input array.
describe 'reduceRight()', ->
  it 'returns proper result', ->
    call_order = []
    a = [1, 2, 3]
    async.reduceRight(a, 0, (a, x) ->
      call_order.push x
      a + x
    ).then (res) ->
      equal res, 6
      deepEqual call_order, [3, 2, 1]
      deepEqual a, [1, 2, 3]
  it 'is aliased to foldr', -> equal async.foldr, async.reduceRight
  it 'accepts promises', ->
    becomes async.reduceRight(Q([1, 2, 3]), Q(0), Q((a, x) -> a+x)), 6
# filter(): keeps items whose (promised) predicate result is truthy;
# result order follows input order, not completion order.
describe 'filter()', ->
  it 'returns proper results', ->
    becomes async.filter([3, 1, 2], filterIterator), [3, 1]
  it 'does not modify input', ->
    a = [3, 1, 2]
    async.filter(a, (x) -> Q x % 2).then (res) ->
      deepEqual res, [3,1]
      deepEqual a, [3, 1, 2]
  it 'is aliased to select', -> equal async.select, async.filter
  it 'accepts promises', ->
    becomes async.filter(Q([3, 1, 2]), Q(filterIterator)), [3, 1]
describe 'filterSeries()', ->
  it 'returns proper results', ->
    becomes async.filterSeries([3, 1, 2], filterIterator), [3, 1]
  it 'is aliased to selectSeries', ->
    equal async.selectSeries, async.filterSeries
  it 'accepts promises', ->
    becomes async.filterSeries(Q([3, 1, 2]), Q(filterIterator)), [3, 1]
# reject(): inverse of filter() — drops items whose predicate is truthy.
describe 'reject()', ->
  it 'returns proper results', ->
    becomes async.reject([3, 1, 2], filterIterator), [2]
  it 'does not modify input', ->
    a = [3, 1, 2]
    async.reject(a, (x) -> Q x % 2).then (res) ->
      deepEqual res, [2]
      deepEqual a, [3, 1, 2]
  it 'accepts promises', ->
    becomes async.reject(Q([3, 1, 2]), Q(filterIterator)), [2]
describe 'rejectSeries()', ->
  it 'returns proper results', ->
    becomes async.rejectSeries([3, 1, 2], filterIterator), [2]
  it 'accepts promises', ->
    becomes async.rejectSeries(Q([3, 1, 2]), Q(filterIterator)), [2]
# some(): resolves true on the first truthy predicate result; the early-return
# test shows it resolves before slower iterators finish.
describe 'some()', ->
  it 'finds something', ->
    becomes async.some([3, 1, 2], (x) -> Q.delay(0).thenResolve x is 1), true
  it 'finds nothing', ->
    becomes async.some([3, 2, 1], (x) -> Q x is 10), false
  it 'is aliased to any', -> equal async.any, async.some
  it 'returns early on match', ->
    call_order = []
    async.some([1, 2, 3], (x) -> Q.delay(x*25).then ->
      call_order.push x
      x is 1
    ).then(-> call_order.push 'resolved')
    .delay(100)
    .then(-> deepEqual call_order, [1, 'resolved', 2, 3])
  it 'accepts promises', ->
    becomes async.some(Q([3, 1, 2]), Q((x) -> Q.delay(0).thenResolve x is 1)),
      true
# every(): resolves false on the first falsy predicate result.
describe 'every()', ->
  it 'matches everything', ->
    becomes async.every([1, 2, 3], (x) -> Q.delay(0).thenResolve x < 4), true
  it 'matches not everything', ->
    becomes async.every([1, 2, 3], (x) -> Q.delay(0).thenResolve x % 2), false
  it 'is aliased to all', -> equal async.all, async.every
  it 'returns early on mis-match', ->
    call_order = []
    async.every([1, 2, 3], (x) -> Q.delay(x*25).then ->
      call_order.push x
      x is 1
    ).then(-> call_order.push 'resolved')
    .delay(100)
    .then(-> deepEqual call_order, [1, 2, 'resolved', 3])
  it 'accepts promises', ->
    becomes async.every(Q([1, 2, 3]), Q((x) -> Q.delay(0).thenResolve x < 4)),
      true
# detect(): resolves with the first value whose predicate is truthy; parallel
# variant may short-circuit before slower iterators complete.
describe 'detect()', ->
  it 'returns proper results', ->
    call_order = []
    async.detect([3, 2, 1], detectIterator.bind(this, call_order))
    .then (res) ->
      call_order.push 'resolved'
      equal res, 2
    .delay(100)
    .then -> deepEqual call_order, [1, 2, 'resolved', 3]
  it 'returns one of multiple matches', ->
    call_order = []
    async.detect([3,2,2,1,2], detectIterator.bind(this, call_order))
    .then (res) ->
      call_order.push 'resolved'
      equal res, 2
    .delay(100)
    .then ->
      deepEqual call_order.filter((c) -> c isnt 'resolved'), [1, 2, 2, 2, 3]
      i = call_order.indexOf 'resolved'
      ok (i < 5), 'short circuited early'
  it 'handles errors', ->
    isRejected(
      async.detect([1, 2, 3], (x) -> if x is 2 then throw 'error1' else false)
      /^error1$/
    )
  it 'accepts promises', ->
    becomes async.detect(Q([1, 2, 3]), Q(detectIterator.bind(this, []))), 2
# detectSeries(): serial detect — stops iterating once a match is found.
describe 'detectSeries()', ->
  it 'returns proper results', ->
    call_order = []
    async.detectSeries([3,2,1], detectIterator.bind(this, call_order))
    .then (res) ->
      call_order.push 'resolved'
      equal res, 2
    .delay(200)
    .then -> deepEqual call_order, [3, 2, 'resolved']
  it 'returns one of multiple matches', ->
    call_order = []
    async.detectSeries([3,2,2,1,2], detectIterator.bind(this, call_order))
    .then (res) ->
      call_order.push 'resolved'
      equal res, 2
    .delay(200)
    .then -> deepEqual call_order, [3, 2, 'resolved']
  it 'accepts promises', ->
    becomes async.detectSeries(Q([3,2,1]), Q(detectIterator.bind(this, []))), 2
# sortBy(): sorts by the (promised) key returned from the iterator.
describe 'sortBy()', ->
  it 'returns proper results', ->
    becomes(
      async.sortBy([{a:1},{a:15},{a:6}], (x) -> Q.delay(0).thenResolve x.a)
      [{a:1},{a:6},{a:15}]
    )
  it 'accepts promises', ->
    becomes async.sortBy(Q([{a:2},{a:1}]), Q((x) -> Q(x.a))), [{a:1},{a:2}]
# concat(): flattens iterator results; parallel variant appends results in
# completion order ("just-in-time"), series variant in input order.
describe 'concat()', ->
  it 'returns just-in-time results', ->
    call_order = []
    iterator = (x) ->
      Q.delay(x*25).then ->
        call_order.push x
        [x..1]
    async.concat([1,3,2], iterator).then (res) ->
      deepEqual res, [1, 2, 1, 3, 2, 1]
      deepEqual call_order, [1, 2, 3]
  it 'handles errors', ->
    isRejected async.concat([1,2,3], -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    iterator = (x) -> Q.delay(x*25).then -> [x..1]
    becomes async.concat(Q([1,3,2]), Q(iterator)), [1, 2, 1, 3, 2, 1]
describe 'concatSeries()', ->
  it 'returns ordered results', ->
    call_order = []
    iterator = (x) ->
      Q.delay(x*25).then ->
        call_order.push x
        [x..1]
    async.concatSeries([1,3,2], iterator).then (res) ->
      deepEqual res, [1,3,2,1,2,1]
      deepEqual call_order, [1,3,2]
  it 'handles errors', ->
    isRejected async.concatSeries([1,2,3], -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    iterator = (x) -> Q.delay(x*25).then -> [x..1]
    becomes async.concatSeries(Q([1,3,2]), Q(iterator)), [1,3,2,1,2,1]
# until(): test runs BEFORE each iteration; loops until test is truthy.
describe 'until()', ->
  it 'returns proper results', ->
    call_order = []
    count = 0
    async.until(
      ->
        call_order.push ['test', count]
        count is 5
      ->
        call_order.push ['iterator', count]
        count++
    ).then ->
      deepEqual call_order, [
        ['test', 0]
        ['iterator', 0], ['test', 1]
        ['iterator', 1], ['test', 2]
        ['iterator', 2], ['test', 3]
        ['iterator', 3], ['test', 4]
        ['iterator', 4], ['test', 5]
      ]
      equal count, 5
  it 'handles test errors', ->
    isRejected async.until((-> throw 'error1'), ->), /^error1$/
  it 'handles iterator errors', ->
    isRejected async.until((-> false), -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    count = 0
    async.until(Q(-> count is 5), Q(-> count++)).then -> equal count, 5
# doUntil(): iterator runs first, test AFTER each iteration (note the argument
# order is iterator-then-test, the reverse of until()).
describe 'doUntil()', ->
  it 'returns proper results', ->
    call_order = []
    count = 0
    async.doUntil(
      ->
        call_order.push ['iterator', count]
        count++
      ->
        call_order.push ['test', count]
        count is 5
    ).then ->
      deepEqual call_order, [
        ['iterator', 0], ['test', 1]
        ['iterator', 1], ['test', 2]
        ['iterator', 2], ['test', 3]
        ['iterator', 3], ['test', 4]
        ['iterator', 4], ['test', 5]
      ]
      equal count, 5
  it 'handles test errors', ->
    isRejected async.doUntil((->), -> throw 'error1'), /^error1$/
  it 'handles iterator errors', ->
    isRejected async.doUntil((-> throw 'error1'), -> false), /^error1$/
  it 'accepts promises', ->
    count = 0
    async.doUntil(Q(-> count++), Q(-> count is 5)).then -> equal count, 5
# whilst(): like until() but loops WHILE the test is truthy.
describe 'whilst()', ->
  it 'returns proper results', ->
    call_order = []
    count = 0
    async.whilst(
      ->
        call_order.push ['test', count]
        count < 5
      ->
        call_order.push ['iterator', count]
        count++
    ).then ->
      deepEqual call_order, [
        ['test', 0]
        ['iterator', 0], ['test', 1]
        ['iterator', 1], ['test', 2]
        ['iterator', 2], ['test', 3]
        ['iterator', 3], ['test', 4]
        ['iterator', 4], ['test', 5]
      ]
      equal count, 5
  it 'handles test errors', ->
    isRejected async.whilst((-> throw 'error1'), ->), /^error1$/
  it 'handles iterator errors', ->
    isRejected async.whilst((-> true), -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    count = 0
    async.whilst(Q(-> count < 5), Q(-> count++)).then -> equal count, 5
# doWhilst(): iterator first, then test; loops while test is truthy.
describe 'doWhilst()', ->
  it 'returns proper results', ->
    call_order = []
    count = 0
    async.doWhilst(
      ->
        call_order.push ['iterator', count]
        count++
      ->
        call_order.push ['test', count]
        count < 5
    ).then ->
      deepEqual call_order, [
        ['iterator', 0], ['test', 1]
        ['iterator', 1], ['test', 2]
        ['iterator', 2], ['test', 3]
        ['iterator', 3], ['test', 4]
        ['iterator', 4], ['test', 5]
      ]
      equal count, 5
  it 'handles test errors', ->
    isRejected async.doWhilst((->), -> throw 'error1'), /^error1$/
  it 'handles iterator errors', ->
    isRejected async.doWhilst((-> throw 'error1'), -> true), /^error1$/
  it 'accepts promises', ->
    count = 0
    async.doWhilst(Q(-> count++), Q(-> count < 5)).then -> equal count, 5
# queue(): worker queue with configurable concurrency. push() returns a promise
# for the task's result; push4 additionally exposes a `.start` promise that
# resolves when the task is dequeued.
describe 'queue()', ->
  # Shared scenario: four tasks with fixed delays; `changeTo`, when given,
  # reassigns q.concurrency after the pushes to test live concurrency changes.
  testQueue = (concurrency, changeTo=null) ->
    call_order = []
    delays = [160, 80, 240, 80]
    # worker1: --1-4
    # worker2: -2---3
    # order of completion: 2,1,4,3
    q = async.queue(
      (task) ->
        Q.delay(delays.shift()).then ->
          call_order.push "process #{task}"
          'arg'
      concurrency
    )
    # undefined concurrency means the queue defaulted to 1
    concurrency ?= 1
    push1 = q.push(1).then (arg) ->
      equal arg, 'arg'
      call_order.push 'resolved 1'
    push2 = q.push(2).then (arg) ->
      equal arg, 'arg'
      call_order.push 'resolved 2'
    push3 = q.push(3).then (arg) ->
      equal arg, 'arg'
      call_order.push 'resolved 3'
    push4 = q.push(4)
    push4.start.then -> call_order.push 'started 4'
    push4.then (arg) ->
      equal arg, 'arg'
      call_order.push 'resolved 4'
    equal q.length(), 4, 'queue should be length 4 after all pushes'
    equal q.concurrency, concurrency,
      "concurrency should be #{concurrency} after pushes"
    if changeTo?
      concurrency = q.concurrency = changeTo
    # 'drain' fires when the queue empties; assertions run on nextTick so the
    # final .then handlers above have already recorded their entries.
    drain = Q.promise (resolve, reject) ->
      q.on 'drain', -> process.nextTick ->
        try
          co = if concurrency is 2
            [ 'process 2', 'resolved 2'
              'process 1', 'resolved 1', 'started 4',
              'process 4', 'resolved 4'
              'process 3', 'resolved 3' ]
          else
            [ 'process 1', 'resolved 1'
              'process 2', 'resolved 2'
              'process 3', 'resolved 3', 'started 4',
              'process 4', 'resolved 4' ]
          deepEqual call_order, co, 'call_order should be correct'
          equal q.concurrency, concurrency,
            "concurrency should be #{concurrency} in drain()"
          equal q.length(), 0, 'queue should be length 0 in drain()'
          resolve()
        catch err
          reject err
    Q.all [push1, push2, push3, push4, drain]
  it 'returns proper results', -> testQueue 2
  it 'defaults to concurrency of 1', -> testQueue()
  it 'handles errors', ->
    results = []
    q = async.queue (({name}) -> throw 'fooError' if name is 'foo'), 2
    drain = Q.promise (resolve, reject) ->
      q.on 'drain', -> process.nextTick ->
        try
          deepEqual results, ['bar', 'fooError']
          resolve()
        catch err
          reject err
    push1 = q.push(name: 'bar')
      .then(-> results.push 'bar')
      .catch(-> results.push 'barError')
    push2 = q.push(name: 'foo')
      .then(-> results.push 'foo')
      .catch(-> results.push 'fooError')
    Q.all [drain, push1, push2]
  it 'allows concurrency change', -> testQueue(2, 1)
  it 'supports unshift()', ->
    queue_order = []
    q = async.queue ((task) -> queue_order.push task), 1
    Q.all([4..1].map(q.unshift.bind q)).then ->
      deepEqual queue_order, [1, 2, 3, 4]
  # Bulk push: q.push(array) returns one promise per task.
  it 'allows pushing multiple tasks at once', ->
    call_order = []
    delays = [160,80,240,80]
    q = async.queue(
      (task) ->
        Q.delay(delays.shift()).then ->
          call_order.push "process #{task}"
          task
      2
    )
    pushes = q.push([1, 2, 3, 4]).map (p) ->
      p.then (arg) -> call_order.push "resolved #{arg}"
    equal q.length(), 4, 'queue length is 4 after bulk push'
    equal q.concurrency, 2, 'concurrency is 2 after bulk push'
    Q.all(pushes).then ->
      deepEqual call_order, [
        'process 2', 'resolved 2'
        'process 1', 'resolved 1'
        'process 4', 'resolved 4'
        'process 3', 'resolved 3'
      ]
      equal q.concurrency, 2, 'concurrency is 2 after completion'
      equal q.length(), 0, 'queue length is 0 after completion'
# cargo(): like queue() but the worker receives a BATCH of up to `payload`
# tasks at once; every task in a batch resolves with the batch's result.
describe 'cargo()', ->
  it 'returns proper results', ->
    call_order = []
    delays = [160, 160, 80]
    # worker: --12--34--5-
    # order of completion: 1,2,3,4,5
    c = async.cargo(
      (tasks) ->
        Q.delay(delays.shift()).then ->
          call_order.push "process #{tasks}"
          'arg'
      2
    )
    push1 = c.push(1).then (arg) ->
      equal arg, 'arg'
      call_order.push 'resolved 1'
    push2 = c.push(2).then (arg) ->
      equal arg, 'arg'
      call_order.push 'resolved 2'
    equal c.length(), 2
    # async pushes
    push3 = Q.delay(60).then ->
      c.push(3).then (arg) ->
        equal arg, 'arg'
        call_order.push 'resolved 3'
    push45 = Q.delay(120).then ->
      push4 = c.push(4).then (arg) ->
        equal arg, 'arg'
        call_order.push 'resolved 4'
      equal c.length(), 2
      push5 = c.push(5).then (arg) ->
        equal arg, 'arg'
        call_order.push 'resolved 5'
      Q.all [push4, push5]
    Q.all([push1, push2, push3, push45]).then ->
      deepEqual call_order, [
        'process 1,2', 'resolved 1', 'resolved 2'
        'process 3,4', 'resolved 3', 'resolved 4'
        'process 5', 'resolved 5'
      ]
      equal c.length(), 0
  # Bulk push: tasks 1-3 fit the payload of 3 and share one batch result.
  it 'allows pushing multiple tasks at once', ->
    call_order = []
    delays = [120, 40]
    # worker: -123-4-
    # order of completion: 1,2,3,4
    c = async.cargo(
      (tasks) ->
        Q.delay(delays.shift()).then ->
          call_order.push "process #{tasks}"
          tasks.join()
      3
    )
    pushes = c.push([1..4]).map (p) -> p.then (arg) ->
      call_order.push "resolved #{arg}"
    equal c.length(), 4
    Q.all(pushes).then ->
      deepEqual call_order, [
        'process 1,2,3', 'resolved 1,2,3'
        'resolved 1,2,3', 'resolved 1,2,3'
        'process 4', 'resolved 4'
      ]
      equal c.length(), 0
# memoize(): caches results per argument hash; repeated calls with the same
# arguments must hit the cache and not re-invoke the wrapped function.
describe 'memoize()', ->
  it 'memoizes a function', ->
    call_order = []
    fn = (arg1, arg2) ->
      call_order.push ['fn', arg1, arg2]
      Q arg1 + arg2
    fn2 = async.memoize fn
    Q.all([
      becomes(fn2(1, 2), 3)
      becomes(fn2(1, 2), 3)
      becomes(fn2(2, 2), 4)
    ]).then -> deepEqual call_order, [['fn', 1, 2], ['fn', 2, 2]]
  it 'handles errors', ->
    fn = (arg1, arg2) -> throw 'error1'
    isRejected async.memoize(fn)(1, 2), /^error1$/
  it 'handles multiple async calls', ->
    fn = (arg1, arg2) -> Q.delay(10).then -> [arg1, arg2]
    fn2 = async.memoize fn
    Q.all [
      becomes fn2(1, 2), [1, 2]
      becomes fn2(1, 2), [1, 2]
    ]
  it 'accepts a custom hash function', ->
    # All calls hash to the same key, so the second call returns the cached 3.
    fn = (arg1, arg2) -> Q arg1 + arg2
    fn2 = async.memoize fn, -> 'custom hash'
    Q.all [
      becomes fn2(1, 2), 3
      becomes fn2(2, 2), 3
    ]
  it 'lets you futz with the cache', ->
    # Pre-seeding fn.memo means the wrapped function is never invoked.
    fn = async.memoize (arg) -> fail 'Function should never be called'
    fn.memo.foo = 'bar'
    becomes fn('foo'), 'bar'
# unmemoize(): recovers the original (uncached) behavior of a memoized fn.
describe 'unmemoize()', ->
  it 'returns the original function', ->
    call_order = []
    fn = (arg1, arg2) ->
      call_order.push ['fn', arg1, arg2]
      Q arg1 + arg2
    fn2 = async.memoize fn
    fn3 = async.unmemoize fn2
    Q.all([
      becomes(fn3(1, 2), 3)
      becomes(fn3(1, 2), 3)
      becomes(fn3(2, 2), 4)
    ]).then -> deepEqual call_order, [['fn', 1, 2], ['fn', 1, 2], ['fn', 2, 2]]
  it 'works on not-memoized functions', ->
    fn = (arg1, arg2) -> Q arg1 + arg2
    fn2 = async.unmemoize fn
    becomes fn2(1, 2), 3
# times(): calls the iterator with 0..n-1 in parallel; result order follows n,
# not completion order (see the reversed delays in 'maintains order').
describe 'times()', ->
  it 'returns proper results', ->
    becomes async.times(5, (n) -> Q n), [0..4]
  it 'maintains order', ->
    becomes async.times(3, (n) -> Q.delay((3-n)*25).thenResolve n), [0..2]
  it 'accepts n=0', ->
    async.times(0, -> fail 'iterator should not be called')
  it 'handles errors', ->
    isRejected async.times(3, -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    becomes async.times(Q(5), Q((n) -> Q n)), [0..4]
# timesSeries(): serial variant — decreasing delays prove serial execution.
describe 'timesSeries()', ->
  it 'returns proper results', ->
    call_order = []
    async.timesSeries(
      5
      (n) ->
        Q.delay(100-n*10).then ->
          call_order.push n
          n
    ).then (res) ->
      deepEqual call_order, [0..4]
      deepEqual res, [0..4]
  it 'handles errors', ->
    isRejected async.timesSeries(5, -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    becomes async.timesSeries(Q(5), Q((n) -> Q n)), [0..4]
### FIXME spews output for some reason
['log', 'dir'].forEach (name) ->
describe "#{name}()", ->
it "calls console.#{name}() on results", ->
fn = (arg1) ->
equal arg1, 'one'
Q.delay(0).thenResolve 'test'
fn_err = (arg1) ->
equal arg1, 'one'
Q.delay(0).thenReject 'error'
_console_fn = console[name]
_error = console.error
console[name] = (val) ->
console[name] = _console_fn
equal val, 'test'
equal arguments.length, 1
async[name](fn, 'one').then ->
console.error = (val) ->
console.error = _error
equal val, 'error'
async[name] fn_err, 'one'
###
| 88088 | async = require './'
Q = require 'q'
require('mocha')
chai = require 'chai'
chai.use require 'chai-as-promised'
{ assert: { strictEqual: equal, deepEqual, isRejected, fail, becomes, ok }
} = chai
# Parallel-order probe: waits x*25ms, then records x in `args` — so `args`
# reflects completion order, not input order.
eachIterator = (args, x) -> Q.delay(x*25).then -> args.push x
# map() probe: records x in completion order (delay x*25ms), resolves x*2.
mapIterator = (call_order, x) ->
  Q.delay(x*25).then ->
    call_order.push x
    x*2
# filter() predicate: resolves x % 2 (truthy for odd x) after a delay.
filterIterator = (x) -> Q.delay(x*25).thenResolve x % 2
# detect() predicate: records x in completion order, matches when x is 2.
detectIterator = (call_order, x) ->
  Q.delay(x*25).then ->
    call_order.push x
    x is 2
# Iterator that asserts it only ever sees the value 1, then resolves.
eachNoCallbackIterator = (x) ->
  equal x, 1
  Q()
# Named-task fixture: three tasks with delays 125/200/50ms so the completion
# order (3, 1, 2) differs from the key order; each records into `call_order`.
getFunctionsObject = (call_order) ->
  one: -> Q.delay(125).then ->
    call_order.push 1
    1
  two: -> Q.delay(200).then ->
    call_order.push 2
    2
  three: -> Q.delay(50).then ->
    call_order.push 3
    [3, 3]
# Array-task fixture: delays 50/100/25ms make parallel completion order
# (3, 1, 2) differ from input order; each task records into `call_order`.
getFunctionsArray = (call_order) ->
  [
    -> Q.delay(50).then ->
      call_order.push 1
      1
    -> Q.delay(100).then ->
      call_order.push 2
      2
    -> Q.delay(25).then ->
      call_order.push 3
      [3, 3]
  ]
# forever(): repeats the function until it throws/rejects; the thrown value
# becomes the rejection.
describe 'forever()', ->
  it "runs until it doesn't", ->
    counter = 0
    isRejected async.forever(->
      throw 'too big!' if ++counter is 50
      Q(counter)
    ), /^too big!$/
  it 'accepts a promise for a function', ->
    counter = 0
    isRejected async.forever(Q(->
      throw 'too big!' if ++counter is 50
      Q(counter)
    )), /^too big!$/
# applyEach fixture: three functions that each expect the applied value 5 and
# finish after 100/50/150ms, recording their name in `call_order`.
applyOneTwoThree = (call_order) ->
  [
    (val) ->
      equal val, 5
      Q.delay(100).then ->
        call_order.push 'one'
        1
    (val) ->
      equal val, 5
      Q.delay(50).then ->
        call_order.push 'two'
        2
    (val) ->
      equal val, 5
      Q.delay(150).then ->
        call_order.push 'three'
        3
  ]
# applyEach(): applies the same argument(s) to every function in parallel;
# without arguments it returns a partially-applied function.
describe 'applyEach()', ->
  it 'immediately applies', ->
    async.applyEach(applyOneTwoThree(call_order = []), 5).then ->
      deepEqual call_order, ['two', 'one', 'three']
  it 'partially applies', ->
    async.applyEach(applyOneTwoThree(call_order = []))(5).then ->
      deepEqual call_order, ['two', 'one', 'three']
# applyEachSeries(): serial variant — call_order follows array order.
describe 'applyEachSeries()', ->
  it 'runs serially', ->
    async.applyEachSeries(applyOneTwoThree(call_order = []), 5).then ->
      deepEqual call_order, ['one', 'two', 'three']
# compose(): right-to-left function composition of promise-returning
# functions; compose(f, g, h)(x) computes f(g(h(x))).
describe 'compose()', ->
  it 'composes functions', ->
    add2 = (n) ->
      equal n, 3
      Q.delay(50).thenResolve n+2
    mul3 = (n) ->
      equal n, 5
      Q.delay(15).thenResolve n*3
    add1 = (n) ->
      equal n, 15
      Q.delay(100).thenResolve n+1
    add2mul3add1 = async.compose add1, mul3, add2
    becomes add2mul3add1(3), 16
  it 'handles errors', ->
    # mul3 rejects, so the downstream add1 must never run.
    testerr = new Error 'test'
    add2 = (n) ->
      equal n, 3
      Q.delay(50).thenResolve n+2
    mul3 = (n) ->
      equal n, 5
      Q.delay(15).thenReject testerr
    add1 = (n) ->
      fail 'add1 should not get called'
      Q.delay(100).thenResolve n+1
    add2mul3add1 = async.compose add1, mul3, add2
    isRejected add2mul3add1(3), testerr
  it 'binds properly', ->
    # Each composed function must receive the `this` passed at call time.
    testerr = new Error 'test'
    testcontext = name: 'foo'
    add2 = (n) ->
      equal this, testcontext
      Q.delay(50).thenResolve n+2
    mul3 = (n) ->
      equal this, testcontext
      Q.delay(15).thenResolve n*3
    add2mul3 = async.compose mul3, add2
    becomes add2mul3.call(testcontext, 3), 15
# auto(): tasks declared with dependency lists run as soon as their
# dependencies resolve; completion order is asserted via delays.
describe 'auto()', ->
  it 'runs', ->
    callOrder = []
    async.auto(
      task1: ['task2', -> Q.delay(300).then -> callOrder.push 'task1']
      task2: -> Q.delay(50).then -> callOrder.push 'task2'
      task3: ['task2', -> callOrder.push 'task3']
      task4: ['task1', 'task2', -> callOrder.push 'task4']
      task5: ['task2', -> Q.delay(200).then -> callOrder.push 'task5']
      task6: ['task2', -> Q.delay(100).then -> callOrder.push 'task6']
    ).then ->
      deepEqual callOrder,
        ['task2', 'task3', 'task6', 'task5', 'task1', 'task4']
  it 'petrifies', ->
    callOrder = []
    async.auto(
      task1: ['task2', -> Q.delay(100).then -> callOrder.push 'task1']
      task2: -> Q.delay(200).then -> callOrder.push 'task2'
      task3: ['task2', -> callOrder.push 'task3']
      task4: ['task1', 'task2', -> callOrder.push 'task4']
    ).then ->
      deepEqual callOrder, ['task2', 'task3', 'task1', 'task4']
  # Each dependent task receives a `results` object keyed by task name.
  it 'has results', ->
    callOrder = []
    async.auto(
      task1: [
        'task2'
        (results) ->
          equal results.task2, 'task2'
          Q.delay(25).then ->
            callOrder.push 'task1'
            ['task1a', 'task1b']
      ]
      task2: -> Q.delay(50).then ->
        callOrder.push 'task2'
        'task2'
      task3: [
        'task2'
        (results) ->
          equal results.task2, 'task2'
          callOrder.push 'task3'
          undefined
      ]
      task4: [
        'task1'
        'task2'
        (results) ->
          deepEqual results.task1, ['task1a', 'task1b']
          equal results.task2, 'task2'
          callOrder.push 'task4'
          'task4'
      ]
    ).then (results) ->
      deepEqual callOrder, ['task2', 'task3', 'task1', 'task4']
      deepEqual results,
        task1: ['task1a', 'task1b']
        task2: 'task2'
        task3: undefined
        task4: 'task4'
  it 'runs with an empty object', -> async.auto {}
  # The first task error (task1 here) rejects the whole auto(); dependents of
  # the failed task must never run.
  it 'errors out properly', ->
    isRejected async.auto(
      task1: -> throw 'testerror'
      task2: ['task1', -> fail 'task2 should not be called']
      task3: -> throw 'testerror2'
    ), /^testerror$/
# waterfall(): runs tasks serially, feeding each result to the next task.
describe 'waterfall()', ->
  it 'runs in the proper order', ->
    call_order = []
    async.waterfall([
      ->
        call_order.push 'fn1'
        Q.delay(0).thenResolve ['one', 'two']
      ([arg1, arg2]) ->
        call_order.push 'fn2'
        equal arg1, 'one'
        equal arg2, 'two'
        Q.delay(25).thenResolve [arg1, arg2, 'three']
      ([arg1, arg2, arg3]) ->
        call_order.push 'fn3'
        deepEqual [arg1, arg2, arg3], ['one', 'two', 'three']
        'four'
      (arg4) ->
        call_order.push 'fn4'
        'test'
    ]).then (result) ->
      equal result, 'test'
      deepEqual call_order, ['fn1', 'fn2', 'fn3', 'fn4']
  it 'handles an empty array', -> async.waterfall []
  it 'handles errors', ->
    isRejected async.waterfall([
      -> throw 'errzor'
      -> fail 'next function should not be called'
    ]), /^errzor$/
  it 'accepts a promise for an array of tasks', ->
    becomes async.waterfall(Q([
      -> 10
      (n) -> n + 30
      (n) -> n + 2
    ])), 42
# parallel(): all tasks start at once; results keep input order while
# call_order reflects the delay-driven completion order.
describe 'parallel()', ->
  it 'returns proper results', ->
    call_order = []
    async.parallel(getFunctionsArray call_order).then (results) ->
      deepEqual call_order, [3, 1, 2]
      deepEqual results, [1, 2, [3, 3]]
  it 'handles an empty array', ->
    becomes async.parallel([]), []
  it 'handles errors', ->
    isRejected(
      async.parallel([ (-> throw 'error1'), -> throw 'error2' ])
      /^error1$/
    )
  it 'accepts an object', ->
    call_order = []
    async.parallel(getFunctionsObject call_order).then (results) ->
      deepEqual call_order, [3, 1, 2]
      deepEqual results, one: 1, two: 2, three: [3, 3]
  it 'accepts a promise', ->
    becomes async.parallel(Q(getFunctionsArray [])), [1, 2, [3, 3]]
# parallelLimit(): like parallel() but at most `limit` tasks in flight.
describe 'parallelLimit()', ->
  it 'returns proper results', ->
    call_order = []
    async.parallelLimit(getFunctionsArray(call_order), 2).then (results) ->
      deepEqual call_order, [1, 3, 2]
      deepEqual results, [1, 2, [3, 3]]
  it 'handles an empty array', ->
    becomes async.parallelLimit([], 2), []
  it 'handles errors', ->
    isRejected(
      async.parallelLimit([(-> throw 'error1'), -> throw 'error2'], 1)
      /^error1$/
    )
  it 'accepts an object', ->
    call_order = []
    async.parallelLimit(getFunctionsObject(call_order), 2).then (results) ->
      deepEqual call_order, [1, 3, 2]
      deepEqual results, one: 1, two: 2, three: [3, 3]
  it 'accepts a promise', ->
    becomes async.parallelLimit(getFunctionsArray([]), 2), [1, 2, [3, 3]]
# series(): tasks run one at a time, so call_order matches input order.
describe 'series()', ->
  it 'returns proper results', ->
    call_order = []
    async.series(getFunctionsArray call_order).then (results) ->
      deepEqual results, [1, 2, [3, 3]]
      deepEqual call_order, [1, 2, 3]
  it 'handles an empty array', ->
    becomes async.series([]), []
  it 'handles errors', ->
    isRejected(
      async.series([
        -> throw 'error1'
        ->
          fail 'should not be called'
          'error2'
      ])
      /^error1$/
    )
  it 'accepts an object', ->
    call_order = []
    async.series(getFunctionsObject call_order).then (results) ->
      deepEqual results, one: 1, two: 2, three: [3,3]
      deepEqual call_order, [1,2,3]
  it 'accepts a promise', ->
    becomes async.series(getFunctionsArray []), [1, 2, [3, 3]]
# each(): parallel iteration — args records completion order.
describe 'each()', ->
  it 'runs in parallel', ->
    args = []
    async.each([1, 3, 2], eachIterator.bind(this, args)).then ->
      deepEqual args, [1, 2, 3]
  it 'accepts an empty array', ->
    async.each([], -> fail 'iterator should not be called')
  it 'handles errors', ->
    isRejected async.each([1, 2, 3], -> throw 'error1'), /^error1$/
  it 'is aliased to forEach', -> equal async.forEach, async.each
  it 'accepts promises', ->
    args = []
    async.each(Q([1, 3, 2]), Q(eachIterator.bind(this, args))).then ->
      deepEqual args, [1, 2, 3]
# eachSeries(): serial iteration — an error stops further iteration.
describe 'eachSeries()', ->
  it 'returns proper results', ->
    args = []
    async.eachSeries([1, 3, 2], eachIterator.bind(this, args)).then ->
      deepEqual args, [1, 3, 2]
  it 'accepts an empty array', ->
    async.eachSeries([], -> fail 'iterator should not be called')
  it 'handles errors', ->
    call_order = []
    async.eachSeries([1, 2, 3], (x) ->
      call_order.push x
      throw 'error1'
    )
    .then(-> fail 'then() should not be invoked')
    .catch (err) ->
      equal err, 'error1'
      deepEqual call_order, [1]
  it 'is aliased to forEachSeries', ->
    equal async.forEachSeries, async.eachSeries
  it 'accepts promises', ->
    args = []
    async.eachSeries(Q([1, 3, 2]), Q(eachIterator.bind(this, args))).then ->
      deepEqual args, [1, 3, 2]
# eachLimit(): bounded-concurrency iteration (limit 0 processes nothing).
describe 'eachLimit()', ->
  it 'accepts an empty array', ->
    async.eachLimit([], 2, -> fail 'iterator should not be called')
  it 'can handle limit < input.length', ->
    args = []
    arr = [0..9]
    async.eachLimit(arr, 2, (x) -> Q.delay(x*5).then -> args.push x).then ->
      deepEqual args, arr
  it 'can handle limit = input.length', ->
    args = []
    arr = [0..9]
    async.eachLimit(arr, arr.length, eachIterator.bind(this, args)).then ->
      deepEqual args, arr
  it 'can handle limit > input.length', ->
    args = []
    arr = [0..9]
    async.eachLimit(arr, 20, eachIterator.bind(this, args)).then ->
      deepEqual args, arr
  it 'can handle limit = 0', ->
    async.eachLimit([0..5], 0, -> fail 'iterator should not be called')
  it 'can handle errors', ->
    isRejected(
      async.eachLimit [0,1,2], 3, (x) -> throw 'error1' if x is 2
      /^error1$/
    )
  it 'is aliased to forEachLimit', -> equal async.forEachLimit, async.eachLimit
  it 'accepts promises', ->
    args = []
    arr = [0..9]
    async.eachLimit(Q(arr), Q(2), Q((x) -> Q.delay(x*5).then -> args.push x))
    .then ->
      deepEqual args, arr
describe 'map()', ->
it 'returns proper results', ->
call_order = []
async.map([1, 3, 2], mapIterator.bind(this, call_order)).then (results) ->
deepEqual call_order, [1, 2, 3]
deepEqual results, [2, 6, 4]
it 'does not modify original array', ->
a = [1, 2, 3]
async.map(a, (x) -> x*2).then (results) ->
deepEqual results, [2, 4, 6]
deepEqual a, [1, 2, 3]
it 'handles errors', ->
isRejected async.map([1, 2, 3], -> throw 'error1'), /^error1$/
it 'accepts promises', ->
becomes async.map(Q([1, 3, 2]), Q(mapIterator.bind(this, []))), [2, 6, 4]
describe 'mapSeries()', ->
it 'returns proper results', ->
call_order = []
async.mapSeries([1, 3, 2], mapIterator.bind(this, call_order)).then (res) ->
deepEqual call_order, [1, 3, 2]
deepEqual res, [2, 6, 4]
it 'handles errors', ->
isRejected async.mapSeries([1, 2, 3], -> throw 'error1'), /^error1$/
it 'accepts promises', ->
becomes async.mapSeries(Q([1, 3, 2]), Q(mapIterator.bind(this, []))),
[2, 6, 4]
describe 'mapLimit()', ->
it 'accepts an empty array', ->
async.mapLimit [], 2, -> fail 'iterator should not be called'
it 'can handle limit < input.length', ->
call_order = []
async.mapLimit([2,4,3], 2, mapIterator.bind(this, call_order)).then (res) ->
deepEqual call_order, [2, 4, 3], 'proper order'
deepEqual res, [4, 8, 6], 'right results'
it 'can handle limit = input.length', ->
args = []
arr = [0..9]
async.mapLimit(arr, arr.length, mapIterator.bind(this, args)).then (res) ->
deepEqual args, arr
deepEqual res, arr.map (n) -> n*2
it 'can handle limit > input.length', ->
call_order = []
arr = [0..9]
async.mapLimit(arr, 20, mapIterator.bind(this, call_order)).then (res) ->
deepEqual call_order, arr
deepEqual res, arr.map (n) -> n*2
it 'can handle limit = 0', ->
async.mapLimit([0..5], 0, -> fail 'iterator should not be called')
it 'can handle errors', ->
isRejected(
async.mapLimit [0,1,2], 3, (x) -> throw 'error1' if x is 2
/^error1$/
)
it 'accepts promises', ->
becomes async.mapLimit(Q([2,4,3]), Q(2), Q(mapIterator.bind(this, []))),
[4, 8, 6]
describe 'reduce()', ->
it 'returns proper result', ->
call_order = []
async.reduce([1, 2, 3], 0, (a, x) ->
call_order.push x
a + x
).then (res) ->
equal res, 6
deepEqual call_order, [1, 2, 3]
it 'works async', ->
becomes async.reduce([1, 3, 2], 0, (a, x) ->
Q.delay(Math.random()*100).thenResolve a+x
), 6
it 'handles errors', ->
isRejected async.reduce([1, 2, 3], 0, -> throw 'error1'), /^error1$/
it 'is aliased to inject', -> equal async.inject, async.reduce
it 'is aliased to foldl', -> equal async.foldl, async.reduce
it 'accepts promises', ->
becomes async.reduce(Q([1, 3, 2]), Q(0), Q((a, x) -> a+x)), 6
describe 'reduceRight()', ->
it 'returns proper result', ->
call_order = []
a = [1, 2, 3]
async.reduceRight(a, 0, (a, x) ->
call_order.push x
a + x
).then (res) ->
equal res, 6
deepEqual call_order, [3, 2, 1]
deepEqual a, [1, 2, 3]
it 'is aliased to foldr', -> equal async.foldr, async.reduceRight
it 'accepts promises', ->
becomes async.reduceRight(Q([1, 2, 3]), Q(0), Q((a, x) -> a+x)), 6
describe 'filter()', ->
it 'returns proper results', ->
becomes async.filter([3, 1, 2], filterIterator), [3, 1]
it 'does not modify input', ->
a = [3, 1, 2]
async.filter(a, (x) -> Q x % 2).then (res) ->
deepEqual res, [3,1]
deepEqual a, [3, 1, 2]
it 'is aliased to select', -> equal async.select, async.filter
it 'accepts promises', ->
becomes async.filter(Q([3, 1, 2]), Q(filterIterator)), [3, 1]
describe 'filterSeries()', ->
it 'returns proper results', ->
becomes async.filterSeries([3, 1, 2], filterIterator), [3, 1]
it 'is aliased to selectSeries', ->
equal async.selectSeries, async.filterSeries
it 'accepts promises', ->
becomes async.filterSeries(Q([3, 1, 2]), Q(filterIterator)), [3, 1]
describe 'reject()', ->
it 'returns proper results', ->
becomes async.reject([3, 1, 2], filterIterator), [2]
it 'does not modify input', ->
a = [3, 1, 2]
async.reject(a, (x) -> Q x % 2).then (res) ->
deepEqual res, [2]
deepEqual a, [3, 1, 2]
it 'accepts promises', ->
becomes async.reject(Q([3, 1, 2]), Q(filterIterator)), [2]
describe 'rejectSeries()', ->
it 'returns proper results', ->
becomes async.rejectSeries([3, 1, 2], filterIterator), [2]
it 'accepts promises', ->
becomes async.rejectSeries(Q([3, 1, 2]), Q(filterIterator)), [2]
describe 'some()', ->
it 'finds something', ->
becomes async.some([3, 1, 2], (x) -> Q.delay(0).thenResolve x is 1), true
it 'finds nothing', ->
becomes async.some([3, 2, 1], (x) -> Q x is 10), false
it 'is aliased to any', -> equal async.any, async.some
it 'returns early on match', ->
call_order = []
async.some([1, 2, 3], (x) -> Q.delay(x*25).then ->
call_order.push x
x is 1
).then(-> call_order.push 'resolved')
.delay(100)
.then(-> deepEqual call_order, [1, 'resolved', 2, 3])
it 'accepts promises', ->
becomes async.some(Q([3, 1, 2]), Q((x) -> Q.delay(0).thenResolve x is 1)),
true
describe 'every()', ->
it 'matches everything', ->
becomes async.every([1, 2, 3], (x) -> Q.delay(0).thenResolve x < 4), true
it 'matches not everything', ->
becomes async.every([1, 2, 3], (x) -> Q.delay(0).thenResolve x % 2), false
it 'is aliased to all', -> equal async.all, async.every
it 'returns early on mis-match', ->
call_order = []
async.every([1, 2, 3], (x) -> Q.delay(x*25).then ->
call_order.push x
x is 1
).then(-> call_order.push 'resolved')
.delay(100)
.then(-> deepEqual call_order, [1, 2, 'resolved', 3])
it 'accepts promises', ->
becomes async.every(Q([1, 2, 3]), Q((x) -> Q.delay(0).thenResolve x < 4)),
true
describe 'detect()', ->
it 'returns proper results', ->
call_order = []
async.detect([3, 2, 1], detectIterator.bind(this, call_order))
.then (res) ->
call_order.push 'resolved'
equal res, 2
.delay(100)
.then -> deepEqual call_order, [1, 2, 'resolved', 3]
it 'returns one of multiple matches', ->
call_order = []
async.detect([3,2,2,1,2], detectIterator.bind(this, call_order))
.then (res) ->
call_order.push 'resolved'
equal res, 2
.delay(100)
.then ->
deepEqual call_order.filter((c) -> c isnt 'resolved'), [1, 2, 2, 2, 3]
i = call_order.indexOf 'resolved'
ok (i < 5), 'short circuited early'
it 'handles errors', ->
isRejected(
async.detect([1, 2, 3], (x) -> if x is 2 then throw 'error1' else false)
/^error1$/
)
it 'accepts promises', ->
becomes async.detect(Q([1, 2, 3]), Q(detectIterator.bind(this, []))), 2
describe 'detectSeries()', ->
it 'returns proper results', ->
call_order = []
async.detectSeries([3,2,1], detectIterator.bind(this, call_order))
.then (res) ->
call_order.push 'resolved'
equal res, 2
.delay(200)
.then -> deepEqual call_order, [3, 2, 'resolved']
it 'returns one of multiple matches', ->
call_order = []
async.detectSeries([3,2,2,1,2], detectIterator.bind(this, call_order))
.then (res) ->
call_order.push 'resolved'
equal res, 2
.delay(200)
.then -> deepEqual call_order, [3, 2, 'resolved']
it 'accepts promises', ->
becomes async.detectSeries(Q([3,2,1]), Q(detectIterator.bind(this, []))), 2
describe 'sortBy()', ->
it 'returns proper results', ->
becomes(
async.sortBy([{a:1},{a:15},{a:6}], (x) -> Q.delay(0).thenResolve x.a)
[{a:1},{a:6},{a:15}]
)
it 'accepts promises', ->
becomes async.sortBy(Q([{a:2},{a:1}]), Q((x) -> Q(x.a))), [{a:1},{a:2}]
describe 'concat()', ->
it 'returns just-in-time results', ->
call_order = []
iterator = (x) ->
Q.delay(x*25).then ->
call_order.push x
[x..1]
async.concat([1,3,2], iterator).then (res) ->
deepEqual res, [1, 2, 1, 3, 2, 1]
deepEqual call_order, [1, 2, 3]
it 'handles errors', ->
isRejected async.concat([1,2,3], -> throw 'error1'), /^error1$/
it 'accepts promises', ->
iterator = (x) -> Q.delay(x*25).then -> [x..1]
becomes async.concat(Q([1,3,2]), Q(iterator)), [1, 2, 1, 3, 2, 1]
describe 'concatSeries()', ->
it 'returns ordered results', ->
call_order = []
iterator = (x) ->
Q.delay(x*25).then ->
call_order.push x
[x..1]
async.concatSeries([1,3,2], iterator).then (res) ->
deepEqual res, [1,3,2,1,2,1]
deepEqual call_order, [1,3,2]
it 'handles errors', ->
isRejected async.concatSeries([1,2,3], -> throw 'error1'), /^error1$/
it 'accepts promises', ->
iterator = (x) -> Q.delay(x*25).then -> [x..1]
becomes async.concatSeries(Q([1,3,2]), Q(iterator)), [1,3,2,1,2,1]
describe 'until()', ->
it 'returns proper results', ->
call_order = []
count = 0
async.until(
->
call_order.push ['test', count]
count is 5
->
call_order.push ['iterator', count]
count++
).then ->
deepEqual call_order, [
['test', 0]
['iterator', 0], ['test', 1]
['iterator', 1], ['test', 2]
['iterator', 2], ['test', 3]
['iterator', 3], ['test', 4]
['iterator', 4], ['test', 5]
]
equal count, 5
it 'handles test errors', ->
isRejected async.until((-> throw 'error1'), ->), /^error1$/
it 'handles iterator errors', ->
isRejected async.until((-> false), -> throw 'error1'), /^error1$/
it 'accepts promises', ->
count = 0
async.until(Q(-> count is 5), Q(-> count++)).then -> equal count, 5
describe 'doUntil()', ->
it 'returns proper results', ->
call_order = []
count = 0
async.doUntil(
->
call_order.push ['iterator', count]
count++
->
call_order.push ['test', count]
count is 5
).then ->
deepEqual call_order, [
['iterator', 0], ['test', 1]
['iterator', 1], ['test', 2]
['iterator', 2], ['test', 3]
['iterator', 3], ['test', 4]
['iterator', 4], ['test', 5]
]
equal count, 5
it 'handles test errors', ->
isRejected async.doUntil((->), -> throw 'error1'), /^error1$/
it 'handles iterator errors', ->
isRejected async.doUntil((-> throw 'error1'), -> false), /^error1$/
it 'accepts promises', ->
count = 0
async.doUntil(Q(-> count++), Q(-> count is 5)).then -> equal count, 5
describe 'whilst()', ->
it 'returns proper results', ->
call_order = []
count = 0
async.whilst(
->
call_order.push ['test', count]
count < 5
->
call_order.push ['iterator', count]
count++
).then ->
deepEqual call_order, [
['test', 0]
['iterator', 0], ['test', 1]
['iterator', 1], ['test', 2]
['iterator', 2], ['test', 3]
['iterator', 3], ['test', 4]
['iterator', 4], ['test', 5]
]
equal count, 5
it 'handles test errors', ->
isRejected async.whilst((-> throw 'error1'), ->), /^error1$/
it 'handles iterator errors', ->
isRejected async.whilst((-> true), -> throw 'error1'), /^error1$/
it 'accepts promises', ->
count = 0
async.whilst(Q(-> count < 5), Q(-> count++)).then -> equal count, 5
describe 'doWhilst()', ->
it 'returns proper results', ->
call_order = []
count = 0
async.doWhilst(
->
call_order.push ['iterator', count]
count++
->
call_order.push ['test', count]
count < 5
).then ->
deepEqual call_order, [
['iterator', 0], ['test', 1]
['iterator', 1], ['test', 2]
['iterator', 2], ['test', 3]
['iterator', 3], ['test', 4]
['iterator', 4], ['test', 5]
]
equal count, 5
it 'handles test errors', ->
isRejected async.doWhilst((->), -> throw 'error1'), /^error1$/
it 'handles iterator errors', ->
isRejected async.doWhilst((-> throw 'error1'), -> true), /^error1$/
it 'accepts promises', ->
count = 0
async.doWhilst(Q(-> count++), Q(-> count < 5)).then -> equal count, 5
describe 'queue()', ->
testQueue = (concurrency, changeTo=null) ->
call_order = []
delays = [160, 80, 240, 80]
# worker1: --1-4
# worker2: -2---3
# order of completion: 2,1,4,3
q = async.queue(
(task) ->
Q.delay(delays.shift()).then ->
call_order.push "process #{task}"
'arg'
concurrency
)
concurrency ?= 1
push1 = q.push(1).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 1'
push2 = q.push(2).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 2'
push3 = q.push(3).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 3'
push4 = q.push(4)
push4.start.then -> call_order.push 'started 4'
push4.then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 4'
equal q.length(), 4, 'queue should be length 4 after all pushes'
equal q.concurrency, concurrency,
"concurrency should be #{concurrency} after pushes"
if changeTo?
concurrency = q.concurrency = changeTo
drain = Q.promise (resolve, reject) ->
q.on 'drain', -> process.nextTick ->
try
co = if concurrency is 2
[ 'process 2', 'resolved 2'
'process 1', 'resolved 1', 'started 4',
'process 4', 'resolved 4'
'process 3', 'resolved 3' ]
else
[ 'process 1', 'resolved 1'
'process 2', 'resolved 2'
'process 3', 'resolved 3', 'started 4',
'process 4', 'resolved 4' ]
deepEqual call_order, co, 'call_order should be correct'
equal q.concurrency, concurrency,
"concurrency should be #{concurrency} in drain()"
equal q.length(), 0, 'queue should be length 0 in drain()'
resolve()
catch err
reject err
Q.all [push1, push2, push3, push4, drain]
it 'returns proper results', -> testQueue 2
it 'defaults to concurrency of 1', -> testQueue()
it 'handles errors', ->
results = []
q = async.queue (({name}) -> throw 'fooError' if name is 'foo'), 2
drain = Q.promise (resolve, reject) ->
q.on 'drain', -> process.nextTick ->
try
deepEqual results, ['bar', 'fooError']
resolve()
catch err
reject err
push1 = q.push(name: 'bar')
.then(-> results.push 'bar')
.catch(-> results.push 'barError')
push2 = q.push(name: 'foo')
.then(-> results.push 'foo')
.catch(-> results.push 'fooError')
Q.all [drain, push1, push2]
it 'allows concurrency change', -> testQueue(2, 1)
it 'supports unshift()', ->
queue_order = []
q = async.queue ((task) -> queue_order.push task), 1
Q.all([4..1].map(q.unshift.bind q)).then ->
deepEqual queue_order, [1, 2, 3, 4]
it 'allows pushing multiple tasks at once', ->
call_order = []
delays = [160,80,240,80]
q = async.queue(
(task) ->
Q.delay(delays.shift()).then ->
call_order.push "process #{task}"
task
2
)
pushes = q.push([1, 2, 3, 4]).map (p) ->
p.then (arg) -> call_order.push "resolved #{arg}"
equal q.length(), 4, 'queue length is 4 after bulk push'
equal q.concurrency, 2, 'concurrency is 2 after bulk push'
Q.all(pushes).then ->
deepEqual call_order, [
'process 2', 'resolved 2'
'process 1', 'resolved 1'
'process 4', 'resolved 4'
'process 3', 'resolved 3'
]
equal q.concurrency, 2, 'concurrency is 2 after completion'
equal q.length(), 0, 'queue length is 0 after completion'
describe 'cargo()', ->
it 'returns proper results', ->
call_order = []
delays = [160, 160, 80]
# worker: --12--34--5-
# order of completion: 1,2,3,4,5
c = async.cargo(
(tasks) ->
Q.delay(delays.shift()).then ->
call_order.push "process #{tasks}"
'arg'
2
)
push1 = c.push(1).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 1'
push2 = c.push(2).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 2'
equal c.length(), 2
# async pushes
push3 = Q.delay(60).then ->
c.push(3).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 3'
push45 = Q.delay(120).then ->
push4 = c.push(4).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 4'
equal c.length(), 2
push5 = c.push(5).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 5'
Q.all [push4, push5]
Q.all([push1, push2, push3, push45]).then ->
deepEqual call_order, [
'process 1,2', 'resolved 1', 'resolved 2'
'process 3,4', 'resolved 3', 'resolved 4'
'process 5', 'resolved 5'
]
equal c.length(), 0
it 'allows pushing multiple tasks at once', ->
call_order = []
delays = [120, 40]
# worker: -123-4-
# order of completion: 1,2,3,4
c = async.cargo(
(tasks) ->
Q.delay(delays.shift()).then ->
call_order.push "process #{tasks}"
tasks.join()
3
)
pushes = c.push([1..4]).map (p) -> p.then (arg) ->
call_order.push "resolved #{arg}"
equal c.length(), 4
Q.all(pushes).then ->
deepEqual call_order, [
'process 1,2,3', 'resolved 1,2,3'
'resolved 1,2,3', 'resolved 1,2,3'
'process 4', 'resolved 4'
]
equal c.length(), 0
describe 'memoize()', ->
it 'memoizes a function', ->
call_order = []
fn = (arg1, arg2) ->
call_order.push ['fn', arg1, arg2]
Q arg1 + arg2
fn2 = async.memoize fn
Q.all([
becomes(fn2(1, 2), 3)
becomes(fn2(1, 2), 3)
becomes(fn2(2, 2), 4)
]).then -> deepEqual call_order, [['fn', 1, 2], ['fn', 2, 2]]
it 'handles errors', ->
fn = (arg1, arg2) -> throw 'error1'
isRejected async.memoize(fn)(1, 2), /^error1$/
it 'handles multiple async calls', ->
fn = (arg1, arg2) -> Q.delay(10).then -> [arg1, arg2]
fn2 = async.memoize fn
Q.all [
becomes fn2(1, 2), [1, 2]
becomes fn2(1, 2), [1, 2]
]
it 'accepts a custom hash function', ->
fn = (arg1, arg2) -> Q arg1 + arg2
fn2 = async.memoize fn, -> 'custom hash'
Q.all [
becomes fn2(1, 2), 3
becomes fn2(2, 2), 3
]
it 'lets you futz with the cache', ->
fn = async.memoize (arg) -> fail 'Function should never be called'
fn.memo.foo = 'bar'
becomes fn('foo'), 'bar'
# unmemoize(): recovers the uncached behavior of a memoized function — every
# call hits the underlying fn again, so repeated (1, 2) calls both record.
describe 'unmemoize()', ->
  it 'returns the original function', ->
    call_order = []
    fn = (arg1, arg2) ->
      call_order.push ['fn', arg1, arg2]
      Q arg1 + arg2
    fn2 = async.memoize fn
    fn3 = async.unmemoize fn2
    Q.all([
      becomes(fn3(1, 2), 3)
      becomes(fn3(1, 2), 3)
      becomes(fn3(2, 2), 4)
    ]).then -> deepEqual call_order, [['fn', 1, 2], ['fn', 1, 2], ['fn', 2, 2]]
  # unmemoize() is also a no-op passthrough for plain (never-memoized) fns
  it 'works on not-memoized functions', ->
    fn = (arg1, arg2) -> Q arg1 + arg2
    fn2 = async.unmemoize fn
    becomes fn2(1, 2), 3
# times(): calls the iterator with 0..n-1 concurrently, keeping index order
# in the results even when later indices resolve first.
describe 'times()', ->
  it 'returns proper results', ->
    becomes async.times(5, (n) -> Q n), [0..4]
  it 'maintains order', ->
    becomes async.times(3, (n) -> Q.delay((3-n)*25).thenResolve n), [0..2]
  it 'accepts n=0', ->
    async.times(0, -> fail 'iterator should not be called')
  it 'handles errors', ->
    isRejected async.times(3, -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    becomes async.times(Q(5), Q((n) -> Q n)), [0..4]
# timesSeries(): the serial variant — decreasing delays still execute in
# index order because each call waits for the previous.
describe 'timesSeries()', ->
  it 'returns proper results', ->
    call_order = []
    async.timesSeries(
      5
      (n) ->
        Q.delay(100-n*10).then ->
          call_order.push n
          n
    ).then (res) ->
      deepEqual call_order, [0..4]
      deepEqual res, [0..4]
  it 'handles errors', ->
    isRejected async.timesSeries(5, -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    becomes async.timesSeries(Q(5), Q((n) -> Q n)), [0..4]
### FIXME: disabled — these log()/dir() specs temporarily patch console methods and currently spew output when run; re-enable once the console patching is fixed
['log', 'dir'].forEach (name) ->
describe "#{name}()", ->
it "calls console.#{name}() on results", ->
fn = (arg1) ->
equal arg1, 'one'
Q.delay(0).thenResolve 'test'
fn_err = (arg1) ->
equal arg1, 'one'
Q.delay(0).thenReject 'error'
_console_fn = console[name]
_error = console.error
console[name] = (val) ->
console[name] = _console_fn
equal val, 'test'
equal arguments.length, 1
async[name](fn, 'one').then ->
console.error = (val) ->
console.error = _error
equal val, 'error'
async[name] fn_err, 'one'
###
| true | async = require './'
Q = require 'q'
require('mocha')
chai = require 'chai'
chai.use require 'chai-as-promised'
{ assert: { strictEqual: equal, deepEqual, isRejected, fail, becomes, ok }
} = chai
# Shared test iterators: each resolves after a delay proportional to its
# input (25ms per unit), recording visit order so specs can assert on the
# sequence in which items complete.
eachIterator = (sink, value) ->
  Q.delay(25 * value).then -> sink.push value
mapIterator = (order, value) ->
  Q.delay(25 * value).then ->
    order.push value
    value * 2
filterIterator = (value) ->
  Q.delay(25 * value).thenResolve value % 2
detectIterator = (order, value) ->
  Q.delay(25 * value).then ->
    order.push value
    value is 2
eachNoCallbackIterator = (value) ->
  equal value, 1
  Q()
# Fixture factories: each task pushes its id into call_order on completion,
# with staggered delays so parallel vs. serial execution orders differ.
getFunctionsObject = (call_order) ->
  task = (id, ms, value) -> ->
    Q.delay(ms).then ->
      call_order.push id
      value
  one: task(1, 125, 1)
  two: task(2, 200, 2)
  three: task(3, 50, [3, 3])
getFunctionsArray = (call_order) ->
  task = (id, ms, value) -> ->
    Q.delay(ms).then ->
      call_order.push id
      value
  [task(1, 50, 1), task(2, 100, 2), task(3, 25, [3, 3])]
# forever(): repeats the task until it throws/rejects; the thrown value
# propagates as the rejection of the returned promise.
describe 'forever()', ->
  it "runs until it doesn't", ->
    counter = 0
    isRejected async.forever(->
      throw 'too big!' if ++counter is 50
      Q(counter)
    ), /^too big!$/
  it 'accepts a promise for a function', ->
    counter = 0
    isRejected async.forever(Q(->
      throw 'too big!' if ++counter is 50
      Q(counter)
    )), /^too big!$/
# Builds three verifier functions for the applyEach* specs: each asserts its
# argument is 5, then resolves with 1/2/3 after staggered delays (100/50/150ms)
# while recording its label in call_order so completion order can be checked.
applyOneTwoThree = (call_order) ->
  spec = [['one', 100, 1], ['two', 50, 2], ['three', 150, 3]]
  spec.map ([label, ms, result]) ->
    (val) ->
      equal val, 5
      Q.delay(ms).then ->
        call_order.push label
        result
# applyEach(): calls every function with the same argument in parallel; when
# the argument is omitted it returns a partially-applied function instead.
describe 'applyEach()', ->
  it 'immediately applies', ->
    async.applyEach(applyOneTwoThree(call_order = []), 5).then ->
      deepEqual call_order, ['two', 'one', 'three']
  it 'partially applies', ->
    async.applyEach(applyOneTwoThree(call_order = []))(5).then ->
      deepEqual call_order, ['two', 'one', 'three']
describe 'applyEachSeries()', ->
  it 'runs serially', ->
    async.applyEachSeries(applyOneTwoThree(call_order = []), 5).then ->
      deepEqual call_order, ['one', 'two', 'three']
# compose(): right-to-left function composition (compose(f, g, h)(x) runs
# h, then g, then f); rejections short-circuit and `this` is forwarded.
describe 'compose()', ->
  it 'composes functions', ->
    add2 = (n) ->
      equal n, 3
      Q.delay(50).thenResolve n+2
    mul3 = (n) ->
      equal n, 5
      Q.delay(15).thenResolve n*3
    add1 = (n) ->
      equal n, 15
      Q.delay(100).thenResolve n+1
    add2mul3add1 = async.compose add1, mul3, add2
    becomes add2mul3add1(3), 16
  it 'handles errors', ->
    testerr = new Error 'test'
    add2 = (n) ->
      equal n, 3
      Q.delay(50).thenResolve n+2
    mul3 = (n) ->
      equal n, 5
      Q.delay(15).thenReject testerr
    add1 = (n) ->
      fail 'add1 should not get called'
      Q.delay(100).thenResolve n+1
    add2mul3add1 = async.compose add1, mul3, add2
    isRejected add2mul3add1(3), testerr
  it 'binds properly', ->
    testerr = new Error 'test'
    testcontext = name: 'PI:NAME:<NAME>END_PI'
    add2 = (n) ->
      equal this, testcontext
      Q.delay(50).thenResolve n+2
    mul3 = (n) ->
      equal this, testcontext
      Q.delay(15).thenResolve n*3
    add2mul3 = async.compose mul3, add2
    becomes add2mul3.call(testcontext, 3), 15
# auto(): dependency-driven task runner — ['dep1', 'dep2', fn] entries wait
# for their named dependencies; fn receives the accumulated results object.
describe 'auto()', ->
  it 'runs', ->
    callOrder = []
    async.auto(
      task1: ['task2', -> Q.delay(300).then -> callOrder.push 'task1']
      task2: -> Q.delay(50).then -> callOrder.push 'task2'
      task3: ['task2', -> callOrder.push 'task3']
      task4: ['task1', 'task2', -> callOrder.push 'task4']
      task5: ['task2', -> Q.delay(200).then -> callOrder.push 'task5']
      task6: ['task2', -> Q.delay(100).then -> callOrder.push 'task6']
    ).then ->
      deepEqual callOrder,
        ['task2', 'task3', 'task6', 'task5', 'task1', 'task4']
  it 'petrifies', ->
    callOrder = []
    async.auto(
      task1: ['task2', -> Q.delay(100).then -> callOrder.push 'task1']
      task2: -> Q.delay(200).then -> callOrder.push 'task2'
      task3: ['task2', -> callOrder.push 'task3']
      task4: ['task1', 'task2', -> callOrder.push 'task4']
    ).then ->
      deepEqual callOrder, ['task2', 'task3', 'task1', 'task4']
  it 'has results', ->
    callOrder = []
    async.auto(
      task1: [
        'task2'
        (results) ->
          equal results.task2, 'task2'
          Q.delay(25).then ->
            callOrder.push 'task1'
            ['task1a', 'task1b']
      ]
      task2: -> Q.delay(50).then ->
        callOrder.push 'task2'
        'task2'
      task3: [
        'task2'
        (results) ->
          equal results.task2, 'task2'
          callOrder.push 'task3'
          undefined
      ]
      task4: [
        'task1'
        'task2'
        (results) ->
          deepEqual results.task1, ['task1a', 'task1b']
          equal results.task2, 'task2'
          callOrder.push 'task4'
          'task4'
      ]
    ).then (results) ->
      deepEqual callOrder, ['task2', 'task3', 'task1', 'task4']
      deepEqual results,
        task1: ['task1a', 'task1b']
        task2: 'task2'
        task3: undefined
        task4: 'task4'
  it 'runs with an empty object', -> async.auto {}
  it 'errors out properly', ->
    isRejected async.auto(
      task1: -> throw 'testerror'
      task2: ['task1', -> fail 'task2 should not be called']
      task3: -> throw 'testerror2'
    ), /^testerror$/
# waterfall(): each task receives the previous task's resolved value; the
# array-destructuring params unpack multi-value results.
describe 'waterfall()', ->
  it 'runs in the proper order', ->
    call_order = []
    async.waterfall([
      ->
        call_order.push 'fn1'
        Q.delay(0).thenResolve ['one', 'two']
      ([arg1, arg2]) ->
        call_order.push 'fn2'
        equal arg1, 'one'
        equal arg2, 'two'
        Q.delay(25).thenResolve [arg1, arg2, 'three']
      ([arg1, arg2, arg3]) ->
        call_order.push 'fn3'
        deepEqual [arg1, arg2, arg3], ['one', 'two', 'three']
        'four'
      (arg4) ->
        call_order.push 'fn4'
        'test'
    ]).then (result) ->
      equal result, 'test'
      deepEqual call_order, ['fn1', 'fn2', 'fn3', 'fn4']
  it 'handles an empty array', -> async.waterfall []
  it 'handles errors', ->
    isRejected async.waterfall([
      -> throw 'errzor'
      -> fail 'next function should not be called'
    ]), /^errzor$/
  it 'accepts a promise for an array of tasks', ->
    becomes async.waterfall(Q([
      -> 10
      (n) -> n + 30
      (n) -> n + 2
    ])), 42
# parallel(): runs all tasks at once; completion order [3, 1, 2] follows the
# fixture delays but results keep task order.
describe 'parallel()', ->
  it 'returns proper results', ->
    call_order = []
    async.parallel(getFunctionsArray call_order).then (results) ->
      deepEqual call_order, [3, 1, 2]
      deepEqual results, [1, 2, [3, 3]]
  it 'handles an empty array', ->
    becomes async.parallel([]), []
  it 'handles errors', ->
    isRejected(
      async.parallel([ (-> throw 'error1'), -> throw 'error2' ])
      /^error1$/
    )
  it 'accepts an object', ->
    call_order = []
    async.parallel(getFunctionsObject call_order).then (results) ->
      deepEqual call_order, [3, 1, 2]
      deepEqual results, one: 1, two: 2, three: [3, 3]
  it 'accepts a promise', ->
    becomes async.parallel(Q(getFunctionsArray [])), [1, 2, [3, 3]]
# parallelLimit(): like parallel() with at most `limit` tasks in flight —
# hence completion order [1, 3, 2] instead of [3, 1, 2].
describe 'parallelLimit()', ->
  it 'returns proper results', ->
    call_order = []
    async.parallelLimit(getFunctionsArray(call_order), 2).then (results) ->
      deepEqual call_order, [1, 3, 2]
      deepEqual results, [1, 2, [3, 3]]
  it 'handles an empty array', ->
    becomes async.parallelLimit([], 2), []
  it 'handles errors', ->
    isRejected(
      async.parallelLimit([(-> throw 'error1'), -> throw 'error2'], 1)
      /^error1$/
    )
  it 'accepts an object', ->
    call_order = []
    async.parallelLimit(getFunctionsObject(call_order), 2).then (results) ->
      deepEqual call_order, [1, 3, 2]
      deepEqual results, one: 1, two: 2, three: [3, 3]
  it 'accepts a promise', ->
    becomes async.parallelLimit(getFunctionsArray([]), 2), [1, 2, [3, 3]]
# series(): runs tasks one after another; call order equals task order.
describe 'series()', ->
  it 'returns proper results', ->
    call_order = []
    async.series(getFunctionsArray call_order).then (results) ->
      deepEqual results, [1, 2, [3, 3]]
      deepEqual call_order, [1, 2, 3]
  it 'handles an empty array', ->
    becomes async.series([]), []
  it 'handles errors', ->
    isRejected(
      async.series([
        -> throw 'error1'
        ->
          fail 'should not be called'
          'error2'
      ])
      /^error1$/
    )
  it 'accepts an object', ->
    call_order = []
    async.series(getFunctionsObject call_order).then (results) ->
      deepEqual results, one: 1, two: 2, three: [3,3]
      deepEqual call_order, [1,2,3]
  it 'accepts a promise', ->
    becomes async.series(getFunctionsArray []), [1, 2, [3, 3]]
# NOTE(review): the suites from here on repeat the collection-method specs
# that appear earlier in this dump — presumably the same file concatenated
# twice during extraction; confirm against the original repo before pruning.
# each(): iterator promises run concurrently — completion order follows delays.
describe 'each()', ->
  it 'runs in parallel', ->
    args = []
    async.each([1, 3, 2], eachIterator.bind(this, args)).then ->
      deepEqual args, [1, 2, 3]
  it 'accepts an empty array', ->
    async.each([], -> fail 'iterator should not be called')
  it 'handles errors', ->
    isRejected async.each([1, 2, 3], -> throw 'error1'), /^error1$/
  it 'is aliased to forEach', -> equal async.forEach, async.each
  it 'accepts promises', ->
    args = []
    async.each(Q([1, 3, 2]), Q(eachIterator.bind(this, args))).then ->
      deepEqual args, [1, 2, 3]
# eachSeries(): serial iteration; the first rejection stops the loop.
describe 'eachSeries()', ->
  it 'returns proper results', ->
    args = []
    async.eachSeries([1, 3, 2], eachIterator.bind(this, args)).then ->
      deepEqual args, [1, 3, 2]
  it 'accepts an empty array', ->
    async.eachSeries([], -> fail 'iterator should not be called')
  it 'handles errors', ->
    call_order = []
    async.eachSeries([1, 2, 3], (x) ->
      call_order.push x
      throw 'error1'
    )
    .then(-> fail 'then() should not be invoked')
    .catch (err) ->
      equal err, 'error1'
      deepEqual call_order, [1]
  it 'is aliased to forEachSeries', ->
    equal async.forEachSeries, async.eachSeries
  it 'accepts promises', ->
    args = []
    async.eachSeries(Q([1, 3, 2]), Q(eachIterator.bind(this, args))).then ->
      deepEqual args, [1, 3, 2]
# eachLimit(): bounded concurrency, including the limit = 0 no-op edge case.
describe 'eachLimit()', ->
  it 'accepts an empty array', ->
    async.eachLimit([], 2, -> fail 'iterator should not be called')
  it 'can handle limit < input.length', ->
    args = []
    arr = [0..9]
    async.eachLimit(arr, 2, (x) -> Q.delay(x*5).then -> args.push x).then ->
      deepEqual args, arr
  it 'can handle limit = input.length', ->
    args = []
    arr = [0..9]
    async.eachLimit(arr, arr.length, eachIterator.bind(this, args)).then ->
      deepEqual args, arr
  it 'can handle limit > input.length', ->
    args = []
    arr = [0..9]
    async.eachLimit(arr, 20, eachIterator.bind(this, args)).then ->
      deepEqual args, arr
  it 'can handle limit = 0', ->
    async.eachLimit([0..5], 0, -> fail 'iterator should not be called')
  it 'can handle errors', ->
    isRejected(
      async.eachLimit [0,1,2], 3, (x) -> throw 'error1' if x is 2
      /^error1$/
    )
  it 'is aliased to forEachLimit', -> equal async.forEachLimit, async.eachLimit
  it 'accepts promises', ->
    args = []
    arr = [0..9]
    async.eachLimit(Q(arr), Q(2), Q((x) -> Q.delay(x*5).then -> args.push x))
    .then ->
      deepEqual args, arr
# map(): concurrent transform; results keep input order.
describe 'map()', ->
  it 'returns proper results', ->
    call_order = []
    async.map([1, 3, 2], mapIterator.bind(this, call_order)).then (results) ->
      deepEqual call_order, [1, 2, 3]
      deepEqual results, [2, 6, 4]
  it 'does not modify original array', ->
    a = [1, 2, 3]
    async.map(a, (x) -> x*2).then (results) ->
      deepEqual results, [2, 4, 6]
      deepEqual a, [1, 2, 3]
  it 'handles errors', ->
    isRejected async.map([1, 2, 3], -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    becomes async.map(Q([1, 3, 2]), Q(mapIterator.bind(this, []))), [2, 6, 4]
# mapSeries(): serial transform — call order equals input order.
describe 'mapSeries()', ->
  it 'returns proper results', ->
    call_order = []
    async.mapSeries([1, 3, 2], mapIterator.bind(this, call_order)).then (res) ->
      deepEqual call_order, [1, 3, 2]
      deepEqual res, [2, 6, 4]
  it 'handles errors', ->
    isRejected async.mapSeries([1, 2, 3], -> throw 'error1'), /^error1$/
  it 'accepts promises', ->
    becomes async.mapSeries(Q([1, 3, 2]), Q(mapIterator.bind(this, []))),
      [2, 6, 4]
# mapLimit(): bounded-concurrency transform; ordering and limit edge cases.
describe 'mapLimit()', ->
  it 'accepts an empty array', ->
    async.mapLimit [], 2, -> fail 'iterator should not be called'
  it 'can handle limit < input.length', ->
    call_order = []
    async.mapLimit([2,4,3], 2, mapIterator.bind(this, call_order)).then (res) ->
      deepEqual call_order, [2, 4, 3], 'proper order'
      deepEqual res, [4, 8, 6], 'right results'
  it 'can handle limit = input.length', ->
    args = []
    arr = [0..9]
    async.mapLimit(arr, arr.length, mapIterator.bind(this, args)).then (res) ->
      deepEqual args, arr
      deepEqual res, arr.map (n) -> n*2
  it 'can handle limit > input.length', ->
    call_order = []
    arr = [0..9]
    async.mapLimit(arr, 20, mapIterator.bind(this, call_order)).then (res) ->
      deepEqual call_order, arr
      deepEqual res, arr.map (n) -> n*2
  it 'can handle limit = 0', ->
    async.mapLimit([0..5], 0, -> fail 'iterator should not be called')
  it 'can handle errors', ->
    isRejected(
      async.mapLimit [0,1,2], 3, (x) -> throw 'error1' if x is 2
      /^error1$/
    )
  it 'accepts promises', ->
    becomes async.mapLimit(Q([2,4,3]), Q(2), Q(mapIterator.bind(this, []))),
      [4, 8, 6]
# reduce(): left fold; sync and async reducers.
describe 'reduce()', ->
  it 'returns proper result', ->
    call_order = []
    async.reduce([1, 2, 3], 0, (a, x) ->
      call_order.push x
      a + x
    ).then (res) ->
      equal res, 6
      deepEqual call_order, [1, 2, 3]
  it 'works async', ->
    becomes async.reduce([1, 3, 2], 0, (a, x) ->
      Q.delay(Math.random()*100).thenResolve a+x
    ), 6
  it 'handles errors', ->
    isRejected async.reduce([1, 2, 3], 0, -> throw 'error1'), /^error1$/
  it 'is aliased to inject', -> equal async.inject, async.reduce
  it 'is aliased to foldl', -> equal async.foldl, async.reduce
  it 'accepts promises', ->
    becomes async.reduce(Q([1, 3, 2]), Q(0), Q((a, x) -> a+x)), 6
# reduceRight(): right fold — visits [3, 2, 1] and leaves the input intact.
describe 'reduceRight()', ->
  it 'returns proper result', ->
    call_order = []
    a = [1, 2, 3]
    async.reduceRight(a, 0, (a, x) ->
      call_order.push x
      a + x
    ).then (res) ->
      equal res, 6
      deepEqual call_order, [3, 2, 1]
      deepEqual a, [1, 2, 3]
  it 'is aliased to foldr', -> equal async.foldr, async.reduceRight
  it 'accepts promises', ->
    becomes async.reduceRight(Q([1, 2, 3]), Q(0), Q((a, x) -> a+x)), 6
# filter(): keeps items whose predicate resolves truthy, in input order.
describe 'filter()', ->
  it 'returns proper results', ->
    becomes async.filter([3, 1, 2], filterIterator), [3, 1]
  it 'does not modify input', ->
    a = [3, 1, 2]
    async.filter(a, (x) -> Q x % 2).then (res) ->
      deepEqual res, [3,1]
      deepEqual a, [3, 1, 2]
  it 'is aliased to select', -> equal async.select, async.filter
  it 'accepts promises', ->
    becomes async.filter(Q([3, 1, 2]), Q(filterIterator)), [3, 1]
describe 'filterSeries()', ->
  it 'returns proper results', ->
    becomes async.filterSeries([3, 1, 2], filterIterator), [3, 1]
  it 'is aliased to selectSeries', ->
    equal async.selectSeries, async.filterSeries
  it 'accepts promises', ->
    becomes async.filterSeries(Q([3, 1, 2]), Q(filterIterator)), [3, 1]
# reject(): the inverse of filter().
describe 'reject()', ->
  it 'returns proper results', ->
    becomes async.reject([3, 1, 2], filterIterator), [2]
  it 'does not modify input', ->
    a = [3, 1, 2]
    async.reject(a, (x) -> Q x % 2).then (res) ->
      deepEqual res, [2]
      deepEqual a, [3, 1, 2]
  it 'accepts promises', ->
    becomes async.reject(Q([3, 1, 2]), Q(filterIterator)), [2]
describe 'rejectSeries()', ->
  it 'returns proper results', ->
    becomes async.rejectSeries([3, 1, 2], filterIterator), [2]
  it 'accepts promises', ->
    becomes async.rejectSeries(Q([3, 1, 2]), Q(filterIterator)), [2]
# some(): resolves true on the first truthy predicate, short-circuiting.
describe 'some()', ->
  it 'finds something', ->
    becomes async.some([3, 1, 2], (x) -> Q.delay(0).thenResolve x is 1), true
  it 'finds nothing', ->
    becomes async.some([3, 2, 1], (x) -> Q x is 10), false
  it 'is aliased to any', -> equal async.any, async.some
  it 'returns early on match', ->
    call_order = []
    async.some([1, 2, 3], (x) -> Q.delay(x*25).then ->
      call_order.push x
      x is 1
    ).then(-> call_order.push 'resolved')
    .delay(100)
    .then(-> deepEqual call_order, [1, 'resolved', 2, 3])
  it 'accepts promises', ->
    becomes async.some(Q([3, 1, 2]), Q((x) -> Q.delay(0).thenResolve x is 1)),
      true
# every(): resolves false on the first falsy predicate, short-circuiting.
describe 'every()', ->
  it 'matches everything', ->
    becomes async.every([1, 2, 3], (x) -> Q.delay(0).thenResolve x < 4), true
  it 'matches not everything', ->
    becomes async.every([1, 2, 3], (x) -> Q.delay(0).thenResolve x % 2), false
  it 'is aliased to all', -> equal async.all, async.every
  it 'returns early on mis-match', ->
    call_order = []
    async.every([1, 2, 3], (x) -> Q.delay(x*25).then ->
      call_order.push x
      x is 1
    ).then(-> call_order.push 'resolved')
    .delay(100)
    .then(-> deepEqual call_order, [1, 2, 'resolved', 3])
  it 'accepts promises', ->
    becomes async.every(Q([1, 2, 3]), Q((x) -> Q.delay(0).thenResolve x < 4)),
      true
describe 'detect()', ->
it 'returns proper results', ->
call_order = []
async.detect([3, 2, 1], detectIterator.bind(this, call_order))
.then (res) ->
call_order.push 'resolved'
equal res, 2
.delay(100)
.then -> deepEqual call_order, [1, 2, 'resolved', 3]
it 'returns one of multiple matches', ->
call_order = []
async.detect([3,2,2,1,2], detectIterator.bind(this, call_order))
.then (res) ->
call_order.push 'resolved'
equal res, 2
.delay(100)
.then ->
deepEqual call_order.filter((c) -> c isnt 'resolved'), [1, 2, 2, 2, 3]
i = call_order.indexOf 'resolved'
ok (i < 5), 'short circuited early'
it 'handles errors', ->
isRejected(
async.detect([1, 2, 3], (x) -> if x is 2 then throw 'error1' else false)
/^error1$/
)
it 'accepts promises', ->
becomes async.detect(Q([1, 2, 3]), Q(detectIterator.bind(this, []))), 2
describe 'detectSeries()', ->
it 'returns proper results', ->
call_order = []
async.detectSeries([3,2,1], detectIterator.bind(this, call_order))
.then (res) ->
call_order.push 'resolved'
equal res, 2
.delay(200)
.then -> deepEqual call_order, [3, 2, 'resolved']
it 'returns one of multiple matches', ->
call_order = []
async.detectSeries([3,2,2,1,2], detectIterator.bind(this, call_order))
.then (res) ->
call_order.push 'resolved'
equal res, 2
.delay(200)
.then -> deepEqual call_order, [3, 2, 'resolved']
it 'accepts promises', ->
becomes async.detectSeries(Q([3,2,1]), Q(detectIterator.bind(this, []))), 2
describe 'sortBy()', ->
it 'returns proper results', ->
becomes(
async.sortBy([{a:1},{a:15},{a:6}], (x) -> Q.delay(0).thenResolve x.a)
[{a:1},{a:6},{a:15}]
)
it 'accepts promises', ->
becomes async.sortBy(Q([{a:2},{a:1}]), Q((x) -> Q(x.a))), [{a:1},{a:2}]
describe 'concat()', ->
it 'returns just-in-time results', ->
call_order = []
iterator = (x) ->
Q.delay(x*25).then ->
call_order.push x
[x..1]
async.concat([1,3,2], iterator).then (res) ->
deepEqual res, [1, 2, 1, 3, 2, 1]
deepEqual call_order, [1, 2, 3]
it 'handles errors', ->
isRejected async.concat([1,2,3], -> throw 'error1'), /^error1$/
it 'accepts promises', ->
iterator = (x) -> Q.delay(x*25).then -> [x..1]
becomes async.concat(Q([1,3,2]), Q(iterator)), [1, 2, 1, 3, 2, 1]
describe 'concatSeries()', ->
it 'returns ordered results', ->
call_order = []
iterator = (x) ->
Q.delay(x*25).then ->
call_order.push x
[x..1]
async.concatSeries([1,3,2], iterator).then (res) ->
deepEqual res, [1,3,2,1,2,1]
deepEqual call_order, [1,3,2]
it 'handles errors', ->
isRejected async.concatSeries([1,2,3], -> throw 'error1'), /^error1$/
it 'accepts promises', ->
iterator = (x) -> Q.delay(x*25).then -> [x..1]
becomes async.concatSeries(Q([1,3,2]), Q(iterator)), [1,3,2,1,2,1]
describe 'until()', ->
it 'returns proper results', ->
call_order = []
count = 0
async.until(
->
call_order.push ['test', count]
count is 5
->
call_order.push ['iterator', count]
count++
).then ->
deepEqual call_order, [
['test', 0]
['iterator', 0], ['test', 1]
['iterator', 1], ['test', 2]
['iterator', 2], ['test', 3]
['iterator', 3], ['test', 4]
['iterator', 4], ['test', 5]
]
equal count, 5
it 'handles test errors', ->
isRejected async.until((-> throw 'error1'), ->), /^error1$/
it 'handles iterator errors', ->
isRejected async.until((-> false), -> throw 'error1'), /^error1$/
it 'accepts promises', ->
count = 0
async.until(Q(-> count is 5), Q(-> count++)).then -> equal count, 5
describe 'doUntil()', ->
it 'returns proper results', ->
call_order = []
count = 0
async.doUntil(
->
call_order.push ['iterator', count]
count++
->
call_order.push ['test', count]
count is 5
).then ->
deepEqual call_order, [
['iterator', 0], ['test', 1]
['iterator', 1], ['test', 2]
['iterator', 2], ['test', 3]
['iterator', 3], ['test', 4]
['iterator', 4], ['test', 5]
]
equal count, 5
it 'handles test errors', ->
isRejected async.doUntil((->), -> throw 'error1'), /^error1$/
it 'handles iterator errors', ->
isRejected async.doUntil((-> throw 'error1'), -> false), /^error1$/
it 'accepts promises', ->
count = 0
async.doUntil(Q(-> count++), Q(-> count is 5)).then -> equal count, 5
describe 'whilst()', ->
it 'returns proper results', ->
call_order = []
count = 0
async.whilst(
->
call_order.push ['test', count]
count < 5
->
call_order.push ['iterator', count]
count++
).then ->
deepEqual call_order, [
['test', 0]
['iterator', 0], ['test', 1]
['iterator', 1], ['test', 2]
['iterator', 2], ['test', 3]
['iterator', 3], ['test', 4]
['iterator', 4], ['test', 5]
]
equal count, 5
it 'handles test errors', ->
isRejected async.whilst((-> throw 'error1'), ->), /^error1$/
it 'handles iterator errors', ->
isRejected async.whilst((-> true), -> throw 'error1'), /^error1$/
it 'accepts promises', ->
count = 0
async.whilst(Q(-> count < 5), Q(-> count++)).then -> equal count, 5
describe 'doWhilst()', ->
it 'returns proper results', ->
call_order = []
count = 0
async.doWhilst(
->
call_order.push ['iterator', count]
count++
->
call_order.push ['test', count]
count < 5
).then ->
deepEqual call_order, [
['iterator', 0], ['test', 1]
['iterator', 1], ['test', 2]
['iterator', 2], ['test', 3]
['iterator', 3], ['test', 4]
['iterator', 4], ['test', 5]
]
equal count, 5
it 'handles test errors', ->
isRejected async.doWhilst((->), -> throw 'error1'), /^error1$/
it 'handles iterator errors', ->
isRejected async.doWhilst((-> throw 'error1'), -> true), /^error1$/
it 'accepts promises', ->
count = 0
async.doWhilst(Q(-> count++), Q(-> count < 5)).then -> equal count, 5
describe 'queue()', ->
testQueue = (concurrency, changeTo=null) ->
call_order = []
delays = [160, 80, 240, 80]
# worker1: --1-4
# worker2: -2---3
# order of completion: 2,1,4,3
q = async.queue(
(task) ->
Q.delay(delays.shift()).then ->
call_order.push "process #{task}"
'arg'
concurrency
)
concurrency ?= 1
push1 = q.push(1).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 1'
push2 = q.push(2).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 2'
push3 = q.push(3).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 3'
push4 = q.push(4)
push4.start.then -> call_order.push 'started 4'
push4.then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 4'
equal q.length(), 4, 'queue should be length 4 after all pushes'
equal q.concurrency, concurrency,
"concurrency should be #{concurrency} after pushes"
if changeTo?
concurrency = q.concurrency = changeTo
drain = Q.promise (resolve, reject) ->
q.on 'drain', -> process.nextTick ->
try
co = if concurrency is 2
[ 'process 2', 'resolved 2'
'process 1', 'resolved 1', 'started 4',
'process 4', 'resolved 4'
'process 3', 'resolved 3' ]
else
[ 'process 1', 'resolved 1'
'process 2', 'resolved 2'
'process 3', 'resolved 3', 'started 4',
'process 4', 'resolved 4' ]
deepEqual call_order, co, 'call_order should be correct'
equal q.concurrency, concurrency,
"concurrency should be #{concurrency} in drain()"
equal q.length(), 0, 'queue should be length 0 in drain()'
resolve()
catch err
reject err
Q.all [push1, push2, push3, push4, drain]
it 'returns proper results', -> testQueue 2
it 'defaults to concurrency of 1', -> testQueue()
it 'handles errors', ->
results = []
q = async.queue (({name}) -> throw 'fooError' if name is 'foo'), 2
drain = Q.promise (resolve, reject) ->
q.on 'drain', -> process.nextTick ->
try
deepEqual results, ['bar', 'fooError']
resolve()
catch err
reject err
push1 = q.push(name: 'bar')
.then(-> results.push 'bar')
.catch(-> results.push 'barError')
push2 = q.push(name: 'foo')
.then(-> results.push 'foo')
.catch(-> results.push 'fooError')
Q.all [drain, push1, push2]
it 'allows concurrency change', -> testQueue(2, 1)
it 'supports unshift()', ->
queue_order = []
q = async.queue ((task) -> queue_order.push task), 1
Q.all([4..1].map(q.unshift.bind q)).then ->
deepEqual queue_order, [1, 2, 3, 4]
it 'allows pushing multiple tasks at once', ->
call_order = []
delays = [160,80,240,80]
q = async.queue(
(task) ->
Q.delay(delays.shift()).then ->
call_order.push "process #{task}"
task
2
)
pushes = q.push([1, 2, 3, 4]).map (p) ->
p.then (arg) -> call_order.push "resolved #{arg}"
equal q.length(), 4, 'queue length is 4 after bulk push'
equal q.concurrency, 2, 'concurrency is 2 after bulk push'
Q.all(pushes).then ->
deepEqual call_order, [
'process 2', 'resolved 2'
'process 1', 'resolved 1'
'process 4', 'resolved 4'
'process 3', 'resolved 3'
]
equal q.concurrency, 2, 'concurrency is 2 after completion'
equal q.length(), 0, 'queue length is 0 after completion'
describe 'cargo()', ->
it 'returns proper results', ->
call_order = []
delays = [160, 160, 80]
# worker: --12--34--5-
# order of completion: 1,2,3,4,5
c = async.cargo(
(tasks) ->
Q.delay(delays.shift()).then ->
call_order.push "process #{tasks}"
'arg'
2
)
push1 = c.push(1).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 1'
push2 = c.push(2).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 2'
equal c.length(), 2
# async pushes
push3 = Q.delay(60).then ->
c.push(3).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 3'
push45 = Q.delay(120).then ->
push4 = c.push(4).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 4'
equal c.length(), 2
push5 = c.push(5).then (arg) ->
equal arg, 'arg'
call_order.push 'resolved 5'
Q.all [push4, push5]
Q.all([push1, push2, push3, push45]).then ->
deepEqual call_order, [
'process 1,2', 'resolved 1', 'resolved 2'
'process 3,4', 'resolved 3', 'resolved 4'
'process 5', 'resolved 5'
]
equal c.length(), 0
it 'allows pushing multiple tasks at once', ->
call_order = []
delays = [120, 40]
# worker: -123-4-
# order of completion: 1,2,3,4
c = async.cargo(
(tasks) ->
Q.delay(delays.shift()).then ->
call_order.push "process #{tasks}"
tasks.join()
3
)
pushes = c.push([1..4]).map (p) -> p.then (arg) ->
call_order.push "resolved #{arg}"
equal c.length(), 4
Q.all(pushes).then ->
deepEqual call_order, [
'process 1,2,3', 'resolved 1,2,3'
'resolved 1,2,3', 'resolved 1,2,3'
'process 4', 'resolved 4'
]
equal c.length(), 0
describe 'memoize()', ->
it 'memoizes a function', ->
call_order = []
fn = (arg1, arg2) ->
call_order.push ['fn', arg1, arg2]
Q arg1 + arg2
fn2 = async.memoize fn
Q.all([
becomes(fn2(1, 2), 3)
becomes(fn2(1, 2), 3)
becomes(fn2(2, 2), 4)
]).then -> deepEqual call_order, [['fn', 1, 2], ['fn', 2, 2]]
it 'handles errors', ->
fn = (arg1, arg2) -> throw 'error1'
isRejected async.memoize(fn)(1, 2), /^error1$/
it 'handles multiple async calls', ->
fn = (arg1, arg2) -> Q.delay(10).then -> [arg1, arg2]
fn2 = async.memoize fn
Q.all [
becomes fn2(1, 2), [1, 2]
becomes fn2(1, 2), [1, 2]
]
it 'accepts a custom hash function', ->
fn = (arg1, arg2) -> Q arg1 + arg2
fn2 = async.memoize fn, -> 'custom hash'
Q.all [
becomes fn2(1, 2), 3
becomes fn2(2, 2), 3
]
it 'lets you futz with the cache', ->
fn = async.memoize (arg) -> fail 'Function should never be called'
fn.memo.foo = 'bar'
becomes fn('foo'), 'bar'
describe 'unmemoize()', ->
it 'returns the original function', ->
call_order = []
fn = (arg1, arg2) ->
call_order.push ['fn', arg1, arg2]
Q arg1 + arg2
fn2 = async.memoize fn
fn3 = async.unmemoize fn2
Q.all([
becomes(fn3(1, 2), 3)
becomes(fn3(1, 2), 3)
becomes(fn3(2, 2), 4)
]).then -> deepEqual call_order, [['fn',1,2],['fn',1,2,],['fn',2,2]]
it 'works on not-memoized functions', ->
fn = (arg1, arg2) -> Q arg1 + arg2
fn2 = async.unmemoize fn
becomes fn2(1, 2), 3
describe 'times()', ->
it 'returns proper results', ->
becomes async.times(5, (n) -> Q n), [0..4]
it 'maintains order', ->
becomes async.times(3, (n) -> Q.delay((3-n)*25).thenResolve n), [0..2]
it 'accepts n=0', ->
async.times(0, -> fail 'iterator should not be called')
it 'handles errors', ->
isRejected async.times(3, -> throw 'error1'), /^error1$/
it 'accepts promises', ->
becomes async.times(Q(5), Q((n) -> Q n)), [0..4]
describe 'timesSeries()', ->
it 'returns proper results', ->
call_order = []
async.timesSeries(
5
(n) ->
Q.delay(100-n*10).then ->
call_order.push n
n
).then (res) ->
deepEqual call_order, [0..4]
deepEqual res, [0..4]
it 'handles errors', ->
isRejected async.timesSeries(5, -> throw 'error1'), /^error1$/
it 'accepts promises', ->
becomes async.timesSeries(Q(5), Q((n) -> Q n)), [0..4]
### FIXME spews output for some reason
['log', 'dir'].forEach (name) ->
describe "#{name}()", ->
it "calls console.#{name}() on results", ->
fn = (arg1) ->
equal arg1, 'one'
Q.delay(0).thenResolve 'test'
fn_err = (arg1) ->
equal arg1, 'one'
Q.delay(0).thenReject 'error'
_console_fn = console[name]
_error = console.error
console[name] = (val) ->
console[name] = _console_fn
equal val, 'test'
equal arguments.length, 1
async[name](fn, 'one').then ->
console.error = (val) ->
console.error = _error
equal val, 'error'
async[name] fn_err, 'one'
###
|
[
{
"context": "room1._id\n data.authorName.should.eql '许晶鑫'\n data.attachments.length.should.eql 1",
"end": 823,
"score": 0.9995460510253906,
"start": 820,
"tag": "NAME",
"value": "许晶鑫"
},
{
"context": "'Hello'\n data.authorName.should.eql '小艾'\n ... | talk-api2x/test/controllers/service.coffee | ikingye/talk-os | 3,084 | should = require 'should'
fs = require 'fs'
path = require 'path'
async = require 'async'
Promise = require 'bluebird'
limbo = require 'limbo'
app = require '../app'
{prepare, cleanup, request, _app, requestAsync} = app
supertest = require 'supertest'
urlLib = require 'url'
serviceLoader = require 'talk-services'
qs = require 'querystring'
config = require 'config'
{
IntegrationModel
} = limbo.use 'talk'
describe 'Service#Mailgun', ->
@timeout 10000
mailgun = require './mailgun.json'
before prepare
it 'should receive the mail body from mailgun and create new page', (done) ->
async.auto
broadcast: (callback) ->
app.broadcast = (room, event, data) ->
if event is 'message:create'
data._roomId.should.eql app.room1._id
data.authorName.should.eql '许晶鑫'
data.attachments.length.should.eql 1
data.attachments[0].category.should.eql 'quote'
quote = data.attachments[0].data
quote.should.have.properties 'title', 'text'
callback()
mailgun: (callback) ->
mailgun.recipient = app.room1.email
options =
method: 'post'
url: 'services/mailgun'
body: JSON.stringify mailgun
request options, callback
, done
it 'should receive an email with attachment and create a message with files', (done) ->
async.auto
broadcast: (callback) ->
app.broadcast = (room, event, data) ->
if event is 'message:create'
# Do not save thumbnail pic in files
data.attachments.length.should.eql 2
data.attachments[0].category.should.eql 'quote'
data.attachments[1].category.should.eql 'file'
quote = data.attachments[0].data
quote.text.should.containEql 'striker' # The striker thumbnail url
file1 = data.attachments[1].data
file1.fileName.should.eql 'page.html'
callback()
mailgun: (callback) ->
mailgun.recipient = app.room1.email
req = supertest(_app).post('/' + path.join(config.apiVersion, 'services/mailgun'))
Object.keys(mailgun).forEach (key) -> req.field key, mailgun[key] if toString.call(mailgun[key]) is '[object String]'
req.attach 'document', __dirname + "/../files/page.html"
req.attach 'document', __dirname + "/../files/thumbnail.jpg"
req.end (err, res) -> callback err
, done
after cleanup
describe 'Service#ToApp', ->
before prepare
msgToken = ''
it 'should generate an appToken and redirect to the app url', (done) ->
async.auto
toApp: (callback) ->
options =
method: 'get'
url: '/services/toapp'
qs:
_sessionUserId: app.user1._id
_teamId: app.team1._id
_toId: app.user2._id
url: 'http://somewhere.com'
app.request options, (err, res) ->
res.statusCode.should.eql 302
appUrl = res.headers.location
appUrl.should.containEql 'http://somewhere.com'
appUrl.should.containEql 'msgToken'
{msgToken, userName} = qs.parse(urlLib.parse(appUrl).query)
userName.should.eql app.user1.name
callback err
, done
it 'should send message by msgToken', (done) ->
async.auto
broadcast: (callback) ->
hits = 0
app.broadcast = (channel, event, data, socketId) ->
if event is 'message:create'
hits |= 0b1
data._creatorId.should.eql app.user1._id
data._toId.should.eql app.user2._id
data._teamId.should.eql app.team1._id
quote = data.attachments[0].data
quote.title.should.eql 'hello'
quote.category.should.eql 'thirdapp'
if event is 'notification:update'
hits |= 0b10
data.text.should.containEql 'hello'
callback() if hits is 0b11
createMessage: (callback) ->
options =
method: 'post'
url: '/services/message'
body: JSON.stringify
msgToken: msgToken
attachments: [
category: 'quote'
data: title: 'hello'
]
app.request options, callback
, done
after cleanup
describe 'Service#Webhook', ->
before prepare
it 'should receive webhook and route messages to service', (done) ->
$service = serviceLoader.load 'incoming'
$broadcast = $service.then (service) ->
new Promise (resolve, reject) ->
hits = 0
app.broadcast = (channel, event, data) ->
try
if event is 'message:create' and "#{data._creatorId}" is "#{service.robot._id}"
hits |= 0b1
data.body.should.eql 'Hello'
data.authorName.should.eql '小艾'
resolve() if hits is 0b1
catch err
reject err
$integration = Promise.resolve().then ->
options =
method: 'POST'
url: '/integrations'
body:
_sessionUserId: app.user1._id
_teamId: app.team1._id
_roomId: app.room1._id
category: 'incoming'
requestAsync options
.spread (res) -> app.integration1 = res.body
$message = $integration.then (integration) ->
options =
method: 'POST'
url: "/services/webhook/#{integration.hashId}"
body:
content: 'Hello'
authorName: '小艾'
requestAsync options
Promise.all [$broadcast, $integration, $message]
.nodeify done
it 'should send error webhooks and receive an error infomation when errorTimes above 5', (done) ->
$sendMsg = Promise.each [0..6], (n) ->
# Without title or text
options =
method: 'POST'
url: "/services/webhook/#{app.integration1.hashId}"
body: authorName: '小艾'
requestAsync options
.catch (err) -> err.message.should.containEql 'Title and text can not be empty'
$checkIntegration = $sendMsg.then ->
IntegrationModel.findOneAsync _id: app.integration1._id
.then (integration) ->
integration.should.have.properties 'errorInfo', 'errorTimes'
integration.errorTimes.should.eql 6
$checkIntegration.nodeify done
after cleanup
| 202712 | should = require 'should'
fs = require 'fs'
path = require 'path'
async = require 'async'
Promise = require 'bluebird'
limbo = require 'limbo'
app = require '../app'
{prepare, cleanup, request, _app, requestAsync} = app
supertest = require 'supertest'
urlLib = require 'url'
serviceLoader = require 'talk-services'
qs = require 'querystring'
config = require 'config'
{
IntegrationModel
} = limbo.use 'talk'
describe 'Service#Mailgun', ->
@timeout 10000
mailgun = require './mailgun.json'
before prepare
it 'should receive the mail body from mailgun and create new page', (done) ->
async.auto
broadcast: (callback) ->
app.broadcast = (room, event, data) ->
if event is 'message:create'
data._roomId.should.eql app.room1._id
data.authorName.should.eql '<NAME>'
data.attachments.length.should.eql 1
data.attachments[0].category.should.eql 'quote'
quote = data.attachments[0].data
quote.should.have.properties 'title', 'text'
callback()
mailgun: (callback) ->
mailgun.recipient = app.room1.email
options =
method: 'post'
url: 'services/mailgun'
body: JSON.stringify mailgun
request options, callback
, done
it 'should receive an email with attachment and create a message with files', (done) ->
async.auto
broadcast: (callback) ->
app.broadcast = (room, event, data) ->
if event is 'message:create'
# Do not save thumbnail pic in files
data.attachments.length.should.eql 2
data.attachments[0].category.should.eql 'quote'
data.attachments[1].category.should.eql 'file'
quote = data.attachments[0].data
quote.text.should.containEql 'striker' # The striker thumbnail url
file1 = data.attachments[1].data
file1.fileName.should.eql 'page.html'
callback()
mailgun: (callback) ->
mailgun.recipient = app.room1.email
req = supertest(_app).post('/' + path.join(config.apiVersion, 'services/mailgun'))
Object.keys(mailgun).forEach (key) -> req.field key, mailgun[key] if toString.call(mailgun[key]) is '[object String]'
req.attach 'document', __dirname + "/../files/page.html"
req.attach 'document', __dirname + "/../files/thumbnail.jpg"
req.end (err, res) -> callback err
, done
after cleanup
describe 'Service#ToApp', ->
before prepare
msgToken = ''
it 'should generate an appToken and redirect to the app url', (done) ->
async.auto
toApp: (callback) ->
options =
method: 'get'
url: '/services/toapp'
qs:
_sessionUserId: app.user1._id
_teamId: app.team1._id
_toId: app.user2._id
url: 'http://somewhere.com'
app.request options, (err, res) ->
res.statusCode.should.eql 302
appUrl = res.headers.location
appUrl.should.containEql 'http://somewhere.com'
appUrl.should.containEql 'msgToken'
{msgToken, userName} = qs.parse(urlLib.parse(appUrl).query)
userName.should.eql app.user1.name
callback err
, done
it 'should send message by msgToken', (done) ->
async.auto
broadcast: (callback) ->
hits = 0
app.broadcast = (channel, event, data, socketId) ->
if event is 'message:create'
hits |= 0b1
data._creatorId.should.eql app.user1._id
data._toId.should.eql app.user2._id
data._teamId.should.eql app.team1._id
quote = data.attachments[0].data
quote.title.should.eql 'hello'
quote.category.should.eql 'thirdapp'
if event is 'notification:update'
hits |= 0b10
data.text.should.containEql 'hello'
callback() if hits is 0b11
createMessage: (callback) ->
options =
method: 'post'
url: '/services/message'
body: JSON.stringify
msgToken: msgToken
attachments: [
category: 'quote'
data: title: 'hello'
]
app.request options, callback
, done
after cleanup
describe 'Service#Webhook', ->
before prepare
it 'should receive webhook and route messages to service', (done) ->
$service = serviceLoader.load 'incoming'
$broadcast = $service.then (service) ->
new Promise (resolve, reject) ->
hits = 0
app.broadcast = (channel, event, data) ->
try
if event is 'message:create' and "#{data._creatorId}" is "#{service.robot._id}"
hits |= 0b1
data.body.should.eql 'Hello'
data.authorName.should.eql '小艾'
resolve() if hits is 0b1
catch err
reject err
$integration = Promise.resolve().then ->
options =
method: 'POST'
url: '/integrations'
body:
_sessionUserId: app.user1._id
_teamId: app.team1._id
_roomId: app.room1._id
category: 'incoming'
requestAsync options
.spread (res) -> app.integration1 = res.body
$message = $integration.then (integration) ->
options =
method: 'POST'
url: "/services/webhook/#{integration.hashId}"
body:
content: 'Hello'
authorName: '小艾'
requestAsync options
Promise.all [$broadcast, $integration, $message]
.nodeify done
it 'should send error webhooks and receive an error infomation when errorTimes above 5', (done) ->
$sendMsg = Promise.each [0..6], (n) ->
# Without title or text
options =
method: 'POST'
url: "/services/webhook/#{app.integration1.hashId}"
body: authorName: '小艾'
requestAsync options
.catch (err) -> err.message.should.containEql 'Title and text can not be empty'
$checkIntegration = $sendMsg.then ->
IntegrationModel.findOneAsync _id: app.integration1._id
.then (integration) ->
integration.should.have.properties 'errorInfo', 'errorTimes'
integration.errorTimes.should.eql 6
$checkIntegration.nodeify done
after cleanup
| true | should = require 'should'
fs = require 'fs'
path = require 'path'
async = require 'async'
Promise = require 'bluebird'
limbo = require 'limbo'
app = require '../app'
{prepare, cleanup, request, _app, requestAsync} = app
supertest = require 'supertest'
urlLib = require 'url'
serviceLoader = require 'talk-services'
qs = require 'querystring'
config = require 'config'
{
IntegrationModel
} = limbo.use 'talk'
describe 'Service#Mailgun', ->
@timeout 10000
mailgun = require './mailgun.json'
before prepare
it 'should receive the mail body from mailgun and create new page', (done) ->
async.auto
broadcast: (callback) ->
app.broadcast = (room, event, data) ->
if event is 'message:create'
data._roomId.should.eql app.room1._id
data.authorName.should.eql 'PI:NAME:<NAME>END_PI'
data.attachments.length.should.eql 1
data.attachments[0].category.should.eql 'quote'
quote = data.attachments[0].data
quote.should.have.properties 'title', 'text'
callback()
mailgun: (callback) ->
mailgun.recipient = app.room1.email
options =
method: 'post'
url: 'services/mailgun'
body: JSON.stringify mailgun
request options, callback
, done
it 'should receive an email with attachment and create a message with files', (done) ->
async.auto
broadcast: (callback) ->
app.broadcast = (room, event, data) ->
if event is 'message:create'
# Do not save thumbnail pic in files
data.attachments.length.should.eql 2
data.attachments[0].category.should.eql 'quote'
data.attachments[1].category.should.eql 'file'
quote = data.attachments[0].data
quote.text.should.containEql 'striker' # The striker thumbnail url
file1 = data.attachments[1].data
file1.fileName.should.eql 'page.html'
callback()
mailgun: (callback) ->
mailgun.recipient = app.room1.email
req = supertest(_app).post('/' + path.join(config.apiVersion, 'services/mailgun'))
Object.keys(mailgun).forEach (key) -> req.field key, mailgun[key] if toString.call(mailgun[key]) is '[object String]'
req.attach 'document', __dirname + "/../files/page.html"
req.attach 'document', __dirname + "/../files/thumbnail.jpg"
req.end (err, res) -> callback err
, done
after cleanup
describe 'Service#ToApp', ->
before prepare
msgToken = ''
it 'should generate an appToken and redirect to the app url', (done) ->
async.auto
toApp: (callback) ->
options =
method: 'get'
url: '/services/toapp'
qs:
_sessionUserId: app.user1._id
_teamId: app.team1._id
_toId: app.user2._id
url: 'http://somewhere.com'
app.request options, (err, res) ->
res.statusCode.should.eql 302
appUrl = res.headers.location
appUrl.should.containEql 'http://somewhere.com'
appUrl.should.containEql 'msgToken'
{msgToken, userName} = qs.parse(urlLib.parse(appUrl).query)
userName.should.eql app.user1.name
callback err
, done
it 'should send message by msgToken', (done) ->
async.auto
broadcast: (callback) ->
hits = 0
app.broadcast = (channel, event, data, socketId) ->
if event is 'message:create'
hits |= 0b1
data._creatorId.should.eql app.user1._id
data._toId.should.eql app.user2._id
data._teamId.should.eql app.team1._id
quote = data.attachments[0].data
quote.title.should.eql 'hello'
quote.category.should.eql 'thirdapp'
if event is 'notification:update'
hits |= 0b10
data.text.should.containEql 'hello'
callback() if hits is 0b11
createMessage: (callback) ->
options =
method: 'post'
url: '/services/message'
body: JSON.stringify
msgToken: msgToken
attachments: [
category: 'quote'
data: title: 'hello'
]
app.request options, callback
, done
after cleanup
describe 'Service#Webhook', ->
before prepare
it 'should receive webhook and route messages to service', (done) ->
$service = serviceLoader.load 'incoming'
$broadcast = $service.then (service) ->
new Promise (resolve, reject) ->
hits = 0
app.broadcast = (channel, event, data) ->
try
if event is 'message:create' and "#{data._creatorId}" is "#{service.robot._id}"
hits |= 0b1
data.body.should.eql 'Hello'
data.authorName.should.eql '小艾'
resolve() if hits is 0b1
catch err
reject err
$integration = Promise.resolve().then ->
options =
method: 'POST'
url: '/integrations'
body:
_sessionUserId: app.user1._id
_teamId: app.team1._id
_roomId: app.room1._id
category: 'incoming'
requestAsync options
.spread (res) -> app.integration1 = res.body
$message = $integration.then (integration) ->
options =
method: 'POST'
url: "/services/webhook/#{integration.hashId}"
body:
content: 'Hello'
authorName: '小艾'
requestAsync options
Promise.all [$broadcast, $integration, $message]
.nodeify done
it 'should send error webhooks and receive an error infomation when errorTimes above 5', (done) ->
$sendMsg = Promise.each [0..6], (n) ->
# Without title or text
options =
method: 'POST'
url: "/services/webhook/#{app.integration1.hashId}"
body: authorName: '小艾'
requestAsync options
.catch (err) -> err.message.should.containEql 'Title and text can not be empty'
$checkIntegration = $sendMsg.then ->
IntegrationModel.findOneAsync _id: app.integration1._id
.then (integration) ->
integration.should.have.properties 'errorInfo', 'errorTimes'
integration.errorTimes.should.eql 6
$checkIntegration.nodeify done
after cleanup
|
[
{
"context": "# [Slider.js](http://demo.greweb.fr/slider) by @greweb \n\n###!\nCopyright 2011 Gaetan Renaudeau\n\nLicensed ",
"end": 54,
"score": 0.999583899974823,
"start": 47,
"tag": "USERNAME",
"value": "@greweb"
},
{
"context": "greweb.fr/slider) by @greweb \n\n###!\nCopyright 20... | slider.coffee | kod3r/slider.js | 1 | # [Slider.js](http://demo.greweb.fr/slider) by @greweb
###!
Copyright 2011 Gaetan Renaudeau
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
# Util function : modulo for negative values
mod = (X,Y) -> X-Y*Math.floor(X/Y)
# RequestAnimationFrame polyfill : https://gist.github.com/997619
requestAnimationFrame = `function(a,b){while(a--&&!(b=window["oR0msR0mozR0webkitR0r".split(0)[a]+"equestAnimationFrame"]));return b||function(a){setTimeout(a,15)}}(5)`
# return the current millisecond timestamp
currentTime = `function(){return new Date().getTime()}`
# Slider template
# ---------------
tmplSlider = (o) ->
slider = $("""
<div class="slider">
<div class="loader"><span class="spinner"></span> <span class="percent">0</span>%</div>
<div class="slide-images"></div>
<div class="options">
<a class="prevSlide" href="javascript:;">prev</a>
<span class="slide-pager"></span>
<a class="nextSlide" href="javascript:;">next</a>
</div>
</div>
""")
slider.find('.slide-images').append(
$.map(o.slides, (slide) -> $('<div class="slide-image">'+
(if slide.link then '<a href="'+slide.link+'" target="_blank">' else '')+
'<img src="'+slide.src+'">'+
(if slide.name then '<span class="caption">'+slide.name+'</span>' else '')+
(if slide.link then '</a>' else '')+
'</div>')[0]
)
)
slider.find('.slide-pager').append $.map(o.slides, (slide, i) ->
$('<a href="javascript:;">' + (i + 1) + '</a>')[0]
)
slider
tmplSliderWithCanvas = (o) ->
node = tmplSlider o
node.find('div.slide-images').append('<canvas class="slide-images" />')
node
# SliderUtils
# -----------
SliderUtils =
extractImageData: (self, from, to) ->
{width, height} = self.canvas[0]
self.clean()
self.drawImage self.images[from]
fromData = self.ctx.getImageData 0, 0, width, height
self.clean()
self.drawImage self.images[to]
toData = self.ctx.getImageData 0, 0, width, height
output = self.ctx.createImageData width, height
return fromData: fromData, toData: toData, output: output
clippedTransition: ( clipFunction ) ->
(self, from, to, progress) ->
{width, height} = self.canvas[0]
ctx = self.ctx
self.drawImage self.images[from]
ctx.save()
ctx.beginPath()
clipFunction ctx, width, height, progress
ctx.clip()
self.drawImage self.images[to]
ctx.restore()
# SliderTransitionFunctions
# ------------------------
SliderTransitionFunctions =
# A clock load effect
clock:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
ctx.moveTo w/2, h/2
ctx.arc w/2, h/2, Math.max(w, h), 0, Math.PI*2*p, false
# A circle open effect
circle:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
ctx.arc w/2, h/2, 0.6*p*Math.max(w, h), 0, Math.PI*2, false
# A horizontal open effect
diamond:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
w2=w/2
h2=h/2
dh=p*h
dw=p*w
ctx.moveTo w2, h2-dh
ctx.lineTo w2+dw, h2
ctx.lineTo w2, h2+dh
ctx.lineTo w2-dw, h2
# A vertical open effect
verticalOpen:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
nbSpike=8
spikeh=h/(2*nbSpike) # the height of a demi-spike (triangle)
spikew=spikeh
pw=p*w/2
xl=w/2-pw
xr=w/2+pw
spikel=xl-spikew
spiker=xr+spikew
ctx.moveTo xl, 0
for hi in [0..nbSpike]
h1=(2*hi)*spikeh
h2=h1+spikeh
ctx.lineTo spikel, h1
ctx.lineTo xl, h2
ctx.lineTo spiker, h
for hi in [nbSpike..0]
h1=(2*hi)*spikeh
h2=h1-spikeh
ctx.lineTo xr, h1
ctx.lineTo spiker, h2
# A horizontal open effect
horizontalOpen:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
ctx.rect 0, (1-p)*h/2, w, h*p
# A sundblind open effect
horizontalSunblind:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
p = 1-(1-p)*(1-p) #non linear progress
blinds = 6
blindHeight = h/blinds
for blind in [0..blinds]
ctx.rect 0, blindHeight*blind, w, blindHeight*p
# A vertical sundblind open effect
verticalSunblind:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
p = 1-(1-p)*(1-p)
blinds = 10
blindWidth = w/blinds
for blind in [0..blinds]
prog = Math.max(0, Math.min( 2*p-(blind+1)/blinds, 1))
ctx.rect blindWidth*blind, 0, blindWidth*prog, h
# circles open effect
circles:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
circlesY = 6
circlesX = Math.floor circlesY*w/h
circleW = w/circlesX
circleH = h/circlesY
maxWH = Math.max(w, h)
maxRad = 0.7*Math.max(circleW, circleH)
for x in [0..circlesX]
for y in [0..circlesY]
cx = (x+0.5)*circleW
cy = (y+0.5)*circleH
r = Math.max(0, Math.min(2*p-cx/w, 1)) * maxRad
ctx.moveTo cx, cy
ctx.arc cx, cy, r, 0, Math.PI*2, false
# A square sundblind open effect
squares:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
p = 1-(1-p)*(1-p) #non linear progress
blindsY = 5
blindsX = Math.floor blindsY*w/h
blindWidth = w/blindsX
blindHeight = h/blindsY
for x in [0..blindsX]
for y in [0..blindsY]
sx = blindWidth*x
sy = blindHeight*y
prog = Math.max(0, Math.min(3*p-sx/w-sy/h, 1))
rw = blindWidth*prog
rh = blindHeight*prog
ctx.rect sx-rw/2, sy-rh/2, rw, rh
# A blured fade left effect
fadeLeft:
init: (self, from, to) ->
data = SliderUtils.extractImageData(self, from, to)
data.randomTrait = []
h = self.canvas[0].height
for y in [0..h]
data.randomTrait[y] = Math.random()
data
render: (self, from, to, progress, data) ->
blur = 150
{width, height} = self.canvas[0]
ctx = self.ctx
fd = data.fromData.data
td = data.toData.data
out = data.output.data
randomTrait = data.randomTrait
`(function(){
var wpdb = width*progress/blur;
for (var x = 0; x < width; ++x) {
var xdb = x/blur;
for (var y = 0; y < height; ++y) {
var b = (y*width + x)*4
var p1 = Math.min(Math.max((xdb-wpdb*(1+randomTrait[y]/10)), 0), 1)
var p2 = 1-p1
for (var c = 0; c < 3; ++c) {
var i = b + c;
out[i] = p1 * (fd[i] ) + p2 * (td[i] )
}
out[b + 3] = 255;
}
}
}())`
self.ctx.putImageData data.output, 0, 0
# Slider - a lightweight slider
# -----------------------------
# Constructor : init container node and current slide number
class Slider
constructor: (container) -> @container = $(container)
current: 0
lastHumanNav: 0
duration: 4000
w: '640px'
h: '430px'
theme: 'theme-dark'
tmpl: tmplSlider
# Util function : return the circular value of num
circular: (num) -> mod num, @slides.size()
# Go to slide number `num` : update both DOM and this.current
slide: (num) ->
# num must be between 0 and nbslides-1
if @slides && @pages
num = Math.max(0, Math.min(num, @slides.size()-1))
# Move current class in **slides**
@slides.eq(@current).removeClass "current"
@slides.eq(num).addClass "current"
# Move current class in **pages**
@pages.eq(@current).removeClass "current"
@pages.eq(num).addClass "current"
@current = num
this
# Go to circular next slide (will call `slide`)
next: -> @slide @circular(@current+1)
# Go to circular previous slide (will call `slide`)
prev: -> @slide @circular(@current-1)
# Change the duration between each slide
setDuration: (@duration) ->
this
# Change the slider transition CSS class
setTransition: (transition) ->
if @node
@node.removeClass(@transition) if @transition
@node.addClass(transition) if transition
@transition = transition
this
# Change the slider theme CSS class
setTheme: (theme = "theme-dark") ->
if @node
@node.removeClass(@theme) if @theme
@node.addClass(theme) if theme
@theme = theme
this
# set slider size
setSize: (@w, @h) ->
if @node
@node.width w
@node.find(".slide-image").width w
@node.find(".slide-images").height h
this
# Fetch photos with a JSON providing its `url`.
# If `options` is defined, passing it in request params.
# If `transformer` is defined, using it to transform the json
# to a compatible json to pass to `Slider.setPhotos`.
fetchJson: (url, options, transformer) ->
params = $.extend({}, options)
transformer ?= (json) -> json
$.getJSON url, params, (json) => @setPhotos transformer(json)
this
# Sync slider data to DOM
_sync: ->
@setTransition @transition
@setTheme @theme
@setSize @w, @h
@slide @current
# `slides` : format: array of { src, name, link (optional) }
setPhotos: (@photos) ->
# Templating and appending to DOM
@node = @tmpl(slides: photos).addClass("loading")
@container.empty().append @node
@_sync()
# Loading all images before showing the slider
nbLoad = 0
imgs = @node.find(".slide-image img").bind("load", =>
total = imgs.size()
if ++nbLoad == total
@node.removeClass "loading"
@start()
# Update loader progression (in percent)
@node.find(".loader .percent").text Math.floor(100 * nbLoad / total)
)
@node.find(".loader").text "No photo" if imgs.size() == 0
this
# Start the slider
start: ->
@slides = @node.find(".slide-image")
@pages = @node.find(".slide-pager a")
@_sync()
@_bind()
this
stop: ->
@_unbind()
this
# Bind slider DOM events for navigation
_bind: ->
@_unbind()
@node.find(".prevSlide").click => @prev()
@node.find(".nextSlide").click => @next()
self = this
if @node
@node.find(".slide-pager a").each (i) ->
$(this).click -> self.slide i
now = -> currentTime()
@node.find(".options a").click => @lastHumanNav = now()
if not @timeout
loop_ = =>
@next() if now() - @lastHumanNav > 2000
@timeout = setTimeout(loop_, @duration)
@timeout = setTimeout(loop_, @duration)
this
_unbind: ->
if @node
@node.find(".prevSlide, .nextSlide, .slide-pager a, .options a").unbind 'click'
if @timeout
clearTimeout @timeout
@timeout = null
# SliderWithCanvas
# ---------------
# Let's support canvas transitions
class SliderWithCanvas extends Slider
transitionFunction: SliderTransitionFunctions.clock
transitionDuration: 1500
tmpl: tmplSliderWithCanvas
# also synchronize the renderMode
_sync: () ->
renderMode = @renderMode
super
@setRenderMode(renderMode)
# Init some variables related to canvas
start: () ->
@notCanvas = @node.find '.slide-images:not(canvas) img'
@canvas = @node.find 'canvas.slide-images'
@ctx = @canvas[0].getContext '2d' if @canvas[0] and @canvas[0].getContext
@images = $.map(@photos, ((photo) =>
img = new Image()
img.src = photo.src
img
)) if @photos
super
# The `setSize` method should update the canvas size
setSize: (w, h) ->
super w, h
@canvas.attr("height", h).attr("width", w) if @canvas
this
# set the render mode of the slider ( canvas | css )
setRenderMode: (@renderMode) ->
if @ctx
if @renderMode is 'canvas'
@drawImage @images[@current]
@notCanvas.hide()
@canvas.show()
else
@canvas.hide()
@notCanvas.show()
this
setTransition: (transition) ->
@setRenderMode 'css'
super transition
this
# Change the slider transition function (for the canvas animation)
setTransitionFunction: (@transitionFunction) ->
@setRenderMode 'canvas'
this
# Change the slider transition duration (means the time of the transition)
setTransitionDuration: (@transitionDuration) ->
@setRenderMode 'canvas'
this
# Overriding `slide` to support the canvas rendering
slide: (num) ->
@fromSlide = @current
@toSlide = num
@transitionStart = currentTime()
if @ctx and @renderMode is 'canvas'
@startRender()
super num
# clean the canvas
clean: -> @ctx.clearRect 0, 0, @canvas[0].width, @canvas[0].height
# draw an image on the all canvas with the correct ratio
drawImage: (img) ->
{width, height} = @canvas[0]
@ctx.drawImage img, 0, 0, width, width*img.height/img.width
# `_renderId` help to make sure once transition is running
_renderId: 0
# init render loop
startRender: ->
if @transitionFunction.init
@tfdata = @transitionFunction.init this, @fromSlide, @toSlide
@render(++@_renderId, @transitionFunction)
# render loop
render: (id, transitionFunction) ->
now = currentTime()
if id==@_renderId and now >= @transitionStart
progress = Math.min(1, (now - @transitionStart) / @transitionDuration)
if progress == 1
@clean()
@drawImage @images[@toSlide]
else
transitionFunction.render this, @fromSlide, @toSlide, progress, @tfdata
requestAnimationFrame (=>@render(id, transitionFunction)), @canvas[0]
# Exporting global variables
# --------------------------
window.Slider = SliderWithCanvas
window.SliderTransitionFunctions = SliderTransitionFunctions
window.SliderUtils = SliderUtils
| 177619 | # [Slider.js](http://demo.greweb.fr/slider) by @greweb
###!
Copyright 2011 Gaetan Renaudeau (@greweb)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
# Floored modulo: unlike the native `%` operator, the result always takes
# the sign of the divisor, so negative values wrap around correctly
# (e.g. mod(-1, 5) is 4). Used for circular slide navigation.
mod = (a, b) ->
  a - b * Math.floor(a / b)
# RequestAnimationFrame polyfill : https://gist.github.com/997619
# Minified probe: "oR0msR0mozR0webkitR0r".split(0) yields the prefixes
# ["oR","msR","mozR","webkitR","r"]; appending "equestAnimationFrame"
# produces the unprefixed name plus the webkit/moz/ms/o vendor variants,
# tried in that order (index counts down from 4). Falls back to a ~15ms
# setTimeout when no native implementation exists.
requestAnimationFrame = `function(a,b){while(a--&&!(b=window["oR0msR0mozR0webkitR0r".split(0)[a]+"equestAnimationFrame"]));return b||function(a){setTimeout(a,15)}}(5)`
# Current timestamp in milliseconds since the epoch.
currentTime = -> new Date().getTime()
# Slider template
# ---------------
# Build the slider markup for `o.slides` (array of { src, name, link },
# `name` and `link` optional) and return it as a detached jQuery node.
tmplSlider = (o) ->
  slider = $("""
  <div class="slider">
    <div class="loader"><span class="spinner"></span> <span class="percent">0</span>%</div>
    <div class="slide-images"></div>
    <div class="options">
      <a class="prevSlide" href="javascript:;">prev</a>
      <span class="slide-pager"></span>
      <a class="nextSlide" href="javascript:;">next</a>
    </div>
  </div>
  """)
  # One .slide-image per slide: image, optional caption, optional link wrapper.
  slider.find('.slide-images').append(
    $.map(o.slides, (slide) -> $('<div class="slide-image">'+
      (if slide.link then '<a href="'+slide.link+'" target="_blank">' else '')+
      '<img src="'+slide.src+'">'+
      (if slide.name then '<span class="caption">'+slide.name+'</span>' else '')+
      (if slide.link then '</a>' else '')+
      '</div>')[0]
    )
  )
  # Pager links are 1-indexed for display.
  slider.find('.slide-pager').append $.map(o.slides, (slide, i) ->
    $('<a href="javascript:;">' + (i + 1) + '</a>')[0]
  )
  slider
# Builds the regular slider markup, then injects the <canvas> element
# that SliderWithCanvas draws its animated transitions onto.
tmplSliderWithCanvas = (o) ->
  slider = tmplSlider o
  canvasMarkup = '<canvas class="slide-images" />'
  slider.find('div.slide-images').append canvasMarkup
  slider
# SliderUtils
# -----------
SliderUtils =
  # Snapshot the pixel data of both the `from` and `to` slides by drawing
  # each onto the canvas in turn, plus a blank ImageData buffer (`output`)
  # for transitions that blend pixels manually (e.g. fadeLeft).
  extractImageData: (self, from, to) ->
    {width, height} = self.canvas[0]
    self.clean()
    self.drawImage self.images[from]
    fromData = self.ctx.getImageData 0, 0, width, height
    self.clean()
    self.drawImage self.images[to]
    toData = self.ctx.getImageData 0, 0, width, height
    output = self.ctx.createImageData width, height
    return fromData: fromData, toData: toData, output: output
  # Wrap a path-building function into a full render step: draw the `from`
  # slide, clip to the path produced by `clipFunction ctx, w, h, progress`,
  # then draw the `to` slide inside the clip. save/restore keeps the clip
  # from leaking into the next frame.
  clippedTransition: ( clipFunction ) ->
    (self, from, to, progress) ->
      {width, height} = self.canvas[0]
      ctx = self.ctx
      self.drawImage self.images[from]
      ctx.save()
      ctx.beginPath()
      clipFunction ctx, width, height, progress
      ctx.clip()
      self.drawImage self.images[to]
      ctx.restore()
# SliderTransitionFunctions
# ------------------------
# Each transition exposes `render(self, from, to, progress, data)` and
# optionally `init(self, from, to) -> data`. Clip-based effects receive
# (ctx, w, h, p) where p goes from 0 to 1 over the transition.
SliderTransitionFunctions =
  # A clock load effect
  clock:
    render: SliderUtils.clippedTransition (ctx, w, h, p) ->
      ctx.moveTo w/2, h/2
      ctx.arc w/2, h/2, Math.max(w, h), 0, Math.PI*2*p, false
  # A circle open effect
  circle:
    render: SliderUtils.clippedTransition (ctx, w, h, p) ->
      ctx.arc w/2, h/2, 0.6*p*Math.max(w, h), 0, Math.PI*2, false
  # A diamond open effect (growing rhombus centered on the canvas)
  diamond:
    render: SliderUtils.clippedTransition (ctx, w, h, p) ->
      w2=w/2
      h2=h/2
      dh=p*h
      dw=p*w
      ctx.moveTo w2, h2-dh
      ctx.lineTo w2+dw, h2
      ctx.lineTo w2, h2+dh
      ctx.lineTo w2-dw, h2
  # A vertical open effect: a band with zig-zag (spiked) left/right edges
  # widens from the vertical center line outward.
  verticalOpen:
    render: SliderUtils.clippedTransition (ctx, w, h, p) ->
      nbSpike=8
      spikeh=h/(2*nbSpike) # the height of a demi-spike (triangle)
      spikew=spikeh
      pw=p*w/2
      xl=w/2-pw
      xr=w/2+pw
      spikel=xl-spikew
      spiker=xr+spikew
      ctx.moveTo xl, 0
      # Left edge, traced top to bottom.
      for hi in [0..nbSpike]
        h1=(2*hi)*spikeh
        h2=h1+spikeh
        ctx.lineTo spikel, h1
        ctx.lineTo xl, h2
      ctx.lineTo spiker, h
      # Right edge, traced bottom to top.
      for hi in [nbSpike..0]
        h1=(2*hi)*spikeh
        h2=h1-spikeh
        ctx.lineTo xr, h1
        ctx.lineTo spiker, h2
  # A horizontal open effect
  horizontalOpen:
    render: SliderUtils.clippedTransition (ctx, w, h, p) ->
      ctx.rect 0, (1-p)*h/2, w, h*p
  # A sunblind open effect (horizontal slats)
  horizontalSunblind:
    render: SliderUtils.clippedTransition (ctx, w, h, p) ->
      p = 1-(1-p)*(1-p) #non linear progress
      blinds = 6
      blindHeight = h/blinds
      for blind in [0..blinds]
        ctx.rect 0, blindHeight*blind, w, blindHeight*p
  # A vertical sunblind open effect; each slat is staggered by its index.
  verticalSunblind:
    render: SliderUtils.clippedTransition (ctx, w, h, p) ->
      p = 1-(1-p)*(1-p)
      blinds = 10
      blindWidth = w/blinds
      for blind in [0..blinds]
        prog = Math.max(0, Math.min( 2*p-(blind+1)/blinds, 1))
        ctx.rect blindWidth*blind, 0, blindWidth*prog, h
  # circles open effect: a grid of growing circles, sweeping left to right.
  circles:
    render: SliderUtils.clippedTransition (ctx, w, h, p) ->
      circlesY = 6
      circlesX = Math.floor circlesY*w/h
      circleW = w/circlesX
      circleH = h/circlesY
      maxWH = Math.max(w, h)
      maxRad = 0.7*Math.max(circleW, circleH)
      for x in [0..circlesX]
        for y in [0..circlesY]
          cx = (x+0.5)*circleW
          cy = (y+0.5)*circleH
          # Radius ramps up later for circles further right.
          r = Math.max(0, Math.min(2*p-cx/w, 1)) * maxRad
          ctx.moveTo cx, cy
          ctx.arc cx, cy, r, 0, Math.PI*2, false
  # A square sunblind open effect, sweeping diagonally from the top-left.
  squares:
    render: SliderUtils.clippedTransition (ctx, w, h, p) ->
      p = 1-(1-p)*(1-p) #non linear progress
      blindsY = 5
      blindsX = Math.floor blindsY*w/h
      blindWidth = w/blindsX
      blindHeight = h/blindsY
      for x in [0..blindsX]
        for y in [0..blindsY]
          sx = blindWidth*x
          sy = blindHeight*y
          prog = Math.max(0, Math.min(3*p-sx/w-sy/h, 1))
          rw = blindWidth*prog
          rh = blindHeight*prog
          ctx.rect sx-rw/2, sy-rh/2, rw, rh
  # A blurred fade-left effect: per-pixel blend whose crossfade boundary
  # sweeps left to right, jittered per-row by a random trait.
  fadeLeft:
    # Capture both slides' pixels once, plus one random jitter per row.
    init: (self, from, to) ->
      data = SliderUtils.extractImageData(self, from, to)
      data.randomTrait = []
      h = self.canvas[0].height
      for y in [0..h]
        data.randomTrait[y] = Math.random()
      data
    render: (self, from, to, progress, data) ->
      blur = 150
      {width, height} = self.canvas[0]
      ctx = self.ctx
      fd = data.fromData.data
      td = data.toData.data
      out = data.output.data
      randomTrait = data.randomTrait
      # Inlined JS for speed: blends RGB channels of the two snapshots per
      # pixel; alpha is forced opaque (255).
      `(function(){
        var wpdb = width*progress/blur;
        for (var x = 0; x < width; ++x) {
          var xdb = x/blur;
          for (var y = 0; y < height; ++y) {
            var b = (y*width + x)*4
            var p1 = Math.min(Math.max((xdb-wpdb*(1+randomTrait[y]/10)), 0), 1)
            var p2 = 1-p1
            for (var c = 0; c < 3; ++c) {
              var i = b + c;
              out[i] = p1 * (fd[i] ) + p2 * (td[i] )
            }
            out[b + 3] = 255;
          }
        }
      }())`
      self.ctx.putImageData data.output, 0, 0
# Slider - a lightweight slider
# -----------------------------
# Base slider: CSS-class driven slide switching, pager, prev/next arrows
# and an auto-advance timer. Rendering is pure DOM/CSS; the canvas
# variant below overrides the relevant hooks.
# Constructor : init container node and current slide number
class Slider
  constructor: (container) -> @container = $(container)
  current: 0
  lastHumanNav: 0
  duration: 4000
  w: '640px'
  h: '430px'
  theme: 'theme-dark'
  tmpl: tmplSlider
  # Util function : return the circular value of num
  circular: (num) -> mod num, @slides.size()
  # Go to slide number `num` : update both DOM and this.current
  slide: (num) ->
    # num must be between 0 and nbslides-1
    if @slides && @pages
      num = Math.max(0, Math.min(num, @slides.size()-1))
      # Move current class in **slides**
      @slides.eq(@current).removeClass "current"
      @slides.eq(num).addClass "current"
      # Move current class in **pages**
      @pages.eq(@current).removeClass "current"
      @pages.eq(num).addClass "current"
    @current = num
    this
  # Go to circular next slide (will call `slide`)
  next: -> @slide @circular(@current+1)
  # Go to circular previous slide (will call `slide`)
  prev: -> @slide @circular(@current-1)
  # Change the duration between each slide
  setDuration: (@duration) ->
    this
  # Change the slider transition CSS class
  setTransition: (transition) ->
    if @node
      @node.removeClass(@transition) if @transition
      @node.addClass(transition) if transition
    @transition = transition
    this
  # Change the slider theme CSS class
  setTheme: (theme = "theme-dark") ->
    if @node
      @node.removeClass(@theme) if @theme
      @node.addClass(theme) if theme
    @theme = theme
    this
  # set slider size
  setSize: (@w, @h) ->
    if @node
      @node.width w
      @node.find(".slide-image").width w
      @node.find(".slide-images").height h
    this
  # Fetch photos with a JSON providing its `url`.
  # If `options` is defined, passing it in request params.
  # If `transformer` is defined, using it to transform the json
  # to a compatible json to pass to `Slider.setPhotos`.
  fetchJson: (url, options, transformer) ->
    params = $.extend({}, options)
    transformer ?= (json) -> json
    $.getJSON url, params, (json) => @setPhotos transformer(json)
    this
  # Sync slider data to DOM
  _sync: ->
    @setTransition @transition
    @setTheme @theme
    @setSize @w, @h
    @slide @current
  # `slides` : format: array of { src, name, link (optional) }
  setPhotos: (@photos) ->
    # Templating and appending to DOM
    @node = @tmpl(slides: photos).addClass("loading")
    @container.empty().append @node
    @_sync()
    # Loading all images before showing the slider
    nbLoad = 0
    imgs = @node.find(".slide-image img").bind("load", =>
      total = imgs.size()
      # start() only once every image has fired its load event
      if ++nbLoad == total
        @node.removeClass "loading"
        @start()
      # Update loader progression (in percent)
      @node.find(".loader .percent").text Math.floor(100 * nbLoad / total)
    )
    @node.find(".loader").text "No photo" if imgs.size() == 0
    this
  # Start the slider
  start: ->
    @slides = @node.find(".slide-image")
    @pages = @node.find(".slide-pager a")
    @_sync()
    @_bind()
    this
  stop: ->
    @_unbind()
    this
  # Bind slider DOM events for navigation
  _bind: ->
    @_unbind()
    @node.find(".prevSlide").click => @prev()
    @node.find(".nextSlide").click => @next()
    self = this
    if @node
      @node.find(".slide-pager a").each (i) ->
        $(this).click -> self.slide i
    now = -> currentTime()
    @node.find(".options a").click => @lastHumanNav = now()
    # Auto-advance loop: reschedules itself every `duration` ms, but skips
    # the advance when the user navigated manually within the last 2s.
    if not @timeout
      loop_ = =>
        @next() if now() - @lastHumanNav > 2000
        @timeout = setTimeout(loop_, @duration)
      @timeout = setTimeout(loop_, @duration)
    this
  _unbind: ->
    if @node
      @node.find(".prevSlide, .nextSlide, .slide-pager a, .options a").unbind 'click'
    if @timeout
      clearTimeout @timeout
      @timeout = null
# SliderWithCanvas
# ---------------
# Let's support canvas transitions
# Extends Slider with a canvas render mode: transitions are drawn frame by
# frame via requestAnimationFrame instead of CSS classes.
class SliderWithCanvas extends Slider
  transitionFunction: SliderTransitionFunctions.clock
  transitionDuration: 1500
  tmpl: tmplSliderWithCanvas
  # also synchronize the renderMode
  _sync: () ->
    # super's setTransition resets renderMode to 'css', so save/restore it.
    renderMode = @renderMode
    super
    @setRenderMode(renderMode)
  # Init some variables related to canvas
  start: () ->
    @notCanvas = @node.find '.slide-images:not(canvas) img'
    @canvas = @node.find 'canvas.slide-images'
    # Guarded: browsers without canvas leave @ctx undefined (CSS fallback).
    @ctx = @canvas[0].getContext '2d' if @canvas[0] and @canvas[0].getContext
    @images = $.map(@photos, ((photo) =>
      img = new Image()
      img.src = photo.src
      img
    )) if @photos
    super
  # The `setSize` method should update the canvas size
  setSize: (w, h) ->
    super w, h
    @canvas.attr("height", h).attr("width", w) if @canvas
    this
  # set the render mode of the slider ( canvas | css )
  setRenderMode: (@renderMode) ->
    if @ctx
      if @renderMode is 'canvas'
        @drawImage @images[@current]
        @notCanvas.hide()
        @canvas.show()
      else
        @canvas.hide()
        @notCanvas.show()
    this
  # Choosing a CSS transition switches the slider back to css mode.
  setTransition: (transition) ->
    @setRenderMode 'css'
    super transition
    this
  # Change the slider transition function (for the canvas animation)
  setTransitionFunction: (@transitionFunction) ->
    @setRenderMode 'canvas'
    this
  # Change the slider transition duration (means the time of the transition)
  setTransitionDuration: (@transitionDuration) ->
    @setRenderMode 'canvas'
    this
  # Overriding `slide` to support the canvas rendering
  slide: (num) ->
    @fromSlide = @current
    @toSlide = num
    @transitionStart = currentTime()
    if @ctx and @renderMode is 'canvas'
      @startRender()
    super num
  # clean the canvas
  clean: -> @ctx.clearRect 0, 0, @canvas[0].width, @canvas[0].height
  # draw an image on the all canvas with the correct ratio
  drawImage: (img) ->
    {width, height} = @canvas[0]
    @ctx.drawImage img, 0, 0, width, width*img.height/img.width
  # `_renderId` ensures only the most recent transition keeps animating:
  # a new slide() bumps the id, which stops any in-flight render loop.
  _renderId: 0
  # init render loop
  startRender: ->
    if @transitionFunction.init
      @tfdata = @transitionFunction.init this, @fromSlide, @toSlide
    @render(++@_renderId, @transitionFunction)
  # render loop
  render: (id, transitionFunction) ->
    now = currentTime()
    # Stale ids fall through: the loop exits without rescheduling.
    if id==@_renderId and now >= @transitionStart
      progress = Math.min(1, (now - @transitionStart) / @transitionDuration)
      if progress == 1
        # Final frame: draw the destination slide and stop.
        @clean()
        @drawImage @images[@toSlide]
      else
        transitionFunction.render this, @fromSlide, @toSlide, progress, @tfdata
        requestAnimationFrame (=>@render(id, transitionFunction)), @canvas[0]
# Exporting global variables
# --------------------------
# The canvas-capable subclass is published as the public `Slider`; it
# degrades to the CSS base behavior when canvas is unavailable.
window.Slider = SliderWithCanvas
window.SliderTransitionFunctions = SliderTransitionFunctions
window.SliderUtils = SliderUtils
| true | # [Slider.js](http://demo.greweb.fr/slider) by @greweb
###!
Copyright 2011 PI:NAME:<NAME>END_PI
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
# Util function : modulo for negative values
mod = (X,Y) -> X-Y*Math.floor(X/Y)
# RequestAnimationFrame polyfill : https://gist.github.com/997619
requestAnimationFrame = `function(a,b){while(a--&&!(b=window["oR0msR0mozR0webkitR0r".split(0)[a]+"equestAnimationFrame"]));return b||function(a){setTimeout(a,15)}}(5)`
# return the current millisecond timestamp
currentTime = `function(){return new Date().getTime()}`
# Slider template
# ---------------
tmplSlider = (o) ->
slider = $("""
<div class="slider">
<div class="loader"><span class="spinner"></span> <span class="percent">0</span>%</div>
<div class="slide-images"></div>
<div class="options">
<a class="prevSlide" href="javascript:;">prev</a>
<span class="slide-pager"></span>
<a class="nextSlide" href="javascript:;">next</a>
</div>
</div>
""")
slider.find('.slide-images').append(
$.map(o.slides, (slide) -> $('<div class="slide-image">'+
(if slide.link then '<a href="'+slide.link+'" target="_blank">' else '')+
'<img src="'+slide.src+'">'+
(if slide.name then '<span class="caption">'+slide.name+'</span>' else '')+
(if slide.link then '</a>' else '')+
'</div>')[0]
)
)
slider.find('.slide-pager').append $.map(o.slides, (slide, i) ->
$('<a href="javascript:;">' + (i + 1) + '</a>')[0]
)
slider
tmplSliderWithCanvas = (o) ->
node = tmplSlider o
node.find('div.slide-images').append('<canvas class="slide-images" />')
node
# SliderUtils
# -----------
SliderUtils =
extractImageData: (self, from, to) ->
{width, height} = self.canvas[0]
self.clean()
self.drawImage self.images[from]
fromData = self.ctx.getImageData 0, 0, width, height
self.clean()
self.drawImage self.images[to]
toData = self.ctx.getImageData 0, 0, width, height
output = self.ctx.createImageData width, height
return fromData: fromData, toData: toData, output: output
clippedTransition: ( clipFunction ) ->
(self, from, to, progress) ->
{width, height} = self.canvas[0]
ctx = self.ctx
self.drawImage self.images[from]
ctx.save()
ctx.beginPath()
clipFunction ctx, width, height, progress
ctx.clip()
self.drawImage self.images[to]
ctx.restore()
# SliderTransitionFunctions
# ------------------------
SliderTransitionFunctions =
# A clock load effect
clock:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
ctx.moveTo w/2, h/2
ctx.arc w/2, h/2, Math.max(w, h), 0, Math.PI*2*p, false
# A circle open effect
circle:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
ctx.arc w/2, h/2, 0.6*p*Math.max(w, h), 0, Math.PI*2, false
# A horizontal open effect
diamond:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
w2=w/2
h2=h/2
dh=p*h
dw=p*w
ctx.moveTo w2, h2-dh
ctx.lineTo w2+dw, h2
ctx.lineTo w2, h2+dh
ctx.lineTo w2-dw, h2
# A vertical open effect
verticalOpen:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
nbSpike=8
spikeh=h/(2*nbSpike) # the height of a demi-spike (triangle)
spikew=spikeh
pw=p*w/2
xl=w/2-pw
xr=w/2+pw
spikel=xl-spikew
spiker=xr+spikew
ctx.moveTo xl, 0
for hi in [0..nbSpike]
h1=(2*hi)*spikeh
h2=h1+spikeh
ctx.lineTo spikel, h1
ctx.lineTo xl, h2
ctx.lineTo spiker, h
for hi in [nbSpike..0]
h1=(2*hi)*spikeh
h2=h1-spikeh
ctx.lineTo xr, h1
ctx.lineTo spiker, h2
# A horizontal open effect
horizontalOpen:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
ctx.rect 0, (1-p)*h/2, w, h*p
# A sundblind open effect
horizontalSunblind:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
p = 1-(1-p)*(1-p) #non linear progress
blinds = 6
blindHeight = h/blinds
for blind in [0..blinds]
ctx.rect 0, blindHeight*blind, w, blindHeight*p
# A vertical sundblind open effect
verticalSunblind:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
p = 1-(1-p)*(1-p)
blinds = 10
blindWidth = w/blinds
for blind in [0..blinds]
prog = Math.max(0, Math.min( 2*p-(blind+1)/blinds, 1))
ctx.rect blindWidth*blind, 0, blindWidth*prog, h
# circles open effect
circles:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
circlesY = 6
circlesX = Math.floor circlesY*w/h
circleW = w/circlesX
circleH = h/circlesY
maxWH = Math.max(w, h)
maxRad = 0.7*Math.max(circleW, circleH)
for x in [0..circlesX]
for y in [0..circlesY]
cx = (x+0.5)*circleW
cy = (y+0.5)*circleH
r = Math.max(0, Math.min(2*p-cx/w, 1)) * maxRad
ctx.moveTo cx, cy
ctx.arc cx, cy, r, 0, Math.PI*2, false
# A square sundblind open effect
squares:
render: SliderUtils.clippedTransition (ctx, w, h, p) ->
p = 1-(1-p)*(1-p) #non linear progress
blindsY = 5
blindsX = Math.floor blindsY*w/h
blindWidth = w/blindsX
blindHeight = h/blindsY
for x in [0..blindsX]
for y in [0..blindsY]
sx = blindWidth*x
sy = blindHeight*y
prog = Math.max(0, Math.min(3*p-sx/w-sy/h, 1))
rw = blindWidth*prog
rh = blindHeight*prog
ctx.rect sx-rw/2, sy-rh/2, rw, rh
# A blured fade left effect
fadeLeft:
init: (self, from, to) ->
data = SliderUtils.extractImageData(self, from, to)
data.randomTrait = []
h = self.canvas[0].height
for y in [0..h]
data.randomTrait[y] = Math.random()
data
render: (self, from, to, progress, data) ->
blur = 150
{width, height} = self.canvas[0]
ctx = self.ctx
fd = data.fromData.data
td = data.toData.data
out = data.output.data
randomTrait = data.randomTrait
`(function(){
var wpdb = width*progress/blur;
for (var x = 0; x < width; ++x) {
var xdb = x/blur;
for (var y = 0; y < height; ++y) {
var b = (y*width + x)*4
var p1 = Math.min(Math.max((xdb-wpdb*(1+randomTrait[y]/10)), 0), 1)
var p2 = 1-p1
for (var c = 0; c < 3; ++c) {
var i = b + c;
out[i] = p1 * (fd[i] ) + p2 * (td[i] )
}
out[b + 3] = 255;
}
}
}())`
self.ctx.putImageData data.output, 0, 0
# Slider - a lightweight slider
# -----------------------------
# Constructor : init container node and current slide number
class Slider
constructor: (container) -> @container = $(container)
current: 0
lastHumanNav: 0
duration: 4000
w: '640px'
h: '430px'
theme: 'theme-dark'
tmpl: tmplSlider
# Util function : return the circular value of num
circular: (num) -> mod num, @slides.size()
# Go to slide number `num` : update both DOM and this.current
slide: (num) ->
# num must be between 0 and nbslides-1
if @slides && @pages
num = Math.max(0, Math.min(num, @slides.size()-1))
# Move current class in **slides**
@slides.eq(@current).removeClass "current"
@slides.eq(num).addClass "current"
# Move current class in **pages**
@pages.eq(@current).removeClass "current"
@pages.eq(num).addClass "current"
@current = num
this
# Go to circular next slide (will call `slide`)
next: -> @slide @circular(@current+1)
# Go to circular previous slide (will call `slide`)
prev: -> @slide @circular(@current-1)
# Change the duration between each slide
setDuration: (@duration) ->
this
# Change the slider transition CSS class
setTransition: (transition) ->
if @node
@node.removeClass(@transition) if @transition
@node.addClass(transition) if transition
@transition = transition
this
# Change the slider theme CSS class
setTheme: (theme = "theme-dark") ->
if @node
@node.removeClass(@theme) if @theme
@node.addClass(theme) if theme
@theme = theme
this
# set slider size
setSize: (@w, @h) ->
if @node
@node.width w
@node.find(".slide-image").width w
@node.find(".slide-images").height h
this
# Fetch photos with a JSON providing its `url`.
# If `options` is defined, passing it in request params.
# If `transformer` is defined, using it to transform the json
# to a compatible json to pass to `Slider.setPhotos`.
fetchJson: (url, options, transformer) ->
params = $.extend({}, options)
transformer ?= (json) -> json
$.getJSON url, params, (json) => @setPhotos transformer(json)
this
# Sync slider data to DOM
_sync: ->
@setTransition @transition
@setTheme @theme
@setSize @w, @h
@slide @current
# `slides` : format: array of { src, name, link (optional) }
setPhotos: (@photos) ->
# Templating and appending to DOM
@node = @tmpl(slides: photos).addClass("loading")
@container.empty().append @node
@_sync()
# Loading all images before showing the slider
nbLoad = 0
imgs = @node.find(".slide-image img").bind("load", =>
total = imgs.size()
if ++nbLoad == total
@node.removeClass "loading"
@start()
# Update loader progression (in percent)
@node.find(".loader .percent").text Math.floor(100 * nbLoad / total)
)
@node.find(".loader").text "No photo" if imgs.size() == 0
this
# Start the slider
start: ->
@slides = @node.find(".slide-image")
@pages = @node.find(".slide-pager a")
@_sync()
@_bind()
this
stop: ->
@_unbind()
this
# Bind slider DOM events for navigation
_bind: ->
@_unbind()
@node.find(".prevSlide").click => @prev()
@node.find(".nextSlide").click => @next()
self = this
if @node
@node.find(".slide-pager a").each (i) ->
$(this).click -> self.slide i
now = -> currentTime()
@node.find(".options a").click => @lastHumanNav = now()
if not @timeout
loop_ = =>
@next() if now() - @lastHumanNav > 2000
@timeout = setTimeout(loop_, @duration)
@timeout = setTimeout(loop_, @duration)
this
_unbind: ->
if @node
@node.find(".prevSlide, .nextSlide, .slide-pager a, .options a").unbind 'click'
if @timeout
clearTimeout @timeout
@timeout = null
# SliderWithCanvas
# ---------------
# Let's support canvas transitions
class SliderWithCanvas extends Slider
transitionFunction: SliderTransitionFunctions.clock
transitionDuration: 1500
tmpl: tmplSliderWithCanvas
# also synchronize the renderMode
_sync: () ->
renderMode = @renderMode
super
@setRenderMode(renderMode)
# Init some variables related to canvas
start: () ->
@notCanvas = @node.find '.slide-images:not(canvas) img'
@canvas = @node.find 'canvas.slide-images'
@ctx = @canvas[0].getContext '2d' if @canvas[0] and @canvas[0].getContext
@images = $.map(@photos, ((photo) =>
img = new Image()
img.src = photo.src
img
)) if @photos
super
# The `setSize` method should update the canvas size
setSize: (w, h) ->
super w, h
@canvas.attr("height", h).attr("width", w) if @canvas
this
# set the render mode of the slider ( canvas | css )
setRenderMode: (@renderMode) ->
if @ctx
if @renderMode is 'canvas'
@drawImage @images[@current]
@notCanvas.hide()
@canvas.show()
else
@canvas.hide()
@notCanvas.show()
this
setTransition: (transition) ->
@setRenderMode 'css'
super transition
this
# Change the slider transition function (for the canvas animation)
setTransitionFunction: (@transitionFunction) ->
@setRenderMode 'canvas'
this
# Change the slider transition duration (means the time of the transition)
setTransitionDuration: (@transitionDuration) ->
@setRenderMode 'canvas'
this
# Overriding `slide` to support the canvas rendering
slide: (num) ->
@fromSlide = @current
@toSlide = num
@transitionStart = currentTime()
if @ctx and @renderMode is 'canvas'
@startRender()
super num
# clean the canvas
clean: -> @ctx.clearRect 0, 0, @canvas[0].width, @canvas[0].height
# draw an image on the all canvas with the correct ratio
drawImage: (img) ->
{width, height} = @canvas[0]
@ctx.drawImage img, 0, 0, width, width*img.height/img.width
# `_renderId` help to make sure once transition is running
_renderId: 0
# init render loop
startRender: ->
if @transitionFunction.init
@tfdata = @transitionFunction.init this, @fromSlide, @toSlide
@render(++@_renderId, @transitionFunction)
# render loop
render: (id, transitionFunction) ->
now = currentTime()
if id==@_renderId and now >= @transitionStart
progress = Math.min(1, (now - @transitionStart) / @transitionDuration)
if progress == 1
@clean()
@drawImage @images[@toSlide]
else
transitionFunction.render this, @fromSlide, @toSlide, progress, @tfdata
requestAnimationFrame (=>@render(id, transitionFunction)), @canvas[0]
# Exporting global variables
# --------------------------
window.Slider = SliderWithCanvas
window.SliderTransitionFunctions = SliderTransitionFunctions
window.SliderUtils = SliderUtils
|
[
{
"context": "\"local_couchdb_admin_username\"\n password: Coconut.config.get \"local_couchdb_admin_password\"\n compl",
"end": 2726,
"score": 0.7975894808769226,
"start": 2712,
"tag": "PASSWORD",
"value": "Coconut.config"
},
{
"context": "get \"local_couchdb_admin_... | app/_attachments/models/Sync.coffee | ICTatRTI/coconut | 1 | class Sync extends Backbone.Model
  # Backbone lifecycle hook: pin this model to the single fixed "SyncLog"
  # document id and the /sync endpoint, so one log record tracks all syncs.
  initialize: ->
    @set
      _id: "SyncLog"
      url: "/sync"
last_send: =>
return @get("last_send_result")?.history[0]
last_send_time: =>
result = @get("last_send_time") || @last_send?.start_time
if result
return moment(result).fromNow()
else
return "never"
last_get: =>
return @get("last_get_log")
last_get_time: =>
result = @get("last_get_time")
if result
return moment(@get("last_get_time")).fromNow()
else
return "never"
  # Push local results to the cloud server. Two strategies, selected by the
  # "sync_mode" config:
  #   - "couchdb-sync": full CouchDB replication to the cloud database;
  #     calls options.success(response) / options.error().
  #   - "http-post": POSTs each not-yet-sent result individually to the
  #     configured HTTP target, marking each as sent on success.
  sendToCloud: (options) ->
    @fetch
      success: =>
        @log "Sending data to #{Coconut.config.database_name()}"
        switch Coconut.config.get "sync_mode"
          when "couchdb-sync"
            $.couch.replicate(
              Coconut.config.database_name(),
              Coconut.config.cloud_url_with_credentials(),
              success: (response) =>
                @save
                  last_send_result: response
                options.success(response)
              error: ->
                options.error()
            )
          when "http-post"
            resultCollection = new ResultCollection()
            resultCollection.fetch
              success: =>
                notSentResults = resultCollection.notSent()
                # Runs once, after the Nth (last) result POST succeeds.
                saveSyncLog = _.after notSentResults.length, =>
                  @save
                    last_send_time: new Date()
                  Coconut.menuView.update()
                  $(".sync-sent-status").html "a few seconds ago"
                httpPostTarget = Coconut.config.local.httpPostTarget()
                _.each resultCollection.notSent(), (result) =>
                  $.ajax
                    type: "POST"
                    #contentType: "application/json"
                    url: httpPostTarget
                    #data: JSON.stringify(result.toJSON())
                    data: result.toJSON()
                    success: =>
                      # Record where it went; optionally auto-complete the
                      # result when completion_mode is "on-send".
                      result.set "sentTo", httpPostTarget
                      result.set("complete", "true") if Coconut.config.get("completion_mode") is "on-send"
                      result.save()
                      saveSyncLog()
                    error: (error) =>
                      $(".sync-sent-status").html "Error saving to #{httpPostTarget}: #{JSON.stringify(error)}"
log: (message) =>
Coconut.debug message
$(".sync-get-status").html message
$("#message").append message + "<br/>"
# @save
# last_get_log: @get("last_get_log") + message
getFromCloud: (options) =>
@fetch
success: =>
$.couch.login
name: Coconut.config.get "local_couchdb_admin_username"
password: Coconut.config.get "local_couchdb_admin_password"
complete: =>
@log "Updating application design document..."
@replicateDesignDoc
success: =>
@log "Updating user accounts and question sets..."
@replicateApplicationDocs
success: =>
#$.couch.logout()
@log "Finished"
@save
last_get_time: new Date().getTime()
options?.success?()
reload_delay_seconds = 2
@log("Reloading application in #{reload_delay_seconds} seconds")
_.delay document.location.reload, reload_delay_seconds*1000
error: (error) =>
$.couch.logout()
@log "Error updating application: #{error}"
error: (error) =>
$.couch.logout()
@log "Error updating design document"
error: (error) =>
@log "Error logging in as local admin: #{error}, trying to proceed anyway in case we are in admin party"
sendAndGetFromCloud: (options) =>
return
@log "Checking for internet. (Is #{Coconut.config.cloud_url()} is reachable?) Please wait."
$.ajax
# This requires a CORS enabled server to work
url: Coconut.config.cloud_url()
error: (error) =>
@log "ERROR! #{Coconut.config.cloud_url()} is not reachable. Either the internet is not working or the site is down: #{error}"
options?.error()
@save
last_send_error: true
success: =>
@log "#{Coconut.config.cloud_url()} is reachable, so internet is available."
statusChecker = setInterval(@checkStatus(),5000)
@sendToCloud
success: (result) =>
@log "Data sent: #{JSON.stringify result,undefined,2}"
@replicate
success: (result) =>
@log "Data received: #{JSON.stringify result,undefined,2}"
@log "Sync Complete"
@save
last_get_time: new Date().getTime()
options?.success?()
error: =>
@log "Sync fail during get"
options?.error?()
error: (error) =>
@log "Synchronization fail during send: #{error}"
checkStatus: ->
$.ajax
url: "#{Coconut.config.cloud_url()}/_active_tasks"
success: (result) =>
@log result
getNewNotifications: (options) ->
$.couch.db(Coconut.config.database_name()).view "#{Coconut.config.design_doc_name()}/rawNotificationsConvertedToCaseNotifications"
descending: true
include_docs: true
limit: 1
success: (result) ->
mostRecentNotification = result.rows?[0]?.doc.date
url = "#{Coconut.config.cloud_url_with_credentials()}/_design/#{Coconut.config.database_name()}/_view/notifications?&ascending=true&include_docs=true&skip=1"
url += "&startkey=\"#{mostRecentNotification}\"" if mostRecentNotification
healthFacilities = WardHierarchy.allWards district: User.currentUser.get("district")
healthFacilities = [] unless User.currentUser.get("district")?
$.ajax
url: url
dataType: "jsonp"
success: (result) ->
_.each result.rows, (row) ->
notification = row.doc
if _.include(healthFacilities, notification.hf)
result = new Result
question: "Case Notification"
MalariaCaseID: notification.caseid
FacilityName: notification.hf
Shehia: notification.shehia
Name: notification.name
result.save()
notification.hasCaseNotification = true
$.couch.db(Coconut.config.database_name()).saveDoc notification
options.success?()
replicate: (options) ->
@log "Preparing to receive data"
$.couch.login
name: Coconut.config.get "local_couchdb_admin_username"
password: Coconut.config.get "local_couchdb_admin_password"
complete: =>
@log "Receiving data from #{Coconut.config.database_name()}"
$.couch.replicate(
Coconut.config.cloud_url_with_credentials(),
Coconut.config.database_name(),
success: (result) =>
@save
last_get_time: new Date().getTime()
@log "Data received: #{JSON.stringify result,undefined,2}"
options.success()
error: (error) =>
@log "Error receiving data from #{Coconut.config.database_name()}: #{JSON.stringify error}"
options.error()
,
options.replicationArguments
)
error: ->
@log "Unable to login as local admin for replicating the design document (main application), trying to proceed anyway in case we are in admin party."
replicateDesignDoc: (options) =>
@replicate _.extend options,
replicationArguments:
doc_ids: ["_design/#{Backbone.couch_connector.config.ddoc_name}"]
replicateApplicationDocs: (options) =>
# Updating design_doc, users & forms
$.couch.db(Coconut.config.database_name()).view "#{Coconut.config.design_doc_name()}/docIDsForUpdating",
include_docs: false
success: (result) =>
doc_ids = _.pluck result.rows, "id"
doc_ids.push "_design/#{Coconut.config.design_doc_name()}"
doc_ids.push "coconut.config"
@log "Updating #{doc_ids.length} docs (users, forms, configuration and the design document). Please wait."
@replicate _.extend options,
replicationArguments:
doc_ids: doc_ids
| 209122 | class Sync extends Backbone.Model
initialize: ->
@set
_id: "SyncLog"
url: "/sync"
last_send: =>
return @get("last_send_result")?.history[0]
last_send_time: =>
result = @get("last_send_time") || @last_send?.start_time
if result
return moment(result).fromNow()
else
return "never"
last_get: =>
return @get("last_get_log")
last_get_time: =>
result = @get("last_get_time")
if result
return moment(@get("last_get_time")).fromNow()
else
return "never"
sendToCloud: (options) ->
@fetch
success: =>
@log "Sending data to #{Coconut.config.database_name()}"
switch Coconut.config.get "sync_mode"
when "couchdb-sync"
$.couch.replicate(
Coconut.config.database_name(),
Coconut.config.cloud_url_with_credentials(),
success: (response) =>
@save
last_send_result: response
options.success(response)
error: ->
options.error()
)
when "http-post"
resultCollection = new ResultCollection()
resultCollection.fetch
success: =>
notSentResults = resultCollection.notSent()
saveSyncLog = _.after notSentResults.length, =>
@save
last_send_time: new Date()
Coconut.menuView.update()
$(".sync-sent-status").html "a few seconds ago"
httpPostTarget = Coconut.config.local.httpPostTarget()
_.each resultCollection.notSent(), (result) =>
$.ajax
type: "POST"
#contentType: "application/json"
url: httpPostTarget
#data: JSON.stringify(result.toJSON())
data: result.toJSON()
success: =>
result.set "sentTo", httpPostTarget
result.set("complete", "true") if Coconut.config.get("completion_mode") is "on-send"
result.save()
saveSyncLog()
error: (error) =>
$(".sync-sent-status").html "Error saving to #{httpPostTarget}: #{JSON.stringify(error)}"
log: (message) =>
Coconut.debug message
$(".sync-get-status").html message
$("#message").append message + "<br/>"
# @save
# last_get_log: @get("last_get_log") + message
getFromCloud: (options) =>
@fetch
success: =>
$.couch.login
name: Coconut.config.get "local_couchdb_admin_username"
password: <PASSWORD>.get "local_couchdb_admin_password"
complete: =>
@log "Updating application design document..."
@replicateDesignDoc
success: =>
@log "Updating user accounts and question sets..."
@replicateApplicationDocs
success: =>
#$.couch.logout()
@log "Finished"
@save
last_get_time: new Date().getTime()
options?.success?()
reload_delay_seconds = 2
@log("Reloading application in #{reload_delay_seconds} seconds")
_.delay document.location.reload, reload_delay_seconds*1000
error: (error) =>
$.couch.logout()
@log "Error updating application: #{error}"
error: (error) =>
$.couch.logout()
@log "Error updating design document"
error: (error) =>
@log "Error logging in as local admin: #{error}, trying to proceed anyway in case we are in admin party"
sendAndGetFromCloud: (options) =>
return
@log "Checking for internet. (Is #{Coconut.config.cloud_url()} is reachable?) Please wait."
$.ajax
# This requires a CORS enabled server to work
url: Coconut.config.cloud_url()
error: (error) =>
@log "ERROR! #{Coconut.config.cloud_url()} is not reachable. Either the internet is not working or the site is down: #{error}"
options?.error()
@save
last_send_error: true
success: =>
@log "#{Coconut.config.cloud_url()} is reachable, so internet is available."
statusChecker = setInterval(@checkStatus(),5000)
@sendToCloud
success: (result) =>
@log "Data sent: #{JSON.stringify result,undefined,2}"
@replicate
success: (result) =>
@log "Data received: #{JSON.stringify result,undefined,2}"
@log "Sync Complete"
@save
last_get_time: new Date().getTime()
options?.success?()
error: =>
@log "Sync fail during get"
options?.error?()
error: (error) =>
@log "Synchronization fail during send: #{error}"
checkStatus: ->
$.ajax
url: "#{Coconut.config.cloud_url()}/_active_tasks"
success: (result) =>
@log result
getNewNotifications: (options) ->
$.couch.db(Coconut.config.database_name()).view "#{Coconut.config.design_doc_name()}/rawNotificationsConvertedToCaseNotifications"
descending: true
include_docs: true
limit: 1
success: (result) ->
mostRecentNotification = result.rows?[0]?.doc.date
url = "#{Coconut.config.cloud_url_with_credentials()}/_design/#{Coconut.config.database_name()}/_view/notifications?&ascending=true&include_docs=true&skip=1"
url += "&startkey=\"#{mostRecentNotification}\"" if mostRecentNotification
healthFacilities = WardHierarchy.allWards district: User.currentUser.get("district")
healthFacilities = [] unless User.currentUser.get("district")?
$.ajax
url: url
dataType: "jsonp"
success: (result) ->
_.each result.rows, (row) ->
notification = row.doc
if _.include(healthFacilities, notification.hf)
result = new Result
question: "Case Notification"
MalariaCaseID: notification.caseid
FacilityName: notification.hf
Shehia: notification.shehia
Name: notification.name
result.save()
notification.hasCaseNotification = true
$.couch.db(Coconut.config.database_name()).saveDoc notification
options.success?()
replicate: (options) ->
@log "Preparing to receive data"
$.couch.login
name: Coconut.config.get "local_couchdb_admin_username"
password: <PASSWORD> "local_couchdb_admin_password"
complete: =>
@log "Receiving data from #{Coconut.config.database_name()}"
$.couch.replicate(
Coconut.config.cloud_url_with_credentials(),
Coconut.config.database_name(),
success: (result) =>
@save
last_get_time: new Date().getTime()
@log "Data received: #{JSON.stringify result,undefined,2}"
options.success()
error: (error) =>
@log "Error receiving data from #{Coconut.config.database_name()}: #{JSON.stringify error}"
options.error()
,
options.replicationArguments
)
error: ->
@log "Unable to login as local admin for replicating the design document (main application), trying to proceed anyway in case we are in admin party."
replicateDesignDoc: (options) =>
@replicate _.extend options,
replicationArguments:
doc_ids: ["_design/#{Backbone.couch_connector.config.ddoc_name}"]
replicateApplicationDocs: (options) =>
# Updating design_doc, users & forms
$.couch.db(Coconut.config.database_name()).view "#{Coconut.config.design_doc_name()}/docIDsForUpdating",
include_docs: false
success: (result) =>
doc_ids = _.pluck result.rows, "id"
doc_ids.push "_design/#{Coconut.config.design_doc_name()}"
doc_ids.push "coconut.config"
@log "Updating #{doc_ids.length} docs (users, forms, configuration and the design document). Please wait."
@replicate _.extend options,
replicationArguments:
doc_ids: doc_ids
| true | class Sync extends Backbone.Model
initialize: ->
@set
_id: "SyncLog"
url: "/sync"
last_send: =>
return @get("last_send_result")?.history[0]
last_send_time: =>
result = @get("last_send_time") || @last_send?.start_time
if result
return moment(result).fromNow()
else
return "never"
last_get: =>
return @get("last_get_log")
last_get_time: =>
result = @get("last_get_time")
if result
return moment(@get("last_get_time")).fromNow()
else
return "never"
sendToCloud: (options) ->
@fetch
success: =>
@log "Sending data to #{Coconut.config.database_name()}"
switch Coconut.config.get "sync_mode"
when "couchdb-sync"
$.couch.replicate(
Coconut.config.database_name(),
Coconut.config.cloud_url_with_credentials(),
success: (response) =>
@save
last_send_result: response
options.success(response)
error: ->
options.error()
)
when "http-post"
resultCollection = new ResultCollection()
resultCollection.fetch
success: =>
notSentResults = resultCollection.notSent()
saveSyncLog = _.after notSentResults.length, =>
@save
last_send_time: new Date()
Coconut.menuView.update()
$(".sync-sent-status").html "a few seconds ago"
httpPostTarget = Coconut.config.local.httpPostTarget()
_.each resultCollection.notSent(), (result) =>
$.ajax
type: "POST"
#contentType: "application/json"
url: httpPostTarget
#data: JSON.stringify(result.toJSON())
data: result.toJSON()
success: =>
result.set "sentTo", httpPostTarget
result.set("complete", "true") if Coconut.config.get("completion_mode") is "on-send"
result.save()
saveSyncLog()
error: (error) =>
$(".sync-sent-status").html "Error saving to #{httpPostTarget}: #{JSON.stringify(error)}"
log: (message) =>
Coconut.debug message
$(".sync-get-status").html message
$("#message").append message + "<br/>"
# @save
# last_get_log: @get("last_get_log") + message
getFromCloud: (options) =>
@fetch
success: =>
$.couch.login
name: Coconut.config.get "local_couchdb_admin_username"
password: PI:PASSWORD:<PASSWORD>END_PI.get "local_couchdb_admin_password"
complete: =>
@log "Updating application design document..."
@replicateDesignDoc
success: =>
@log "Updating user accounts and question sets..."
@replicateApplicationDocs
success: =>
#$.couch.logout()
@log "Finished"
@save
last_get_time: new Date().getTime()
options?.success?()
reload_delay_seconds = 2
@log("Reloading application in #{reload_delay_seconds} seconds")
_.delay document.location.reload, reload_delay_seconds*1000
error: (error) =>
$.couch.logout()
@log "Error updating application: #{error}"
error: (error) =>
$.couch.logout()
@log "Error updating design document"
error: (error) =>
@log "Error logging in as local admin: #{error}, trying to proceed anyway in case we are in admin party"
sendAndGetFromCloud: (options) =>
return
@log "Checking for internet. (Is #{Coconut.config.cloud_url()} is reachable?) Please wait."
$.ajax
# This requires a CORS enabled server to work
url: Coconut.config.cloud_url()
error: (error) =>
@log "ERROR! #{Coconut.config.cloud_url()} is not reachable. Either the internet is not working or the site is down: #{error}"
options?.error()
@save
last_send_error: true
success: =>
@log "#{Coconut.config.cloud_url()} is reachable, so internet is available."
statusChecker = setInterval(@checkStatus(),5000)
@sendToCloud
success: (result) =>
@log "Data sent: #{JSON.stringify result,undefined,2}"
@replicate
success: (result) =>
@log "Data received: #{JSON.stringify result,undefined,2}"
@log "Sync Complete"
@save
last_get_time: new Date().getTime()
options?.success?()
error: =>
@log "Sync fail during get"
options?.error?()
error: (error) =>
@log "Synchronization fail during send: #{error}"
checkStatus: ->
$.ajax
url: "#{Coconut.config.cloud_url()}/_active_tasks"
success: (result) =>
@log result
getNewNotifications: (options) ->
$.couch.db(Coconut.config.database_name()).view "#{Coconut.config.design_doc_name()}/rawNotificationsConvertedToCaseNotifications"
descending: true
include_docs: true
limit: 1
success: (result) ->
mostRecentNotification = result.rows?[0]?.doc.date
url = "#{Coconut.config.cloud_url_with_credentials()}/_design/#{Coconut.config.database_name()}/_view/notifications?&ascending=true&include_docs=true&skip=1"
url += "&startkey=\"#{mostRecentNotification}\"" if mostRecentNotification
healthFacilities = WardHierarchy.allWards district: User.currentUser.get("district")
healthFacilities = [] unless User.currentUser.get("district")?
$.ajax
url: url
dataType: "jsonp"
success: (result) ->
_.each result.rows, (row) ->
notification = row.doc
if _.include(healthFacilities, notification.hf)
result = new Result
question: "Case Notification"
MalariaCaseID: notification.caseid
FacilityName: notification.hf
Shehia: notification.shehia
Name: notification.name
result.save()
notification.hasCaseNotification = true
$.couch.db(Coconut.config.database_name()).saveDoc notification
options.success?()
replicate: (options) ->
@log "Preparing to receive data"
$.couch.login
name: Coconut.config.get "local_couchdb_admin_username"
password: PI:PASSWORD:<PASSWORD>END_PI "local_couchdb_admin_password"
complete: =>
@log "Receiving data from #{Coconut.config.database_name()}"
$.couch.replicate(
Coconut.config.cloud_url_with_credentials(),
Coconut.config.database_name(),
success: (result) =>
@save
last_get_time: new Date().getTime()
@log "Data received: #{JSON.stringify result,undefined,2}"
options.success()
error: (error) =>
@log "Error receiving data from #{Coconut.config.database_name()}: #{JSON.stringify error}"
options.error()
,
options.replicationArguments
)
error: ->
@log "Unable to login as local admin for replicating the design document (main application), trying to proceed anyway in case we are in admin party."
replicateDesignDoc: (options) =>
@replicate _.extend options,
replicationArguments:
doc_ids: ["_design/#{Backbone.couch_connector.config.ddoc_name}"]
replicateApplicationDocs: (options) =>
# Updating design_doc, users & forms
$.couch.db(Coconut.config.database_name()).view "#{Coconut.config.design_doc_name()}/docIDsForUpdating",
include_docs: false
success: (result) =>
doc_ids = _.pluck result.rows, "id"
doc_ids.push "_design/#{Coconut.config.design_doc_name()}"
doc_ids.push "coconut.config"
@log "Updating #{doc_ids.length} docs (users, forms, configuration and the design document). Please wait."
@replicate _.extend options,
replicationArguments:
doc_ids: doc_ids
|
[
{
"context": ":\n userId: 0\n level: 1\n name: null\n email: null\n isLogin: false\n ",
"end": 165,
"score": 0.5867400169372559,
"start": 161,
"tag": "NAME",
"value": "null"
}
] | application/static/coffeescript/victory.coffee | kelp404/Victory | 15 |
window.victory =
userLevel:
root: 0
normal: 1
loginUrl: ''
logoutUrl: ''
user:
userId: 0
level: 1
name: null
email: null
isLogin: false
isRoot: ->
victory.user.level == victory.userLevel.root
| 90008 |
window.victory =
userLevel:
root: 0
normal: 1
loginUrl: ''
logoutUrl: ''
user:
userId: 0
level: 1
name: <NAME>
email: null
isLogin: false
isRoot: ->
victory.user.level == victory.userLevel.root
| true |
window.victory =
userLevel:
root: 0
normal: 1
loginUrl: ''
logoutUrl: ''
user:
userId: 0
level: 1
name: PI:NAME:<NAME>END_PI
email: null
isLogin: false
isRoot: ->
victory.user.level == victory.userLevel.root
|
[
{
"context": "oGLib\n# Module | Stat Methods\n# Author | Sherif Emabrak\n# Description | The delaunay method returns the D",
"end": 163,
"score": 0.9998741745948792,
"start": 149,
"tag": "NAME",
"value": "Sherif Emabrak"
}
] | src/lib/statistics/link/delaunay.coffee | Sherif-Embarak/gp-test | 0 | # ------------------------------------------------------------------------------
# Project | GoGLib
# Module | Stat Methods
# Author | Sherif Emabrak
# Description | The delaunay method returns the Delaunay triangulation of the nodes,
# which is the triangulationof a set of points that is as close as
# possible to being isosceles, thus minimizing narrow triangles
# ------------------------------------------------------------------------------
delaunay = () -> | 8116 | # ------------------------------------------------------------------------------
# Project | GoGLib
# Module | Stat Methods
# Author | <NAME>
# Description | The delaunay method returns the Delaunay triangulation of the nodes,
# which is the triangulationof a set of points that is as close as
# possible to being isosceles, thus minimizing narrow triangles
# ------------------------------------------------------------------------------
delaunay = () -> | true | # ------------------------------------------------------------------------------
# Project | GoGLib
# Module | Stat Methods
# Author | PI:NAME:<NAME>END_PI
# Description | The delaunay method returns the Delaunay triangulation of the nodes,
# which is the triangulationof a set of points that is as close as
# possible to being isosceles, thus minimizing narrow triangles
# ------------------------------------------------------------------------------
delaunay = () -> |
[
{
"context": "##\n knockback.js 1.2.3\n Copyright (c) 2011-2016 Kevin Malakoff.\n License: MIT (http://www.opensource.org/licens",
"end": 66,
"score": 0.9998343586921692,
"start": 52,
"tag": "NAME",
"value": "Kevin Malakoff"
},
{
"context": "ses/mit-license.php)\n Source: https:... | src/core/kb.coffee | kmalakoff/knockback | 160 | ###
knockback.js 1.2.3
Copyright (c) 2011-2016 Kevin Malakoff.
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/kmalakoff/knockback
Dependencies: Knockout.js, Backbone.js, and Underscore.js (or LoDash.js).
Optional dependencies: Backbone.ModelRef.js and BackboneORM.
###
window = if window? then window else global
ko = require 'knockout'
LIFECYCLE_METHODS = ['release', 'destroy', 'dispose']
# The 'kb' namespace for classes, factory functions, constants, etc.
#
# @method .configure(options)
# Method to update Knockback global configuration.
# @param [Object] configuration options. 1) orm - select the library for relationships (default, backbone-orm, backbone-associations, backbone-relational), 2) deep_retain - true to multiply retain view models in the store
#
# @method .collectionObservable(collection, options)
# Factory to create a new kb.CollectionObservable. See {kb.CollectionObservable#constructor} for information on options
# @param [Collection] collection the collection to observe (can be null)
# @param [Object] options the create options
# @return [ko.observableArray] the constructor does not return 'this' but a ko.observableArray
#
# @method .observable(model, options, view_model)
# Factory to create a new kb.Observable. See {kb.Observable#constructor} for information on options
# @param [Model] model the model to observe (can be null)
# @param [String|Array|Object] options the create options. String is a single attribute name, Array is an array of attribute names.
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
#
# @method .viewModel(model, options, view_model)
# Factory to create a new kb.ViewModel. See {kb.ViewModel#constructor} for information on options
# @param [Model|ModelRef] model the model to observe (can be null)
# @param [Object] options the create options
# @return [ko.observable] the constructor returns 'this'
#
# @method .defaultObservable(target, default_value)
# Factory to create a new kb.DefaultObservable. See {kb.DefaultObservable#constructor} for information on options. If you are using knockback-core or knockback-core-stack, you can include this from the lib/knockback-defaults component.
# @param [ko.observable] target_observable the observable to check for null, undefined, or the empty string
# @param [Any] default_value the default value. Can be a value, string or ko.observable
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
#
# @method .formattedObservable(format, arg1, arg2, etc)
# Factory to create a new kb.FormattedObservable. See {kb.FormattedObservable#constructor} for information on options. If you are using knockback-core or knockback-core-stack, you can include this from the lib/knockback-formatting component.
# @param [String|ko.observable] format the format string. Format: `"{0} and {1}"` where `{0}` and `{1}` would be synchronized with the arguments (eg. "Bob and Carol" where `{0}` is Bob and `{1}` is Carol)
# @param [Array] args arguments to be passed to the kb.LocaleManager's get() method
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
#
# @method .localizedObservable(value, options, view_model)
# Factory to create a new kb.LocalizedObservable. See {kb.LocalizedObservable#constructor} for information on options. If you are using knockback-core or knockback-core-stack, you can include this from the lib/knockback-localization component.
# @param [Data|ko.observable] value the value to localize
# @param [Object] options the create options
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
module.exports = class kb
# Knockback library semantic version
@VERSION: '1.2.3'
####################################
# OBSERVABLE STORAGE TYPES
####################################
# Stored value type is not known like null/undefined (could be observed as a Model or a Collection or a simple type)
@TYPE_UNKNOWN: 0
# Stored value type is simple like a String or Number -> observable type: ko.observable
@TYPE_SIMPLE: 1
# Stored value type is an Array -> observable type: ko.observableArray
@TYPE_ARRAY: 2
# Stored value type is a Model -> observable type: ViewModel
@TYPE_MODEL: 3
# Stored value type is a Collection -> observable type: kb.CollectionObservable
@TYPE_COLLECTION: 4
# Checks if an object has been released.
# @param [Any] obj the object to release and also release its keys
@wasReleased: (obj) -> return not obj or obj.__kb_released
# Checks if an object can be released. Used to perform minimal nested releasing on objects by checking if self or next level contained items can be released.
# @param [Any] obj the object to release and also release its keys
@isReleaseable: (obj, depth=0) ->
return false if (not obj or (obj isnt Object(obj))) or obj.__kb_released # must be an object and not already released
return true if ko.isObservable(obj) or (obj instanceof kb.ViewModel) # a known type that is releasable
return false if (typeof(obj) is 'function') or kb.isModel(obj) or kb.isCollection(obj) # a known type that is not releaseable
return true for method in LIFECYCLE_METHODS when typeof(obj[method]) is 'function' # a releaseable signature
return false if depth > 0 # max depth check for ViewModel inside of ViewModel
return true for key, value of obj when (key isnt '__kb') and kb.isReleaseable(value, depth+1)
return false
# Releases any type of view model or observable or items in an array using the conventions of release(), destroy(), dispose().
# @param [Any] obj the object to release and also release its keys
#
# @example
# var view_model = kb.viewModel(model);
# kb.release(view_model); view_model = null;
# @example
# var todos = kb.collectionObservable(collection);
# kb.release(todos); todos = null;
@release: (obj) ->
return unless kb.isReleaseable(obj)
obj.__kb_released = true # mark as released
# release array's items
if _.isArray(obj)
(obj[index] = null; kb.release(value)) for index, value of obj when kb.isReleaseable(value)
return
# observable or lifecycle managed
if ko.isObservable(obj) and _.isArray(array = kb.peek(obj))
return obj.destroy?() if obj.__kb_is_co or (obj.__kb_is_o and (obj.valueType() is kb.TYPE_COLLECTION))
(array[index] = null; kb.release(value)) for index, value of array when kb.isReleaseable(value)
obj.dispose() if typeof(obj.dispose) is 'function'
return
# releaseable signature
return obj[method].call(obj) for method in LIFECYCLE_METHODS when typeof(obj[method]) is 'function' # a releaseable signature
return @releaseKeys(obj) unless ko.isObservable(obj) # view model
return
# Releases and clears all of the keys on an object using the conventions of release(), destroy(), dispose() without releasing the top level object itself.
@releaseKeys: (obj) ->
(obj[key] = null; kb.release(value)) for key, value of obj when key isnt '__kb' and kb.isReleaseable(value)
return
# Binds a callback to the node that releases the view model when the node is removed using ko.removeNode.
# ```
# ko.utils.domNodeDisposal.addDisposeCallback(node, function() { kb.release(view_model)} );
# ```
# @example The hard way to set up automatic calling of 'kb.release(view_model)' when the bound element is released.
# var el = $('<div data-bind="name: name"></div>')[0];
# var view_model = kb.viewModel(new Backbone.Model({name: 'Bob'}));
# ko.applyBindings(view_model, el);
# kb.releaseOnNodeRemove(view_model, el);
# ...
# ko.removeNode(el); // removes el from the DOM and calls kb.release(view_model)
@releaseOnNodeRemove: (view_model, node) ->
view_model or kb._throwUnexpected(@, 'missing view model')
node or kb._throwUnexpected(@, 'missing node')
ko.utils.domNodeDisposal.addDisposeCallback(node, -> kb.release(view_model))
# Renders a template and binds a callback to the node that releases the view model when the node is removed using ko.removeNode.
#
# NOTE: if you provide an afterRender method on the View Model and do not provide afterRender in the options, afterRender will be called with the following signature: afterRender(element) which differs from the Knockout signture of afterRender(elements)
#
# @example The easy way to set up automatic calling of 'kb.release(view_model)' when the bound element is released.
# var el = kb.renderTemplate('my_template', kb.viewModel(new Backbone.Model({name: 'Bob'})));
# ...
# ko.removeNode(el); // removes el from the DOM and calls kb.release(view_model)
@renderTemplate: (template, view_model, options={}) ->
return console?.log 'renderTemplate: document is undefined' unless document = window?.document
el = document.createElement('div')
observable = ko.renderTemplate(template, view_model, options, el, 'replaceChildren');
if el.childNodes.length is 1 # do not return the template wrapper if possible
el = el.childNodes[0]
else if el.childNodes.length
for i in [0..el.childNodes.length] # ensure the context is passed up to wrapper from a child
try ko.storedBindingContextForNode(el, ko.contextFor(el.childNodes[i])); break catch;
kb.releaseOnNodeRemove(view_model, el)
observable.dispose() # we will handle memory management with ko.removeNode (otherwise creates memory leak on default bound dispose function)
view_model.afterRender(el) if view_model.afterRender and not options.afterRender # call afterRender for custom setup unless provided in options (so doesn't get double called)
return el
# Applies bindings and binds a callback to the node that releases the view model when the node is removed using ko.removeNode.
#
# @example The easy way to set up automatic calling of 'kb.release(view_model)' when the bound element is released.
# var el = $('<div data-bind="name: name"></div>')[0];
# kb.applyBindings(kb.viewModel(new Backbone.Model({name: 'Bob'})), el);
# ...
# ko.removeNode(el); // removes el from the DOM and calls kb.release(view_model)
@applyBindings: (view_model, node) ->
if node.length # convert to a root element
[node, children] = [document.createElement('div'), node]
node.appendChild(child) for child in children
ko.applyBindings(view_model, node)
kb.releaseOnNodeRemove(view_model, node)
return node
@getValue: (model, key, args) ->
return unless model
return model[key]() if _.isFunction(model[key]) and kb.settings.orm?.useFunction(model, key)
return model.get(key) unless args
model.get.apply(model, _.map([key].concat(args), (value) -> kb.peek(value)))
@setValue: (model, key, value) ->
return unless model
return model[key](value) if _.isFunction(model[key]) and kb.settings.orm?.useFunction(model, key)
(attributes = {})[key] = value
model.set(attributes)
# Helper to ignore dependencies in a function
#
# @param [Object] obj the object to test
#
# @example
# kb.ignore(fn);
@ignore = ko.dependencyDetection?.ignore or (callback, callbackTarget, callbackArgs) -> value = null; ko.computed(-> value = callback.apply(callbackTarget, callbackArgs || [])).dispose(); return value
####################################
# INTERNAL HELPERS
####################################
# @nodoc
@extend = require './functions/extend'
# @nodoc
@_throwMissing: (instance, message) -> throw "#{if _.isString(instance) then instance else instance.constructor.name}: #{message} is missing"
# @nodoc
@_throwUnexpected: (instance, message) -> throw "#{if _.isString(instance) then instance else instance.constructor.name}: #{message} is unexpected"
# @nodoc
@publishMethods: (observable, instance, methods) -> observable[fn] = kb._.bind(instance[fn], instance) for fn in methods; return
# @nodoc
@peek: (obs) -> return obs unless ko.isObservable(obs); return obs.peek() if obs.peek; return kb.ignore -> obs()
# @nodoc
@isModel: (obj) -> obj and ((obj instanceof kb.Model) or ((typeof(obj.get) is 'function') and (typeof(obj.bind) is 'function')))
# @nodoc
@isCollection: (obj) -> obj and (obj instanceof kb.Collection)
if window.Parse
Backbone = kb.Parse = window.Parse
_ = kb._ = window.Parse._
else
Backbone = kb.Backbone = require 'backbone'
_ = kb._ = require 'underscore'
kb.ko = ko
# cache local references
kb.Collection = Backbone.Collection
kb.Model = Backbone.Object or Backbone.Model
kb.Events = Backbone.Events
| 216448 | ###
knockback.js 1.2.3
Copyright (c) 2011-2016 <NAME>.
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/kmalakoff/knockback
Dependencies: Knockout.js, Backbone.js, and Underscore.js (or LoDash.js).
Optional dependencies: Backbone.ModelRef.js and BackboneORM.
###
window = if window? then window else global
ko = require 'knockout'
LIFECYCLE_METHODS = ['release', 'destroy', 'dispose']
# The 'kb' namespace for classes, factory functions, constants, etc.
#
# @method .configure(options)
# Method to update Knockback global configuration.
# @param [Object] configuration options. 1) orm - select the library for relationships (default, backbone-orm, backbone-associations, backbone-relational), 2) deep_retain - true to multiply retain view models in the store
#
# @method .collectionObservable(collection, options)
# Factory to create a new kb.CollectionObservable. See {kb.CollectionObservable#constructor} for information on options
# @param [Collection] collection the collection to observe (can be null)
# @param [Object] options the create options
# @return [ko.observableArray] the constructor does not return 'this' but a ko.observableArray
#
# @method .observable(model, options, view_model)
# Factory to create a new kb.Observable. See {kb.Observable#constructor} for information on options
# @param [Model] model the model to observe (can be null)
# @param [String|Array|Object] options the create options. String is a single attribute name, Array is an array of attribute names.
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
#
# @method .viewModel(model, options, view_model)
# Factory to create a new kb.ViewModel. See {kb.ViewModel#constructor} for information on options
# @param [Model|ModelRef] model the model to observe (can be null)
# @param [Object] options the create options
# @return [ko.observable] the constructor returns 'this'
#
# @method .defaultObservable(target, default_value)
# Factory to create a new kb.DefaultObservable. See {kb.DefaultObservable#constructor} for information on options. If you are using knockback-core or knockback-core-stack, you can include this from the lib/knockback-defaults component.
# @param [ko.observable] target_observable the observable to check for null, undefined, or the empty string
# @param [Any] default_value the default value. Can be a value, string or ko.observable
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
#
# @method .formattedObservable(format, arg1, arg2, etc)
# Factory to create a new kb.FormattedObservable. See {kb.FormattedObservable#constructor} for information on options. If you are using knockback-core or knockback-core-stack, you can include this from the lib/knockback-formatting component.
# @param [String|ko.observable] format the format string. Format: `"{0} and {1}"` where `{0}` and `{1}` would be synchronized with the arguments (eg. "<NAME> and <NAME>" where `{0}` is <NAME> and `{1}` is <NAME>)
# @param [Array] args arguments to be passed to the kb.LocaleManager's get() method
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
#
# @method .localizedObservable(value, options, view_model)
# Factory to create a new kb.LocalizedObservable. See {kb.LocalizedObservable#constructor} for information on options. If you are using knockback-core or knockback-core-stack, you can include this from the lib/knockback-localization component.
# @param [Data|ko.observable] value the value to localize
# @param [Object] options the create options
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
module.exports = class kb
# Knockback library semantic version
@VERSION: '1.2.3'
####################################
# OBSERVABLE STORAGE TYPES
####################################
# Stored value type is not known like null/undefined (could be observed as a Model or a Collection or a simple type)
@TYPE_UNKNOWN: 0
# Stored value type is simple like a String or Number -> observable type: ko.observable
@TYPE_SIMPLE: 1
# Stored value type is an Array -> observable type: ko.observableArray
@TYPE_ARRAY: 2
# Stored value type is a Model -> observable type: ViewModel
@TYPE_MODEL: 3
# Stored value type is a Collection -> observable type: kb.CollectionObservable
@TYPE_COLLECTION: 4
# Checks if an object has been released.
# @param [Any] obj the object to release and also release its keys
@wasReleased: (obj) -> return not obj or obj.__kb_released
# Checks if an object can be released. Used to perform minimal nested releasing on objects by checking if self or next level contained items can be released.
# @param [Any] obj the object to release and also release its keys
@isReleaseable: (obj, depth=0) ->
return false if (not obj or (obj isnt Object(obj))) or obj.__kb_released # must be an object and not already released
return true if ko.isObservable(obj) or (obj instanceof kb.ViewModel) # a known type that is releasable
return false if (typeof(obj) is 'function') or kb.isModel(obj) or kb.isCollection(obj) # a known type that is not releaseable
return true for method in LIFECYCLE_METHODS when typeof(obj[method]) is 'function' # a releaseable signature
return false if depth > 0 # max depth check for ViewModel inside of ViewModel
return true for key, value of obj when (key isnt '__kb') and kb.isReleaseable(value, depth+1)
return false
# Releases any type of view model or observable or items in an array using the conventions of release(), destroy(), dispose().
# @param [Any] obj the object to release and also release its keys
#
# @example
# var view_model = kb.viewModel(model);
# kb.release(view_model); view_model = null;
# @example
# var todos = kb.collectionObservable(collection);
# kb.release(todos); todos = null;
@release: (obj) ->
return unless kb.isReleaseable(obj)
obj.__kb_released = true # mark as released
# release array's items
if _.isArray(obj)
(obj[index] = null; kb.release(value)) for index, value of obj when kb.isReleaseable(value)
return
# observable or lifecycle managed
if ko.isObservable(obj) and _.isArray(array = kb.peek(obj))
return obj.destroy?() if obj.__kb_is_co or (obj.__kb_is_o and (obj.valueType() is kb.TYPE_COLLECTION))
(array[index] = null; kb.release(value)) for index, value of array when kb.isReleaseable(value)
obj.dispose() if typeof(obj.dispose) is 'function'
return
# releaseable signature
return obj[method].call(obj) for method in LIFECYCLE_METHODS when typeof(obj[method]) is 'function' # a releaseable signature
return @releaseKeys(obj) unless ko.isObservable(obj) # view model
return
# Releases and clears all of the keys on an object using the conventions of release(), destroy(), dispose() without releasing the top level object itself.
@releaseKeys: (obj) ->
(obj[key] = null; kb.release(value)) for key, value of obj when key isnt '__kb' and kb.isReleaseable(value)
return
# Binds a callback to the node that releases the view model when the node is removed using ko.removeNode.
# ```
# ko.utils.domNodeDisposal.addDisposeCallback(node, function() { kb.release(view_model)} );
# ```
# @example The hard way to set up automatic calling of 'kb.release(view_model)' when the bound element is released.
# var el = $('<div data-bind="name: name"></div>')[0];
# var view_model = kb.viewModel(new Backbone.Model({name: '<NAME>'}));
# ko.applyBindings(view_model, el);
# kb.releaseOnNodeRemove(view_model, el);
# ...
# ko.removeNode(el); // removes el from the DOM and calls kb.release(view_model)
@releaseOnNodeRemove: (view_model, node) ->
view_model or kb._throwUnexpected(@, 'missing view model')
node or kb._throwUnexpected(@, 'missing node')
ko.utils.domNodeDisposal.addDisposeCallback(node, -> kb.release(view_model))
# Renders a template and binds a callback to the node that releases the view model when the node is removed using ko.removeNode.
#
# NOTE: if you provide an afterRender method on the View Model and do not provide afterRender in the options, afterRender will be called with the following signature: afterRender(element) which differs from the Knockout signture of afterRender(elements)
#
# @example The easy way to set up automatic calling of 'kb.release(view_model)' when the bound element is released.
# var el = kb.renderTemplate('my_template', kb.viewModel(new Backbone.Model({name: '<NAME>'})));
# ...
# ko.removeNode(el); // removes el from the DOM and calls kb.release(view_model)
@renderTemplate: (template, view_model, options={}) ->
return console?.log 'renderTemplate: document is undefined' unless document = window?.document
el = document.createElement('div')
observable = ko.renderTemplate(template, view_model, options, el, 'replaceChildren');
if el.childNodes.length is 1 # do not return the template wrapper if possible
el = el.childNodes[0]
else if el.childNodes.length
for i in [0..el.childNodes.length] # ensure the context is passed up to wrapper from a child
try ko.storedBindingContextForNode(el, ko.contextFor(el.childNodes[i])); break catch;
kb.releaseOnNodeRemove(view_model, el)
observable.dispose() # we will handle memory management with ko.removeNode (otherwise creates memory leak on default bound dispose function)
view_model.afterRender(el) if view_model.afterRender and not options.afterRender # call afterRender for custom setup unless provided in options (so doesn't get double called)
return el
# Applies bindings and binds a callback to the node that releases the view model when the node is removed using ko.removeNode.
#
# @example The easy way to set up automatic calling of 'kb.release(view_model)' when the bound element is released.
# var el = $('<div data-bind="name: name"></div>')[0];
# kb.applyBindings(kb.viewModel(new Backbone.Model({name: '<NAME>'})), el);
# ...
# ko.removeNode(el); // removes el from the DOM and calls kb.release(view_model)
@applyBindings: (view_model, node) ->
if node.length # convert to a root element
[node, children] = [document.createElement('div'), node]
node.appendChild(child) for child in children
ko.applyBindings(view_model, node)
kb.releaseOnNodeRemove(view_model, node)
return node
@getValue: (model, key, args) ->
return unless model
return model[key]() if _.isFunction(model[key]) and kb.settings.orm?.useFunction(model, key)
return model.get(key) unless args
model.get.apply(model, _.map([key].concat(args), (value) -> kb.peek(value)))
@setValue: (model, key, value) ->
return unless model
return model[key](value) if _.isFunction(model[key]) and kb.settings.orm?.useFunction(model, key)
(attributes = {})[key] = value
model.set(attributes)
# Helper to ignore dependencies in a function
#
# @param [Object] obj the object to test
#
# @example
# kb.ignore(fn);
@ignore = ko.dependencyDetection?.ignore or (callback, callbackTarget, callbackArgs) -> value = null; ko.computed(-> value = callback.apply(callbackTarget, callbackArgs || [])).dispose(); return value
####################################
# INTERNAL HELPERS
####################################
# @nodoc
@extend = require './functions/extend'
# @nodoc
@_throwMissing: (instance, message) -> throw "#{if _.isString(instance) then instance else instance.constructor.name}: #{message} is missing"
# @nodoc
@_throwUnexpected: (instance, message) -> throw "#{if _.isString(instance) then instance else instance.constructor.name}: #{message} is unexpected"
# @nodoc
@publishMethods: (observable, instance, methods) -> observable[fn] = kb._.bind(instance[fn], instance) for fn in methods; return
# @nodoc
@peek: (obs) -> return obs unless ko.isObservable(obs); return obs.peek() if obs.peek; return kb.ignore -> obs()
# @nodoc
@isModel: (obj) -> obj and ((obj instanceof kb.Model) or ((typeof(obj.get) is 'function') and (typeof(obj.bind) is 'function')))
# @nodoc
@isCollection: (obj) -> obj and (obj instanceof kb.Collection)
if window.Parse
Backbone = kb.Parse = window.Parse
_ = kb._ = window.Parse._
else
Backbone = kb.Backbone = require 'backbone'
_ = kb._ = require 'underscore'
kb.ko = ko
# cache local references
kb.Collection = Backbone.Collection
kb.Model = Backbone.Object or Backbone.Model
kb.Events = Backbone.Events
| true | ###
knockback.js 1.2.3
Copyright (c) 2011-2016 PI:NAME:<NAME>END_PI.
License: MIT (http://www.opensource.org/licenses/mit-license.php)
Source: https://github.com/kmalakoff/knockback
Dependencies: Knockout.js, Backbone.js, and Underscore.js (or LoDash.js).
Optional dependencies: Backbone.ModelRef.js and BackboneORM.
###
window = if window? then window else global
ko = require 'knockout'
LIFECYCLE_METHODS = ['release', 'destroy', 'dispose']
# The 'kb' namespace for classes, factory functions, constants, etc.
#
# @method .configure(options)
# Method to update Knockback global configuration.
# @param [Object] configuration options. 1) orm - select the library for relationships (default, backbone-orm, backbone-associations, backbone-relational), 2) deep_retain - true to multiply retain view models in the store
#
# @method .collectionObservable(collection, options)
# Factory to create a new kb.CollectionObservable. See {kb.CollectionObservable#constructor} for information on options
# @param [Collection] collection the collection to observe (can be null)
# @param [Object] options the create options
# @return [ko.observableArray] the constructor does not return 'this' but a ko.observableArray
#
# @method .observable(model, options, view_model)
# Factory to create a new kb.Observable. See {kb.Observable#constructor} for information on options
# @param [Model] model the model to observe (can be null)
# @param [String|Array|Object] options the create options. String is a single attribute name, Array is an array of attribute names.
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
#
# @method .viewModel(model, options, view_model)
# Factory to create a new kb.ViewModel. See {kb.ViewModel#constructor} for information on options
# @param [Model|ModelRef] model the model to observe (can be null)
# @param [Object] options the create options
# @return [ko.observable] the constructor returns 'this'
#
# @method .defaultObservable(target, default_value)
# Factory to create a new kb.DefaultObservable. See {kb.DefaultObservable#constructor} for information on options. If you are using knockback-core or knockback-core-stack, you can include this from the lib/knockback-defaults component.
# @param [ko.observable] target_observable the observable to check for null, undefined, or the empty string
# @param [Any] default_value the default value. Can be a value, string or ko.observable
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
#
# @method .formattedObservable(format, arg1, arg2, etc)
# Factory to create a new kb.FormattedObservable. See {kb.FormattedObservable#constructor} for information on options. If you are using knockback-core or knockback-core-stack, you can include this from the lib/knockback-formatting component.
# @param [String|ko.observable] format the format string. Format: `"{0} and {1}"` where `{0}` and `{1}` would be synchronized with the arguments (eg. "PI:NAME:<NAME>END_PI and PI:NAME:<NAME>END_PI" where `{0}` is PI:NAME:<NAME>END_PI and `{1}` is PI:NAME:<NAME>END_PI)
# @param [Array] args arguments to be passed to the kb.LocaleManager's get() method
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
#
# @method .localizedObservable(value, options, view_model)
# Factory to create a new kb.LocalizedObservable. See {kb.LocalizedObservable#constructor} for information on options. If you are using knockback-core or knockback-core-stack, you can include this from the lib/knockback-localization component.
# @param [Data|ko.observable] value the value to localize
# @param [Object] options the create options
# @return [ko.observable] the constructor does not return 'this' but a ko.observable
module.exports = class kb
# Knockback library semantic version
@VERSION: '1.2.3'
####################################
# OBSERVABLE STORAGE TYPES
####################################
# Stored value type is not known like null/undefined (could be observed as a Model or a Collection or a simple type)
@TYPE_UNKNOWN: 0
# Stored value type is simple like a String or Number -> observable type: ko.observable
@TYPE_SIMPLE: 1
# Stored value type is an Array -> observable type: ko.observableArray
@TYPE_ARRAY: 2
# Stored value type is a Model -> observable type: ViewModel
@TYPE_MODEL: 3
# Stored value type is a Collection -> observable type: kb.CollectionObservable
@TYPE_COLLECTION: 4
# Checks if an object has been released.
# @param [Any] obj the object to release and also release its keys
@wasReleased: (obj) -> return not obj or obj.__kb_released
# Checks if an object can be released. Used to perform minimal nested releasing on objects by checking if self or next level contained items can be released.
# @param [Any] obj the object to release and also release its keys
@isReleaseable: (obj, depth=0) ->
return false if (not obj or (obj isnt Object(obj))) or obj.__kb_released # must be an object and not already released
return true if ko.isObservable(obj) or (obj instanceof kb.ViewModel) # a known type that is releasable
return false if (typeof(obj) is 'function') or kb.isModel(obj) or kb.isCollection(obj) # a known type that is not releaseable
return true for method in LIFECYCLE_METHODS when typeof(obj[method]) is 'function' # a releaseable signature
return false if depth > 0 # max depth check for ViewModel inside of ViewModel
return true for key, value of obj when (key isnt '__kb') and kb.isReleaseable(value, depth+1)
return false
# Releases any type of view model or observable or items in an array using the conventions of release(), destroy(), dispose().
# @param [Any] obj the object to release and also release its keys
#
# @example
# var view_model = kb.viewModel(model);
# kb.release(view_model); view_model = null;
# @example
# var todos = kb.collectionObservable(collection);
# kb.release(todos); todos = null;
@release: (obj) ->
return unless kb.isReleaseable(obj)
obj.__kb_released = true # mark as released
# release array's items
if _.isArray(obj)
(obj[index] = null; kb.release(value)) for index, value of obj when kb.isReleaseable(value)
return
# observable or lifecycle managed
if ko.isObservable(obj) and _.isArray(array = kb.peek(obj))
return obj.destroy?() if obj.__kb_is_co or (obj.__kb_is_o and (obj.valueType() is kb.TYPE_COLLECTION))
(array[index] = null; kb.release(value)) for index, value of array when kb.isReleaseable(value)
obj.dispose() if typeof(obj.dispose) is 'function'
return
# releaseable signature
return obj[method].call(obj) for method in LIFECYCLE_METHODS when typeof(obj[method]) is 'function' # a releaseable signature
return @releaseKeys(obj) unless ko.isObservable(obj) # view model
return
# Releases and clears all of the keys on an object using the conventions of release(), destroy(), dispose() without releasing the top level object itself.
@releaseKeys: (obj) ->
(obj[key] = null; kb.release(value)) for key, value of obj when key isnt '__kb' and kb.isReleaseable(value)
return
# Binds a callback to the node that releases the view model when the node is removed using ko.removeNode.
# ```
# ko.utils.domNodeDisposal.addDisposeCallback(node, function() { kb.release(view_model)} );
# ```
# @example The hard way to set up automatic calling of 'kb.release(view_model)' when the bound element is released.
# var el = $('<div data-bind="name: name"></div>')[0];
# var view_model = kb.viewModel(new Backbone.Model({name: 'PI:NAME:<NAME>END_PI'}));
# ko.applyBindings(view_model, el);
# kb.releaseOnNodeRemove(view_model, el);
# ...
# ko.removeNode(el); // removes el from the DOM and calls kb.release(view_model)
@releaseOnNodeRemove: (view_model, node) ->
view_model or kb._throwUnexpected(@, 'missing view model')
node or kb._throwUnexpected(@, 'missing node')
ko.utils.domNodeDisposal.addDisposeCallback(node, -> kb.release(view_model))
# Renders a template and binds a callback to the node that releases the view model when the node is removed using ko.removeNode.
#
# NOTE: if you provide an afterRender method on the View Model and do not provide afterRender in the options, afterRender will be called with the following signature: afterRender(element) which differs from the Knockout signture of afterRender(elements)
#
# @example The easy way to set up automatic calling of 'kb.release(view_model)' when the bound element is released.
# var el = kb.renderTemplate('my_template', kb.viewModel(new Backbone.Model({name: 'PI:NAME:<NAME>END_PI'})));
# ...
# ko.removeNode(el); // removes el from the DOM and calls kb.release(view_model)
@renderTemplate: (template, view_model, options={}) ->
return console?.log 'renderTemplate: document is undefined' unless document = window?.document
el = document.createElement('div')
observable = ko.renderTemplate(template, view_model, options, el, 'replaceChildren');
if el.childNodes.length is 1 # do not return the template wrapper if possible
el = el.childNodes[0]
else if el.childNodes.length
for i in [0..el.childNodes.length] # ensure the context is passed up to wrapper from a child
try ko.storedBindingContextForNode(el, ko.contextFor(el.childNodes[i])); break catch;
kb.releaseOnNodeRemove(view_model, el)
observable.dispose() # we will handle memory management with ko.removeNode (otherwise creates memory leak on default bound dispose function)
view_model.afterRender(el) if view_model.afterRender and not options.afterRender # call afterRender for custom setup unless provided in options (so doesn't get double called)
return el
# Applies bindings and binds a callback to the node that releases the view model when the node is removed using ko.removeNode.
#
# @example The easy way to set up automatic calling of 'kb.release(view_model)' when the bound element is released.
# var el = $('<div data-bind="name: name"></div>')[0];
# kb.applyBindings(kb.viewModel(new Backbone.Model({name: 'PI:NAME:<NAME>END_PI'})), el);
# ...
# ko.removeNode(el); // removes el from the DOM and calls kb.release(view_model)
@applyBindings: (view_model, node) ->
if node.length # convert to a root element
[node, children] = [document.createElement('div'), node]
node.appendChild(child) for child in children
ko.applyBindings(view_model, node)
kb.releaseOnNodeRemove(view_model, node)
return node
@getValue: (model, key, args) ->
return unless model
return model[key]() if _.isFunction(model[key]) and kb.settings.orm?.useFunction(model, key)
return model.get(key) unless args
model.get.apply(model, _.map([key].concat(args), (value) -> kb.peek(value)))
@setValue: (model, key, value) ->
return unless model
return model[key](value) if _.isFunction(model[key]) and kb.settings.orm?.useFunction(model, key)
(attributes = {})[key] = value
model.set(attributes)
# Helper to ignore dependencies in a function
#
# @param [Object] obj the object to test
#
# @example
# kb.ignore(fn);
@ignore = ko.dependencyDetection?.ignore or (callback, callbackTarget, callbackArgs) -> value = null; ko.computed(-> value = callback.apply(callbackTarget, callbackArgs || [])).dispose(); return value
####################################
# INTERNAL HELPERS
####################################
# @nodoc
@extend = require './functions/extend'
# @nodoc
@_throwMissing: (instance, message) -> throw "#{if _.isString(instance) then instance else instance.constructor.name}: #{message} is missing"
# @nodoc
@_throwUnexpected: (instance, message) -> throw "#{if _.isString(instance) then instance else instance.constructor.name}: #{message} is unexpected"
# @nodoc
@publishMethods: (observable, instance, methods) -> observable[fn] = kb._.bind(instance[fn], instance) for fn in methods; return
# @nodoc
@peek: (obs) -> return obs unless ko.isObservable(obs); return obs.peek() if obs.peek; return kb.ignore -> obs()
# @nodoc
@isModel: (obj) -> obj and ((obj instanceof kb.Model) or ((typeof(obj.get) is 'function') and (typeof(obj.bind) is 'function')))
# @nodoc
@isCollection: (obj) -> obj and (obj instanceof kb.Collection)
if window.Parse
Backbone = kb.Parse = window.Parse
_ = kb._ = window.Parse._
else
Backbone = kb.Backbone = require 'backbone'
_ = kb._ = require 'underscore'
kb.ko = ko
# cache local references
kb.Collection = Backbone.Collection
kb.Model = Backbone.Object or Backbone.Model
kb.Events = Backbone.Events
|
[
{
"context": "esponse) ->\n\tfogbugz.SearchCases({q: 'assignedTo:\"Claudio Wilson\"', max : 20, cols :'sTitle'}, request.cookies.tok",
"end": 804,
"score": 0.9996426701545715,
"start": 790,
"tag": "NAME",
"value": "Claudio Wilson"
}
] | test/test.coffee | claudiowilson/FogbugzJS | 1 | express = require 'express'
fogbugz = require './../../lib/fogbugz'
settings = require './settings'
app = express()
app.configure( ->
app.use(express.logger('dev'))
app.use(express.bodyParser())
app.use(express.cookieParser('testadoodle'))
)
fogbugz.SetURL(settings.fogbugzURL)
app.get('/', (request, response) ->
fogbugz.LogOn(settings.fogbugzUser, settings.fogbugzPassword, (error, token) ->
expiry = new Date();
expiry.setMonth(expiry.getMonth() + 1)
response.cookie('token', token, {expires : expiry, httpOnly:true})
fogbugz.ListProjects({'fWrite': true, 'ixProject': 1, 'fIncludeDeleted': 1}, token, (err, result) ->
if err then console.log(err.message) else response.send(200)
)
)
)
app.get('/cases', (request, response) ->
fogbugz.SearchCases({q: 'assignedTo:"Claudio Wilson"', max : 20, cols :'sTitle'}, request.cookies.token, (err, result) ->
if err then console.log(err.message) else console.log(result); response.send(200)
)
)
app.get('/views', (request, response) ->
fogbugz.ViewPerson({ixPerson:2}, request.cookies.token, (err, result) ->
if err then console.log(err.message) else console.log(result)
)
)
app.listen(3000)
console.log('Listening on port 3000....') | 150691 | express = require 'express'
fogbugz = require './../../lib/fogbugz'
settings = require './settings'
app = express()
app.configure( ->
app.use(express.logger('dev'))
app.use(express.bodyParser())
app.use(express.cookieParser('testadoodle'))
)
fogbugz.SetURL(settings.fogbugzURL)
app.get('/', (request, response) ->
fogbugz.LogOn(settings.fogbugzUser, settings.fogbugzPassword, (error, token) ->
expiry = new Date();
expiry.setMonth(expiry.getMonth() + 1)
response.cookie('token', token, {expires : expiry, httpOnly:true})
fogbugz.ListProjects({'fWrite': true, 'ixProject': 1, 'fIncludeDeleted': 1}, token, (err, result) ->
if err then console.log(err.message) else response.send(200)
)
)
)
app.get('/cases', (request, response) ->
fogbugz.SearchCases({q: 'assignedTo:"<NAME>"', max : 20, cols :'sTitle'}, request.cookies.token, (err, result) ->
if err then console.log(err.message) else console.log(result); response.send(200)
)
)
app.get('/views', (request, response) ->
fogbugz.ViewPerson({ixPerson:2}, request.cookies.token, (err, result) ->
if err then console.log(err.message) else console.log(result)
)
)
app.listen(3000)
console.log('Listening on port 3000....') | true | express = require 'express'
fogbugz = require './../../lib/fogbugz'
settings = require './settings'
app = express()
app.configure( ->
app.use(express.logger('dev'))
app.use(express.bodyParser())
app.use(express.cookieParser('testadoodle'))
)
fogbugz.SetURL(settings.fogbugzURL)
app.get('/', (request, response) ->
fogbugz.LogOn(settings.fogbugzUser, settings.fogbugzPassword, (error, token) ->
expiry = new Date();
expiry.setMonth(expiry.getMonth() + 1)
response.cookie('token', token, {expires : expiry, httpOnly:true})
fogbugz.ListProjects({'fWrite': true, 'ixProject': 1, 'fIncludeDeleted': 1}, token, (err, result) ->
if err then console.log(err.message) else response.send(200)
)
)
)
app.get('/cases', (request, response) ->
fogbugz.SearchCases({q: 'assignedTo:"PI:NAME:<NAME>END_PI"', max : 20, cols :'sTitle'}, request.cookies.token, (err, result) ->
if err then console.log(err.message) else console.log(result); response.send(200)
)
)
app.get('/views', (request, response) ->
fogbugz.ViewPerson({ixPerson:2}, request.cookies.token, (err, result) ->
if err then console.log(err.message) else console.log(result)
)
)
app.listen(3000)
console.log('Listening on port 3000....') |
[
{
"context": "========\n\nsection 'String interpolation'\n\nname = 'Dr Groot'\ntitle = 'plant matter expert'\n\nprint \"You know, ",
"end": 274,
"score": 0.7523558735847473,
"start": 266,
"tag": "NAME",
"value": "Dr Groot"
},
{
"context": "n 'Existential operator'\n\nobj1 = artist:\n... | demo/site/coffee/main.coffee | feihong/jingtai | 0 | $content = $('#content')
section = (title) -> $('<h4>').text(title).appendTo($content)
print = (text) -> $('<p>').text(text).appendTo($content)
#=============================================================================
section 'String interpolation'
name = 'Dr Groot'
title = 'plant matter expert'
print "You know, #{name} is our resident #{title}."
section 'Comprehensions'
numbers = [1, 3, 5, 6, 7, 8, 9, 11, 12, 14, 16, 17]
print [(n*2 + 1) for n in numbers when n > 10]
section 'Existential operator'
obj1 = artist:
name: 'Marty'
latestAlbum:
title: 'Back in Time'
print obj1.artist?.latestAlbum?.title
obj2 = artist:
name: 'Marty'
latestAlbum: null
print obj2.artist?.latestAlbum?.title
| 61390 | $content = $('#content')
section = (title) -> $('<h4>').text(title).appendTo($content)
print = (text) -> $('<p>').text(text).appendTo($content)
#=============================================================================
section 'String interpolation'
name = '<NAME>'
title = 'plant matter expert'
print "You know, #{name} is our resident #{title}."
section 'Comprehensions'
numbers = [1, 3, 5, 6, 7, 8, 9, 11, 12, 14, 16, 17]
print [(n*2 + 1) for n in numbers when n > 10]
section 'Existential operator'
obj1 = artist:
name: '<NAME>'
latestAlbum:
title: 'Back in Time'
print obj1.artist?.latestAlbum?.title
obj2 = artist:
name: '<NAME>'
latestAlbum: null
print obj2.artist?.latestAlbum?.title
| true | $content = $('#content')
section = (title) -> $('<h4>').text(title).appendTo($content)
print = (text) -> $('<p>').text(text).appendTo($content)
#=============================================================================
section 'String interpolation'
name = 'PI:NAME:<NAME>END_PI'
title = 'plant matter expert'
print "You know, #{name} is our resident #{title}."
section 'Comprehensions'
numbers = [1, 3, 5, 6, 7, 8, 9, 11, 12, 14, 16, 17]
print [(n*2 + 1) for n in numbers when n > 10]
section 'Existential operator'
obj1 = artist:
name: 'PI:NAME:<NAME>END_PI'
latestAlbum:
title: 'Back in Time'
print obj1.artist?.latestAlbum?.title
obj2 = artist:
name: 'PI:NAME:<NAME>END_PI'
latestAlbum: null
print obj2.artist?.latestAlbum?.title
|
[
{
"context": "me format, format should be in the form of email : user@sample.com'\n\n\n LANG.errors.extension[VALID.ERROR.",
"end": 1983,
"score": 0.9999197125434875,
"start": 1968,
"tag": "EMAIL",
"value": "user@sample.com"
}
] | modules/wizard_edit_form/wizard_edit_form.coffee | signonsridhar/sridhar_hbs | 0 | define(['bases/control',
'_',
'modules/dt_dialog/dt_dialog',
'modules/number_selector_search_form/number_selector_search_form',
'models/phone_number/phone_number',
'models/phone/phone',
'modules/phone_selector_search_form/phone_selector_search_form',
'models/directory/directory',
'models/auth/auth'
], (BaseControl,_, DTDialog, NumberSelectorSearchForm,PhoneNumber,Phone,PhoneSelectorSearchForm,Directory,Auth)->
BaseControl.extend({
LANG: (controller)->
LANG = {
errors:{
first_name:{}
last_name:{}
email:{}
extension:{}
}
}
LANG.errors.first_name[VALID.ERROR.SIZE] = 'must be between 2-40 characters'
LANG.errors.first_name[VALID.ERROR.REQUIRED] = 'first name is required'
LANG.errors.first_name[VALID.ERROR.FORMAT] = "must be 2 to 40 characters alphanumeric, and may contain the following special chars: . , & ( ) ! ? - @ '"
LANG.errors.last_name[VALID.ERROR.SIZE] = 'must be between 2-40 characters'
LANG.errors.last_name[VALID.ERROR.REQUIRED] = 'last name is required'
LANG.errors.last_name[VALID.ERROR.FORMAT] = "must be 2 to 40 characters alphanumeric, and may contain the following special chars: . , & ( ) ! ? - @ '"
LANG.errors.email[VALID.ERROR.REQUIRED] = 'email is required'
LANG.errors.email[VALID.ERROR.SIZE] = 'must be a valid email address with 3 to 70 characters'
LANG.errors.email[VALID.ERROR.FORMAT] = 'must be alphanumeric, must have @ and period, must be 3 to 70 chars, and may contain following special chars: - . _ +'
LANG.errors.email[VALID.ERROR.UNIQUE] = 'this email already exists'
LANG.errors.email[VALID.ERROR.INVALID] = 'Invalid user name format, format should be in the form of email : user@sample.com'
LANG.errors.extension[VALID.ERROR.REQUIRED] = 'extension is required'
LANG
},{
init:(elem, options)->
this.setup_viewmodel(
this.populate_proxy_helper(options)
)
this.render('wizard_edit_form/wizard_edit_form')
this.bind_view(this.viewmodel)
this.on()
this.set_validity(false)
this.loc_num_search = new NumberSelectorSearchForm($('<div class="local_number_selector_search_form_container_js"></div>'),
{
'partnerid': options.phone_options.partnerid, country: 'US'
})
this.loc_dialog = new DTDialog(this.element.find('.local_num_container_js'),
{
content: this.loc_num_search.element,
settings: {
height: 437,
width: 700,
autoOpen: false,
modal: true,
buttons:{
"Select Number": => this.local_select_number_button()
Cancel: => this.local_cancel_button()
}
}
})
this.loc_dialog.show_hide_title(false)
this.phone_search = new PhoneSelectorSearchForm($('<div class="phone_selector_search_form_container_js"></div>'),
{
'partnerid': options.phone_options.partnerid, country: 'US'
})
this.phone_dialog = new DTDialog(this.element.find('.phone_container_js'),
{
content: this.phone_search.element,
settings: {
height: 237,
width: 400,
autoOpen: false,
modal: true,
buttons:{
"Select Phone": => this.select_phone_button()
}
}
})
this.phone_dialog.show_hide_title(false)
'.main_click_js click': () ->
this.loc_dialog.open_dialog()
this.refresh_local()
'.phone_options_click_js click': () ->
this.phone_dialog.open_dialog()
this.phone_search.refresh()
populate_proxy_helper:(options)->
proxy_item ={}
item = options.bundle
proxy_item.first_name = item.attr('user.first_name')
proxy_item.last_name = item.attr('user.last_name')
proxy_item.email = item.attr('user.email')
proxy_item.extension_number = item.attr('extensions.0.extension_number')
proxy_item.device_name = item.attr('extensions.0.devices.0.device_name')
proxy_item.phone_number = item.attr('extensions.0.phone_numbers.0.phonenumber')
proxy_item.old_didid = item.attr('extensions.0.phone_numbers.0.didid')
#disable first row of wizard admin
if(!options.index && can.route.attr('main') == 'wizard')
proxy_item.admin_disabled = 'disabled'
proxy_item
'{viewmodel} first_name change':()->
this.options.bundle.attr('user.first_name',this.viewmodel.attr('first_name'))
'{viewmodel} last_name change':()->
this.options.bundle.attr('user.last_name',this.viewmodel.attr('last_name'))
'{viewmodel} email change':()->
this.options.bundle.attr('user.email',this.viewmodel.attr('email'))
'{viewmodel} extension_number change':()->
this.options.bundle.attr('extensions.0.extension_number',this.viewmodel.attr('extension_number'))
spinner_visibility:(is_visible)->
if is_visible
this.element.find('.phone_num_spinner').show()
else
this.element.find('.phone_num_spinner').hide()
is_spinner_visible:()->
this.element.find('.phone_num_spinner').is(":visible")
reserve_number: (selected_did, $number_selector_dialog)->
partner_id = this.options.phone_options.partnerid
PhoneNumber.reserve_number(selected_did.attr(), partner_id, null).then((reserved_did)=>
#unreserving previously selected didid
this.unreserve_number()
console.log(arguments)
this.options.bundle.attr('extensions.0.phone_numbers.0',reserved_did.attr())
this.options.bundle.attr('extensions.0.phone_numbers.0.olddidid',this.viewmodel.attr('old_didid'))
#updating viewmodel
this.viewmodel.attr('phone_number',reserved_did.attr('phonenumber'))
$number_selector_dialog.close_dialog()
).fail((response)=>
$number_selector_dialog.find('.backend_error').empty().html(response.msg)
this.refresh_local()
)
unreserve_number: ()->
existing_phone_number_did = this.options.bundle.attr('extensions.0.phone_numbers.0.didid')
if( existing_phone_number_did && existing_phone_number_did != this.viewmodel.attr('old_didid'))
PhoneNumber.unreserve_number(existing_phone_number_did).fail((response)=>
console.log("unreserve phone number failed")
)
local_select_number_button:() ->
selected_phone_number_did = this.loc_num_search.get_selected()
existing_phone_number_did = this.options.bundle.attr('extensions.0.phone_numbers.0.didid')
if(selected_phone_number_did)
this.reserve_number(selected_phone_number_did, this.loc_dialog)
local_cancel_button:() ->
this.loc_dialog.close_dialog()
refresh_local: () ->
if !this.is_spinner_visible()
city = this.options.phone_options.city
state = this.options.phone_options.state
this.loc_num_search.refresh(city, state)
this.loc_num_search.checkViewModelChange()
select_phone_button:() ->
phone_did = this.phone_search.get_selected()
if(phone_did)
this.options.bundle.attr('extensions.0.devices.0.device_name',phone_did.attr('name'))
this.options.bundle.attr('extensions.0.devices.0.product_sku', phone_did.attr('sku'))
this.options.bundle.attr('extensions.0.devices.0.productid',phone_did.attr('product_id'))
#updating viewmodel
this.viewmodel.attr('device_name',phone_did.attr('name'))
this.phone_dialog.close_dialog()
})
) | 9885 | define(['bases/control',
'_',
'modules/dt_dialog/dt_dialog',
'modules/number_selector_search_form/number_selector_search_form',
'models/phone_number/phone_number',
'models/phone/phone',
'modules/phone_selector_search_form/phone_selector_search_form',
'models/directory/directory',
'models/auth/auth'
], (BaseControl,_, DTDialog, NumberSelectorSearchForm,PhoneNumber,Phone,PhoneSelectorSearchForm,Directory,Auth)->
BaseControl.extend({
LANG: (controller)->
LANG = {
errors:{
first_name:{}
last_name:{}
email:{}
extension:{}
}
}
LANG.errors.first_name[VALID.ERROR.SIZE] = 'must be between 2-40 characters'
LANG.errors.first_name[VALID.ERROR.REQUIRED] = 'first name is required'
LANG.errors.first_name[VALID.ERROR.FORMAT] = "must be 2 to 40 characters alphanumeric, and may contain the following special chars: . , & ( ) ! ? - @ '"
LANG.errors.last_name[VALID.ERROR.SIZE] = 'must be between 2-40 characters'
LANG.errors.last_name[VALID.ERROR.REQUIRED] = 'last name is required'
LANG.errors.last_name[VALID.ERROR.FORMAT] = "must be 2 to 40 characters alphanumeric, and may contain the following special chars: . , & ( ) ! ? - @ '"
LANG.errors.email[VALID.ERROR.REQUIRED] = 'email is required'
LANG.errors.email[VALID.ERROR.SIZE] = 'must be a valid email address with 3 to 70 characters'
LANG.errors.email[VALID.ERROR.FORMAT] = 'must be alphanumeric, must have @ and period, must be 3 to 70 chars, and may contain following special chars: - . _ +'
LANG.errors.email[VALID.ERROR.UNIQUE] = 'this email already exists'
LANG.errors.email[VALID.ERROR.INVALID] = 'Invalid user name format, format should be in the form of email : <EMAIL>'
LANG.errors.extension[VALID.ERROR.REQUIRED] = 'extension is required'
LANG
},{
init:(elem, options)->
this.setup_viewmodel(
this.populate_proxy_helper(options)
)
this.render('wizard_edit_form/wizard_edit_form')
this.bind_view(this.viewmodel)
this.on()
this.set_validity(false)
this.loc_num_search = new NumberSelectorSearchForm($('<div class="local_number_selector_search_form_container_js"></div>'),
{
'partnerid': options.phone_options.partnerid, country: 'US'
})
this.loc_dialog = new DTDialog(this.element.find('.local_num_container_js'),
{
content: this.loc_num_search.element,
settings: {
height: 437,
width: 700,
autoOpen: false,
modal: true,
buttons:{
"Select Number": => this.local_select_number_button()
Cancel: => this.local_cancel_button()
}
}
})
this.loc_dialog.show_hide_title(false)
this.phone_search = new PhoneSelectorSearchForm($('<div class="phone_selector_search_form_container_js"></div>'),
{
'partnerid': options.phone_options.partnerid, country: 'US'
})
this.phone_dialog = new DTDialog(this.element.find('.phone_container_js'),
{
content: this.phone_search.element,
settings: {
height: 237,
width: 400,
autoOpen: false,
modal: true,
buttons:{
"Select Phone": => this.select_phone_button()
}
}
})
this.phone_dialog.show_hide_title(false)
'.main_click_js click': () ->
this.loc_dialog.open_dialog()
this.refresh_local()
'.phone_options_click_js click': () ->
this.phone_dialog.open_dialog()
this.phone_search.refresh()
populate_proxy_helper:(options)->
proxy_item ={}
item = options.bundle
proxy_item.first_name = item.attr('user.first_name')
proxy_item.last_name = item.attr('user.last_name')
proxy_item.email = item.attr('user.email')
proxy_item.extension_number = item.attr('extensions.0.extension_number')
proxy_item.device_name = item.attr('extensions.0.devices.0.device_name')
proxy_item.phone_number = item.attr('extensions.0.phone_numbers.0.phonenumber')
proxy_item.old_didid = item.attr('extensions.0.phone_numbers.0.didid')
#disable first row of wizard admin
if(!options.index && can.route.attr('main') == 'wizard')
proxy_item.admin_disabled = 'disabled'
proxy_item
'{viewmodel} first_name change':()->
this.options.bundle.attr('user.first_name',this.viewmodel.attr('first_name'))
'{viewmodel} last_name change':()->
this.options.bundle.attr('user.last_name',this.viewmodel.attr('last_name'))
'{viewmodel} email change':()->
this.options.bundle.attr('user.email',this.viewmodel.attr('email'))
'{viewmodel} extension_number change':()->
this.options.bundle.attr('extensions.0.extension_number',this.viewmodel.attr('extension_number'))
spinner_visibility:(is_visible)->
if is_visible
this.element.find('.phone_num_spinner').show()
else
this.element.find('.phone_num_spinner').hide()
is_spinner_visible:()->
this.element.find('.phone_num_spinner').is(":visible")
reserve_number: (selected_did, $number_selector_dialog)->
partner_id = this.options.phone_options.partnerid
PhoneNumber.reserve_number(selected_did.attr(), partner_id, null).then((reserved_did)=>
#unreserving previously selected didid
this.unreserve_number()
console.log(arguments)
this.options.bundle.attr('extensions.0.phone_numbers.0',reserved_did.attr())
this.options.bundle.attr('extensions.0.phone_numbers.0.olddidid',this.viewmodel.attr('old_didid'))
#updating viewmodel
this.viewmodel.attr('phone_number',reserved_did.attr('phonenumber'))
$number_selector_dialog.close_dialog()
).fail((response)=>
$number_selector_dialog.find('.backend_error').empty().html(response.msg)
this.refresh_local()
)
unreserve_number: ()->
existing_phone_number_did = this.options.bundle.attr('extensions.0.phone_numbers.0.didid')
if( existing_phone_number_did && existing_phone_number_did != this.viewmodel.attr('old_didid'))
PhoneNumber.unreserve_number(existing_phone_number_did).fail((response)=>
console.log("unreserve phone number failed")
)
local_select_number_button:() ->
selected_phone_number_did = this.loc_num_search.get_selected()
existing_phone_number_did = this.options.bundle.attr('extensions.0.phone_numbers.0.didid')
if(selected_phone_number_did)
this.reserve_number(selected_phone_number_did, this.loc_dialog)
local_cancel_button:() ->
this.loc_dialog.close_dialog()
refresh_local: () ->
if !this.is_spinner_visible()
city = this.options.phone_options.city
state = this.options.phone_options.state
this.loc_num_search.refresh(city, state)
this.loc_num_search.checkViewModelChange()
select_phone_button:() ->
phone_did = this.phone_search.get_selected()
if(phone_did)
this.options.bundle.attr('extensions.0.devices.0.device_name',phone_did.attr('name'))
this.options.bundle.attr('extensions.0.devices.0.product_sku', phone_did.attr('sku'))
this.options.bundle.attr('extensions.0.devices.0.productid',phone_did.attr('product_id'))
#updating viewmodel
this.viewmodel.attr('device_name',phone_did.attr('name'))
this.phone_dialog.close_dialog()
})
) | true | define(['bases/control',
'_',
'modules/dt_dialog/dt_dialog',
'modules/number_selector_search_form/number_selector_search_form',
'models/phone_number/phone_number',
'models/phone/phone',
'modules/phone_selector_search_form/phone_selector_search_form',
'models/directory/directory',
'models/auth/auth'
], (BaseControl,_, DTDialog, NumberSelectorSearchForm,PhoneNumber,Phone,PhoneSelectorSearchForm,Directory,Auth)->
BaseControl.extend({
LANG: (controller)->
LANG = {
errors:{
first_name:{}
last_name:{}
email:{}
extension:{}
}
}
LANG.errors.first_name[VALID.ERROR.SIZE] = 'must be between 2-40 characters'
LANG.errors.first_name[VALID.ERROR.REQUIRED] = 'first name is required'
LANG.errors.first_name[VALID.ERROR.FORMAT] = "must be 2 to 40 characters alphanumeric, and may contain the following special chars: . , & ( ) ! ? - @ '"
LANG.errors.last_name[VALID.ERROR.SIZE] = 'must be between 2-40 characters'
LANG.errors.last_name[VALID.ERROR.REQUIRED] = 'last name is required'
LANG.errors.last_name[VALID.ERROR.FORMAT] = "must be 2 to 40 characters alphanumeric, and may contain the following special chars: . , & ( ) ! ? - @ '"
LANG.errors.email[VALID.ERROR.REQUIRED] = 'email is required'
LANG.errors.email[VALID.ERROR.SIZE] = 'must be a valid email address with 3 to 70 characters'
LANG.errors.email[VALID.ERROR.FORMAT] = 'must be alphanumeric, must have @ and period, must be 3 to 70 chars, and may contain following special chars: - . _ +'
LANG.errors.email[VALID.ERROR.UNIQUE] = 'this email already exists'
LANG.errors.email[VALID.ERROR.INVALID] = 'Invalid user name format, format should be in the form of email : PI:EMAIL:<EMAIL>END_PI'
LANG.errors.extension[VALID.ERROR.REQUIRED] = 'extension is required'
LANG
},{
init:(elem, options)->
this.setup_viewmodel(
this.populate_proxy_helper(options)
)
this.render('wizard_edit_form/wizard_edit_form')
this.bind_view(this.viewmodel)
this.on()
this.set_validity(false)
this.loc_num_search = new NumberSelectorSearchForm($('<div class="local_number_selector_search_form_container_js"></div>'),
{
'partnerid': options.phone_options.partnerid, country: 'US'
})
this.loc_dialog = new DTDialog(this.element.find('.local_num_container_js'),
{
content: this.loc_num_search.element,
settings: {
height: 437,
width: 700,
autoOpen: false,
modal: true,
buttons:{
"Select Number": => this.local_select_number_button()
Cancel: => this.local_cancel_button()
}
}
})
this.loc_dialog.show_hide_title(false)
this.phone_search = new PhoneSelectorSearchForm($('<div class="phone_selector_search_form_container_js"></div>'),
{
'partnerid': options.phone_options.partnerid, country: 'US'
})
this.phone_dialog = new DTDialog(this.element.find('.phone_container_js'),
{
content: this.phone_search.element,
settings: {
height: 237,
width: 400,
autoOpen: false,
modal: true,
buttons:{
"Select Phone": => this.select_phone_button()
}
}
})
this.phone_dialog.show_hide_title(false)
'.main_click_js click': () ->
this.loc_dialog.open_dialog()
this.refresh_local()
'.phone_options_click_js click': () ->
this.phone_dialog.open_dialog()
this.phone_search.refresh()
populate_proxy_helper:(options)->
proxy_item ={}
item = options.bundle
proxy_item.first_name = item.attr('user.first_name')
proxy_item.last_name = item.attr('user.last_name')
proxy_item.email = item.attr('user.email')
proxy_item.extension_number = item.attr('extensions.0.extension_number')
proxy_item.device_name = item.attr('extensions.0.devices.0.device_name')
proxy_item.phone_number = item.attr('extensions.0.phone_numbers.0.phonenumber')
proxy_item.old_didid = item.attr('extensions.0.phone_numbers.0.didid')
#disable first row of wizard admin
if(!options.index && can.route.attr('main') == 'wizard')
proxy_item.admin_disabled = 'disabled'
proxy_item
'{viewmodel} first_name change':()->
this.options.bundle.attr('user.first_name',this.viewmodel.attr('first_name'))
'{viewmodel} last_name change':()->
this.options.bundle.attr('user.last_name',this.viewmodel.attr('last_name'))
'{viewmodel} email change':()->
this.options.bundle.attr('user.email',this.viewmodel.attr('email'))
'{viewmodel} extension_number change':()->
this.options.bundle.attr('extensions.0.extension_number',this.viewmodel.attr('extension_number'))
spinner_visibility:(is_visible)->
if is_visible
this.element.find('.phone_num_spinner').show()
else
this.element.find('.phone_num_spinner').hide()
is_spinner_visible:()->
this.element.find('.phone_num_spinner').is(":visible")
reserve_number: (selected_did, $number_selector_dialog)->
partner_id = this.options.phone_options.partnerid
PhoneNumber.reserve_number(selected_did.attr(), partner_id, null).then((reserved_did)=>
#unreserving previously selected didid
this.unreserve_number()
console.log(arguments)
this.options.bundle.attr('extensions.0.phone_numbers.0',reserved_did.attr())
this.options.bundle.attr('extensions.0.phone_numbers.0.olddidid',this.viewmodel.attr('old_didid'))
#updating viewmodel
this.viewmodel.attr('phone_number',reserved_did.attr('phonenumber'))
$number_selector_dialog.close_dialog()
).fail((response)=>
$number_selector_dialog.find('.backend_error').empty().html(response.msg)
this.refresh_local()
)
unreserve_number: ()->
existing_phone_number_did = this.options.bundle.attr('extensions.0.phone_numbers.0.didid')
if( existing_phone_number_did && existing_phone_number_did != this.viewmodel.attr('old_didid'))
PhoneNumber.unreserve_number(existing_phone_number_did).fail((response)=>
console.log("unreserve phone number failed")
)
local_select_number_button:() ->
selected_phone_number_did = this.loc_num_search.get_selected()
existing_phone_number_did = this.options.bundle.attr('extensions.0.phone_numbers.0.didid')
if(selected_phone_number_did)
this.reserve_number(selected_phone_number_did, this.loc_dialog)
local_cancel_button:() ->
this.loc_dialog.close_dialog()
refresh_local: () ->
if !this.is_spinner_visible()
city = this.options.phone_options.city
state = this.options.phone_options.state
this.loc_num_search.refresh(city, state)
this.loc_num_search.checkViewModelChange()
select_phone_button:() ->
phone_did = this.phone_search.get_selected()
if(phone_did)
this.options.bundle.attr('extensions.0.devices.0.device_name',phone_did.attr('name'))
this.options.bundle.attr('extensions.0.devices.0.product_sku', phone_did.attr('sku'))
this.options.bundle.attr('extensions.0.devices.0.productid',phone_did.attr('product_id'))
#updating viewmodel
this.viewmodel.attr('device_name',phone_did.attr('name'))
this.phone_dialog.close_dialog()
})
) |
[
{
"context": "0\" for [0...8]).join(''),\n\t\t\"output\" : \"9b752e45573d4b39f4dbd3323cab82bf63326bfb\"\n\t},\n\t{\n\t\t\"input\" : (\"a\" for [0...1000000]).join(",
"end": 841,
"score": 0.7413726449012756,
"start": 811,
"tag": "KEY",
"value": "3d4b39f4dbd3323cab82bf63326bfb"
}
] | test/fixed-data/ripemd160.iced | CyberFlameGO/triplesec | 274 |
exports.data = [
{
"input" : "",
"output" : "9c1185a5c5e9fc54612808977ee8f548b2258d31"
},{
"input" : "a",
"output" : "0bdc9d2d256b3ee9daae347be6f4dc835a467ffe"
},
{
"input" : "abc",
"output" : "8eb208f7e05d987a9b044a8e98c6b087f15a0bfc"
},
{
"input" : "message digest",
"output" : "5d0689ef49d2fae572b881b123a85ffa21595f36"
},
{
"input" : "abcdefghijklmnopqrstuvwxyz",
"output" : "f71c27109c692c1b56bbdceb5b9d2865b3708dbc"
},
{
"input" : "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
"output" : "12a053384a9c0c88e405a06c27dcf49ada62eb2b"
},
{
"input" : "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
"output" : "b0e20b6e3116640286ed3a87a5713079b21f5189"
},
{
"input" : ("1234567890" for [0...8]).join(''),
"output" : "9b752e45573d4b39f4dbd3323cab82bf63326bfb"
},
{
"input" : ("a" for [0...1000000]).join(''),
"output" : "52783243c1697bdbe16d37f97f68f08325dc1528"
}]
| 136707 |
exports.data = [
{
"input" : "",
"output" : "9c1185a5c5e9fc54612808977ee8f548b2258d31"
},{
"input" : "a",
"output" : "0bdc9d2d256b3ee9daae347be6f4dc835a467ffe"
},
{
"input" : "abc",
"output" : "8eb208f7e05d987a9b044a8e98c6b087f15a0bfc"
},
{
"input" : "message digest",
"output" : "5d0689ef49d2fae572b881b123a85ffa21595f36"
},
{
"input" : "abcdefghijklmnopqrstuvwxyz",
"output" : "f71c27109c692c1b56bbdceb5b9d2865b3708dbc"
},
{
"input" : "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
"output" : "12a053384a9c0c88e405a06c27dcf49ada62eb2b"
},
{
"input" : "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
"output" : "b0e20b6e3116640286ed3a87a5713079b21f5189"
},
{
"input" : ("1234567890" for [0...8]).join(''),
"output" : "9b752e4557<KEY>"
},
{
"input" : ("a" for [0...1000000]).join(''),
"output" : "52783243c1697bdbe16d37f97f68f08325dc1528"
}]
| true |
exports.data = [
{
"input" : "",
"output" : "9c1185a5c5e9fc54612808977ee8f548b2258d31"
},{
"input" : "a",
"output" : "0bdc9d2d256b3ee9daae347be6f4dc835a467ffe"
},
{
"input" : "abc",
"output" : "8eb208f7e05d987a9b044a8e98c6b087f15a0bfc"
},
{
"input" : "message digest",
"output" : "5d0689ef49d2fae572b881b123a85ffa21595f36"
},
{
"input" : "abcdefghijklmnopqrstuvwxyz",
"output" : "f71c27109c692c1b56bbdceb5b9d2865b3708dbc"
},
{
"input" : "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
"output" : "12a053384a9c0c88e405a06c27dcf49ada62eb2b"
},
{
"input" : "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
"output" : "b0e20b6e3116640286ed3a87a5713079b21f5189"
},
{
"input" : ("1234567890" for [0...8]).join(''),
"output" : "9b752e4557PI:KEY:<KEY>END_PI"
},
{
"input" : ("a" for [0...1000000]).join(''),
"output" : "52783243c1697bdbe16d37f97f68f08325dc1528"
}]
|
[
{
"context": "ROR: 500103\n\n NOT_LOGIN: 403201\n INVALID_TOKEN: 403202\n MISSING_UID: 403203\n MEMBER_CHECK_FAIL: 403204",
"end": 168,
"score": 0.9932095408439636,
"start": 162,
"tag": "PASSWORD",
"value": "403202"
},
{
"context": "0\n INVALID_APPLICATION: 403211\n PASSWORD_ER... | talk-api2x/server/config/error.coffee | ikingye/talk-os | 3,084 | Err = require 'err1st'
meta =
DEFAULT_ERROR: 500100
CREATE_ERROR: 500101
UPDATE_ERROR: 500102
DELETE_ERROR: 500103
NOT_LOGIN: 403201
INVALID_TOKEN: 403202
MISSING_UID: 403203
MEMBER_CHECK_FAIL: 403204
SYNC_TEAMBITION_FAIL: 403205
LANDING_FAIL: 403206
SOCKET_ID_ERROR: 403207
SOCKET_ID_BROKEN: 403208
CLIENT_ID_ERROR: 403209
UNKNOWN_SYNC_SOURCE: 403210
INVALID_APPLICATION: 403211
PASSWORD_ERROR: 403212
MISSING_INTEGRATION_SOURCE: 403213
PROCESS_LOCKED: 400214
TEAM_LEAVE_ERROR: 400215
ALREADY_MEMBER: 400216
INVALID_INVITECODE: 400217
INVALID_OBJECT: 400218
NO_PERMISSION: 403219
TOKEN_EXPIRED: 403220
NOT_EDITABLE: 403221
ROOM_IS_ARCHIVED: 403222
SIGNATURE_FAILED: 403223
REQUEST_FAILD: 400224
NOT_ACCESSIBLE_FOR_GUEST: 400225
FILE_SAVE_FAILED: 400226
FILE_MISSING: 400227
GUEST_MODE_DISABLED: 400228
OUT_OF_SIZE: 400230
SEARCH_FAILED: 400231
OUT_OF_SEARCH_RANGE: 400232
RATE_LIMIT_EXCEEDED: 429233
NOT_TEAMBITION_USER: 403234
NOT_PRIVATE_ROOM: 403235
INVALID_MSG_TOKEN: 403236
INVITATION_EXISTING: 400237
TOO_MANY_FIELDS: 400238
MEMBER_EXISTING: 400239
VOIP_REQUEST_FAILD: 400240
PROPERTY_EXISTING: 400241
INVALID_ACCESS_TOKEN: 403242
INVALID_OPERATION: 403243
INVALID_REFER: 400244
PUSH_FAILED: 400245
INVALID_MAKR_TARGET: 400246
NAME_CONFLICT: 400247
CAN_NOT_ADD_INTEGRATION_IN_OFFICIAL_ROOMS: 400248
INTEGRATION_ERROR: 400249
MOBILE_RATE_EXCEEDED: 400250
SEND_SMS_ERROR: 400251
INTEGRATION_ERROR_DISABLED: 400252
BAD_REQUEST: 400253
PARAMS_MISSING: 400400
OBJECT_MISSING: 400401
OBJECT_EXISTING: 400402
CLIENT_MISSING: 403403
NOT_FOUND: [404404, 'Not Found']
PARAMS_INVALID: 400405
INVALID_TARGET: 400406
MESSAGE_NOT_EDITABLE: 400407
MESSAGE_STRUCTURE_ERROR: 400408
INVALID_SERVICE: 400409
INVALID_RSS_URL: 400410
FIELD_MISSING: 400411
PARAMS_CONFLICT: 400412
FUNCTION_MISSING: 500413
CONFIG_MISSING: 500414
INTERNAL_SERVER_ERROR: [500500, "Internal Server Error"]
Err.meta meta
Err.localeMeta 'zh', require './locales/zh'
Err.localeMeta 'en', require './locales/en'
| 119002 | Err = require 'err1st'
meta =
DEFAULT_ERROR: 500100
CREATE_ERROR: 500101
UPDATE_ERROR: 500102
DELETE_ERROR: 500103
NOT_LOGIN: 403201
INVALID_TOKEN: <PASSWORD>
MISSING_UID: 403203
MEMBER_CHECK_FAIL: 403204
SYNC_TEAMBITION_FAIL: 403205
LANDING_FAIL: 403206
SOCKET_ID_ERROR: 403207
SOCKET_ID_BROKEN: 403208
CLIENT_ID_ERROR: 403209
UNKNOWN_SYNC_SOURCE: 403210
INVALID_APPLICATION: 403211
PASSWORD_ERROR: <PASSWORD>
MISSING_INTEGRATION_SOURCE: 403213
PROCESS_LOCKED: 400214
TEAM_LEAVE_ERROR: 400215
ALREADY_MEMBER: 400216
INVALID_INVITECODE: 400217
INVALID_OBJECT: 400218
NO_PERMISSION: 403219
TOKEN_EXPIRED: 403220
NOT_EDITABLE: 403221
ROOM_IS_ARCHIVED: 403222
SIGNATURE_FAILED: 403223
REQUEST_FAILD: 400224
NOT_ACCESSIBLE_FOR_GUEST: 400225
FILE_SAVE_FAILED: 400226
FILE_MISSING: 400227
GUEST_MODE_DISABLED: 400228
OUT_OF_SIZE: 400230
SEARCH_FAILED: 400231
OUT_OF_SEARCH_RANGE: 400232
RATE_LIMIT_EXCEEDED: 429233
NOT_TEAMBITION_USER: 403234
NOT_PRIVATE_ROOM: 403235
INVALID_MSG_TOKEN: <PASSWORD>
INVITATION_EXISTING: 400237
TOO_MANY_FIELDS: 400238
MEMBER_EXISTING: 400239
VOIP_REQUEST_FAILD: 400240
PROPERTY_EXISTING: 400241
INVALID_ACCESS_TOKEN: <PASSWORD>
INVALID_OPERATION: 403243
INVALID_REFER: 400244
PUSH_FAILED: 400245
INVALID_MAKR_TARGET: 400246
NAME_CONFLICT: 400247
CAN_NOT_ADD_INTEGRATION_IN_OFFICIAL_ROOMS: 400248
INTEGRATION_ERROR: 400249
MOBILE_RATE_EXCEEDED: 400250
SEND_SMS_ERROR: 400251
INTEGRATION_ERROR_DISABLED: 400252
BAD_REQUEST: 400253
PARAMS_MISSING: 400400
OBJECT_MISSING: 400401
OBJECT_EXISTING: 400402
CLIENT_MISSING: 403403
NOT_FOUND: [404404, 'Not Found']
PARAMS_INVALID: 400405
INVALID_TARGET: 400406
MESSAGE_NOT_EDITABLE: 400407
MESSAGE_STRUCTURE_ERROR: 400408
INVALID_SERVICE: 400409
INVALID_RSS_URL: 400410
FIELD_MISSING: 400411
PARAMS_CONFLICT: 400412
FUNCTION_MISSING: 500413
CONFIG_MISSING: 500414
INTERNAL_SERVER_ERROR: [500500, "Internal Server Error"]
Err.meta meta
Err.localeMeta 'zh', require './locales/zh'
Err.localeMeta 'en', require './locales/en'
| true | Err = require 'err1st'
meta =
DEFAULT_ERROR: 500100
CREATE_ERROR: 500101
UPDATE_ERROR: 500102
DELETE_ERROR: 500103
NOT_LOGIN: 403201
INVALID_TOKEN: PI:PASSWORD:<PASSWORD>END_PI
MISSING_UID: 403203
MEMBER_CHECK_FAIL: 403204
SYNC_TEAMBITION_FAIL: 403205
LANDING_FAIL: 403206
SOCKET_ID_ERROR: 403207
SOCKET_ID_BROKEN: 403208
CLIENT_ID_ERROR: 403209
UNKNOWN_SYNC_SOURCE: 403210
INVALID_APPLICATION: 403211
PASSWORD_ERROR: PI:PASSWORD:<PASSWORD>END_PI
MISSING_INTEGRATION_SOURCE: 403213
PROCESS_LOCKED: 400214
TEAM_LEAVE_ERROR: 400215
ALREADY_MEMBER: 400216
INVALID_INVITECODE: 400217
INVALID_OBJECT: 400218
NO_PERMISSION: 403219
TOKEN_EXPIRED: 403220
NOT_EDITABLE: 403221
ROOM_IS_ARCHIVED: 403222
SIGNATURE_FAILED: 403223
REQUEST_FAILD: 400224
NOT_ACCESSIBLE_FOR_GUEST: 400225
FILE_SAVE_FAILED: 400226
FILE_MISSING: 400227
GUEST_MODE_DISABLED: 400228
OUT_OF_SIZE: 400230
SEARCH_FAILED: 400231
OUT_OF_SEARCH_RANGE: 400232
RATE_LIMIT_EXCEEDED: 429233
NOT_TEAMBITION_USER: 403234
NOT_PRIVATE_ROOM: 403235
INVALID_MSG_TOKEN: PI:PASSWORD:<PASSWORD>END_PI
INVITATION_EXISTING: 400237
TOO_MANY_FIELDS: 400238
MEMBER_EXISTING: 400239
VOIP_REQUEST_FAILD: 400240
PROPERTY_EXISTING: 400241
INVALID_ACCESS_TOKEN: PI:PASSWORD:<PASSWORD>END_PI
INVALID_OPERATION: 403243
INVALID_REFER: 400244
PUSH_FAILED: 400245
INVALID_MAKR_TARGET: 400246
NAME_CONFLICT: 400247
CAN_NOT_ADD_INTEGRATION_IN_OFFICIAL_ROOMS: 400248
INTEGRATION_ERROR: 400249
MOBILE_RATE_EXCEEDED: 400250
SEND_SMS_ERROR: 400251
INTEGRATION_ERROR_DISABLED: 400252
BAD_REQUEST: 400253
PARAMS_MISSING: 400400
OBJECT_MISSING: 400401
OBJECT_EXISTING: 400402
CLIENT_MISSING: 403403
NOT_FOUND: [404404, 'Not Found']
PARAMS_INVALID: 400405
INVALID_TARGET: 400406
MESSAGE_NOT_EDITABLE: 400407
MESSAGE_STRUCTURE_ERROR: 400408
INVALID_SERVICE: 400409
INVALID_RSS_URL: 400410
FIELD_MISSING: 400411
PARAMS_CONFLICT: 400412
FUNCTION_MISSING: 500413
CONFIG_MISSING: 500414
INTERNAL_SERVER_ERROR: [500500, "Internal Server Error"]
Err.meta meta
Err.localeMeta 'zh', require './locales/zh'
Err.localeMeta 'en', require './locales/en'
|
[
{
"context": " the following:\n# 1. Get an API token: curl -u 'username' -d '{\"scopes\":[\"repo\"],\"note\":\"Hooks management\"",
"end": 235,
"score": 0.9931620359420776,
"start": 227,
"tag": "USERNAME",
"value": "username"
},
{
"context": "ia API:\n# curl -H \"Authorization... | src/scripts/github-pull-request-notifier.coffee | fourkitchens/hubot-scripts | 2 | # Description:
# An HTTP Listener that notifies about new Github pull requests
#
# Dependencies:
# "url": ""
# "querystring": ""
#
# Configuration:
# You will have to do the following:
# 1. Get an API token: curl -u 'username' -d '{"scopes":["repo"],"note":"Hooks management"}' \
# https://api.github.com/authorizations
# 2. Add <HUBOT_URL>:<PORT>/hubot/gh-pull-requests?room=<room>[&type=<type>] url hook via API:
# curl -H "Authorization: token <your api token>" \
# -d '{"name":"web","active":true,"events":["pull_request"],"config":{"url":"<this script url>","content_type":"json"}}' \
# https://api.github.com/repos/<your user>/<your repo>/hooks
#
# Commands:
# None
#
# URLS:
# POST /hubot/gh-pull-requests?room=<room>[&type=<type]
#
# Authors:
# spajus
url = require('url')
querystring = require('querystring')
module.exports = (robot) ->
robot.router.post "/hubot/gh-pull-requests", (req, res) ->
query = querystring.parse(url.parse(req.url).query)
res.end
user = {}
user.room = query.room if query.room
user.type = query.type if query.type
try
announcePullRequest req.body, (what) ->
robot.send user, what
catch error
console.log "github pull request notifier error: #{error}. Request: #{req.body}"
announcePullRequest = (data, cb) ->
if data.action == 'opened'
mentioned = data.pull_request.body.match(/(^|\s)(@[\w\-]+)/g)
if mentioned
unique = (array) ->
output = {}
output[array[key]] = array[key] for key in [0...array.length]
value for key, value of output
mentioned = mentioned.map (nick) -> nick.trim()
mentioned = unique mentioned
mentioned_line = "\nMentioned: #{mentioned.join(", ")}"
else
mentioned_line = ''
cb "New pull request \"#{data.pull_request.title}\" by #{data.pull_request.user.login}: #{data.pull_request.html_url}#{mentioned_line}"
| 209226 | # Description:
# An HTTP Listener that notifies about new Github pull requests
#
# Dependencies:
# "url": ""
# "querystring": ""
#
# Configuration:
# You will have to do the following:
# 1. Get an API token: curl -u 'username' -d '{"scopes":["repo"],"note":"Hooks management"}' \
# https://api.github.com/authorizations
# 2. Add <HUBOT_URL>:<PORT>/hubot/gh-pull-requests?room=<room>[&type=<type>] url hook via API:
# curl -H "Authorization: token <your <KEY>>" \
# -d '{"name":"web","active":true,"events":["pull_request"],"config":{"url":"<this script url>","content_type":"json"}}' \
# https://api.github.com/repos/<your user>/<your repo>/hooks
#
# Commands:
# None
#
# URLS:
# POST /hubot/gh-pull-requests?room=<room>[&type=<type]
#
# Authors:
# spajus
url = require('url')
querystring = require('querystring')
module.exports = (robot) ->
robot.router.post "/hubot/gh-pull-requests", (req, res) ->
query = querystring.parse(url.parse(req.url).query)
res.end
user = {}
user.room = query.room if query.room
user.type = query.type if query.type
try
announcePullRequest req.body, (what) ->
robot.send user, what
catch error
console.log "github pull request notifier error: #{error}. Request: #{req.body}"
announcePullRequest = (data, cb) ->
if data.action == 'opened'
mentioned = data.pull_request.body.match(/(^|\s)(@[\w\-]+)/g)
if mentioned
unique = (array) ->
output = {}
output[array[key]] = array[key] for key in [0...array.length]
value for key, value of output
mentioned = mentioned.map (nick) -> nick.trim()
mentioned = unique mentioned
mentioned_line = "\nMentioned: #{mentioned.join(", ")}"
else
mentioned_line = ''
cb "New pull request \"#{data.pull_request.title}\" by #{data.pull_request.user.login}: #{data.pull_request.html_url}#{mentioned_line}"
| true | # Description:
# An HTTP Listener that notifies about new Github pull requests
#
# Dependencies:
# "url": ""
# "querystring": ""
#
# Configuration:
# You will have to do the following:
# 1. Get an API token: curl -u 'username' -d '{"scopes":["repo"],"note":"Hooks management"}' \
# https://api.github.com/authorizations
# 2. Add <HUBOT_URL>:<PORT>/hubot/gh-pull-requests?room=<room>[&type=<type>] url hook via API:
# curl -H "Authorization: token <your PI:KEY:<KEY>END_PI>" \
# -d '{"name":"web","active":true,"events":["pull_request"],"config":{"url":"<this script url>","content_type":"json"}}' \
# https://api.github.com/repos/<your user>/<your repo>/hooks
#
# Commands:
# None
#
# URLS:
# POST /hubot/gh-pull-requests?room=<room>[&type=<type]
#
# Authors:
# spajus
url = require('url')
querystring = require('querystring')
module.exports = (robot) ->
robot.router.post "/hubot/gh-pull-requests", (req, res) ->
query = querystring.parse(url.parse(req.url).query)
res.end
user = {}
user.room = query.room if query.room
user.type = query.type if query.type
try
announcePullRequest req.body, (what) ->
robot.send user, what
catch error
console.log "github pull request notifier error: #{error}. Request: #{req.body}"
announcePullRequest = (data, cb) ->
if data.action == 'opened'
mentioned = data.pull_request.body.match(/(^|\s)(@[\w\-]+)/g)
if mentioned
unique = (array) ->
output = {}
output[array[key]] = array[key] for key in [0...array.length]
value for key, value of output
mentioned = mentioned.map (nick) -> nick.trim()
mentioned = unique mentioned
mentioned_line = "\nMentioned: #{mentioned.join(", ")}"
else
mentioned_line = ''
cb "New pull request \"#{data.pull_request.title}\" by #{data.pull_request.user.login}: #{data.pull_request.html_url}#{mentioned_line}"
|
[
{
"context": " return done null, false if conf.admin.username != username\n return done null, false if conf.admin.passw",
"end": 728,
"score": 0.727826714515686,
"start": 720,
"tag": "USERNAME",
"value": "username"
},
{
"context": " return done null, false if conf.admin.password... | lib/admin/index.coffee | devjam/micra | 0 | 'use strict'
path = require 'path'
express = require 'express'
passport = require 'passport'
LocalStrategy = require('passport-local').Strategy
serveStatic = require 'serve-static'
cookieParser = require 'cookie-parser'
bodyParser = require 'body-parser'
session = require 'express-session'
flash = require 'connect-flash'
module.exports = (conf)->
router = express.Router()
admin = express()
admin.set 'views', __dirname + '/views'
admin.set 'view engine', 'jade'
admin.set 'x-powered-by', false
login = (req, res, next)->
next()
if conf.admin.username && conf.admin.password
passport.use new LocalStrategy (username, password, done)->
return done null, false if conf.admin.username != username
return done null, false if conf.admin.password != password
done null, conf.admin
passport.serializeUser (user, done)->
done null, user.username
passport.deserializeUser (id, done)->
done null, conf.admin
router.get '/login', (req, res)->
return res.redirect conf.admin.path if req.isAuthenticated()
res.render 'login',
path: conf.admin.path
message: req.flash 'error'
router.post '/login', passport.authenticate 'local',
successRedirect: conf.admin.path
failureRedirect: conf.admin.path + '/login'
failureFlash: '入力が正しくありません'
login = (req, res, next)->
return next() if req.isAuthenticated()
res.redirect conf.admin.path + '/login'
router.get '/logout', (req, res)->
req.logout()
res.redirect conf.admin.path + '/login'
router.get '/', login, (req, res)->
res.send 'admin index'
router.get '/add', login, (req, res)->
res.send 'admin add'
router.get '/edit', login, (req, res)->
res.send 'admin edit'
# after update
# filename = require.resolve path.join conf.basedir, conf.src, '/jj'
# if require.cache[filename]
# delete require.cache[filename]
admin.use serveStatic __dirname + '/public'
admin.use cookieParser()
admin.use bodyParser.json()
admin.use bodyParser.urlencoded extended: false
admin.use session
secret: 'ankug1ai6wioeh78fajflka5skdfnj1aks0jla4wrtag2a'
resave: false
saveUninitialized: false
admin.use flash()
admin.use passport.initialize()
admin.use passport.session()
admin.use router
admin
| 217688 | 'use strict'
path = require 'path'
express = require 'express'
passport = require 'passport'
LocalStrategy = require('passport-local').Strategy
serveStatic = require 'serve-static'
cookieParser = require 'cookie-parser'
bodyParser = require 'body-parser'
session = require 'express-session'
flash = require 'connect-flash'
module.exports = (conf)->
router = express.Router()
admin = express()
admin.set 'views', __dirname + '/views'
admin.set 'view engine', 'jade'
admin.set 'x-powered-by', false
login = (req, res, next)->
next()
if conf.admin.username && conf.admin.password
passport.use new LocalStrategy (username, password, done)->
return done null, false if conf.admin.username != username
return done null, false if conf.admin.password != <PASSWORD>
done null, conf.admin
passport.serializeUser (user, done)->
done null, user.username
passport.deserializeUser (id, done)->
done null, conf.admin
router.get '/login', (req, res)->
return res.redirect conf.admin.path if req.isAuthenticated()
res.render 'login',
path: conf.admin.path
message: req.flash 'error'
router.post '/login', passport.authenticate 'local',
successRedirect: conf.admin.path
failureRedirect: conf.admin.path + '/login'
failureFlash: '入力が正しくありません'
login = (req, res, next)->
return next() if req.isAuthenticated()
res.redirect conf.admin.path + '/login'
router.get '/logout', (req, res)->
req.logout()
res.redirect conf.admin.path + '/login'
router.get '/', login, (req, res)->
res.send 'admin index'
router.get '/add', login, (req, res)->
res.send 'admin add'
router.get '/edit', login, (req, res)->
res.send 'admin edit'
# after update
# filename = require.resolve path.join conf.basedir, conf.src, '/jj'
# if require.cache[filename]
# delete require.cache[filename]
admin.use serveStatic __dirname + '/public'
admin.use cookieParser()
admin.use bodyParser.json()
admin.use bodyParser.urlencoded extended: false
admin.use session
secret: '<KEY>'
resave: false
saveUninitialized: false
admin.use flash()
admin.use passport.initialize()
admin.use passport.session()
admin.use router
admin
| true | 'use strict'
path = require 'path'
express = require 'express'
passport = require 'passport'
LocalStrategy = require('passport-local').Strategy
serveStatic = require 'serve-static'
cookieParser = require 'cookie-parser'
bodyParser = require 'body-parser'
session = require 'express-session'
flash = require 'connect-flash'
module.exports = (conf)->
router = express.Router()
admin = express()
admin.set 'views', __dirname + '/views'
admin.set 'view engine', 'jade'
admin.set 'x-powered-by', false
login = (req, res, next)->
next()
if conf.admin.username && conf.admin.password
passport.use new LocalStrategy (username, password, done)->
return done null, false if conf.admin.username != username
return done null, false if conf.admin.password != PI:PASSWORD:<PASSWORD>END_PI
done null, conf.admin
passport.serializeUser (user, done)->
done null, user.username
passport.deserializeUser (id, done)->
done null, conf.admin
router.get '/login', (req, res)->
return res.redirect conf.admin.path if req.isAuthenticated()
res.render 'login',
path: conf.admin.path
message: req.flash 'error'
router.post '/login', passport.authenticate 'local',
successRedirect: conf.admin.path
failureRedirect: conf.admin.path + '/login'
failureFlash: '入力が正しくありません'
login = (req, res, next)->
return next() if req.isAuthenticated()
res.redirect conf.admin.path + '/login'
router.get '/logout', (req, res)->
req.logout()
res.redirect conf.admin.path + '/login'
router.get '/', login, (req, res)->
res.send 'admin index'
router.get '/add', login, (req, res)->
res.send 'admin add'
router.get '/edit', login, (req, res)->
res.send 'admin edit'
# after update
# filename = require.resolve path.join conf.basedir, conf.src, '/jj'
# if require.cache[filename]
# delete require.cache[filename]
admin.use serveStatic __dirname + '/public'
admin.use cookieParser()
admin.use bodyParser.json()
admin.use bodyParser.urlencoded extended: false
admin.use session
secret: 'PI:KEY:<KEY>END_PI'
resave: false
saveUninitialized: false
admin.use flash()
admin.use passport.initialize()
admin.use passport.session()
admin.use router
admin
|
[
{
"context": ": (config.email ? opts.email), from: \"InstantILL <InstantILL@openaccessbutton.org>\", subject: \"ILL request \" + opts._id\n tmpl = tm",
"end": 3308,
"score": 0.9999271631240845,
"start": 3277,
"tag": "EMAIL",
"value": "InstantILL@openaccessbutton.org"
},
{
"context":... | worker/src/svc/oaworks/ill.coffee | oaworks/paradigm | 1 |
# this should default to a search of ILLs as well... with a restrict
# restrict = @auth.role('openaccessbutton.admin') and this.queryParams.all then [] else [{term:{from:@user?._id}}]
P.svc.oaworks.ill = (opts) -> # only worked on POST with optional auth
if not opts?
opts = @copy @params
if opts.ill
opts.doi = opts.ill
delete opts.ill
opts.metadata ?= await @svc.oaworks.metadata opts
opts.pilot = Date.now() if opts.pilot is true
opts.live = Date.now() if opts.live is true
config = opts.config
try config = JSON.parse config
if typeof config is 'string' or (not config and opts.from)
config = await @fetch 'https://api.cottagelabs.com/service/oab/ill/config?uid=' + (opts.from ? config)
if not config? or JSON.stringify(config) is '{}'
config = await @fetch 'https://dev.api.cottagelabs.com/service/oab/ill/config?uid=' + (opts.from ? config)
config ?= {}
vars = name: 'librarian', details: '' # anywhere to get the user name from config?
ordered = ['title','author','volume','issue','date','pages']
for o of opts
if o is 'metadata'
for m of opts[o]
if m isnt 'email'
opts[m] = opts[o][m]
ordered.push(m) if m not in ordered
delete opts.metadata
else
ordered.push(o) if o not in ordered
for r in ordered
if opts[r]
vars[r] = opts[r]
if r is 'author'
authors = '<p>Authors:<br>'
first = true
ats = []
for a in opts[r]
if a.family
if first
first = false
else
authors += ', '
atidy = a.family + (if a.given then ' ' + a.given else '')
authors += atidy
ats.push atidy
vars[r] = ats
delete opts.author if opts.author? # remove author metadata due to messy provisions causing save issues
vars.illid = opts._id = await @uid()
# such as https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in=in%3A&si0qs=0021-9231&si1in=au%3A&si1op=AND&si2in=kw%3A&si2op=AND&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch
# could be provided as: (unless other params are mandatory)
# https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?si0qs=0021-9231
if config.search and config.search.length and (opts.issn or opts.journal)
if config.search.indexOf('worldcat') isnt -1
su = config.search.split('?')[0] + '?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in='
su += if opts.issn? then 'in%3A' else 'ti%3A'
su += '&si0qs=' + (opts.issn ? opts.journal)
su += '&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch'
else
su = config.search
su += if opts.issn then opts.issn else opts.journal
vars.worldcatsearchurl = su
tmpl = await @svc.oaworks.templates 'instantill_create.html'
tmpl = tmpl.content
if not opts.forwarded and not opts.resolved and (config.email or opts.email)
@mail svc: 'oaworks', vars: vars, template: tmpl, to: (config.email ? opts.email), from: "InstantILL <InstantILL@openaccessbutton.org>", subject: "ILL request " + opts._id
tmpl = tmpl.replace /Dear.*?\,/, 'Dear Joe, here is a copy of what was just sent:'
@waitUntil @mail svc: 'oaworks', vars: vars, template: tmpl, from: "InstantILL <InstantILL@openaccessbutton.org>", subject: "ILL CREATED " + opts._id, to: 'mark@cottagelabs.com' # ['joe@openaccessbutton.org']
return opts
P.svc.oaworks.ill._index = true
P.svc.oaworks.ill.collect = (params) ->
params ?= @copy @params
sid = params.collect # end of the url is an SID
params._id ?= await @uid()
# example AKfycbwPq7xWoTLwnqZHv7gJAwtsHRkreJ1hMJVeeplxDG_MipdIamU6
url = 'https://script.google.com/macros/s/' + sid + '/exec?'
for q of params
url += (if q is '_id' then 'uuid' else q) + '=' + params[q] + '&' if q isnt 'collect'
@waitUntil @fetch url
@waitUntil @svc.rscvd params
return true
P.svc.oaworks.ill.collect._hide = true
P.svc.oaworks.ill.openurl = (config, meta) ->
# Will eventually redirect after reading openurl params passed here, somehow.
# For now a POST of metadata here by a user with an open url registered will build their openurl
config ?= @params.config ? {}
meta ?= @params.meta ? await @svc.oaworks.metadata()
if config.ill_redirect_base_url
config.ill_form ?= config.ill_redirect_base_url
if config.ill_redirect_params
config.ill_added_params ?= config.ill_redirect_params
# add iupui / openURL defaults to config
defaults =
sid: 'sid'
title: 'atitle' # this is what iupui needs (title is also acceptable, but would clash with using title for journal title, which we set below, as iupui do that
doi: 'rft_id' # don't know yet what this should be
pmcid: 'pmcid' # don't know yet what this should be
author: 'aulast' # author should actually be au, but aulast works even if contains the whole author, using aufirst just concatenates
journal: 'title' # this is what iupui needs
page: 'pages' # iupui uses the spage and epage for start and end pages, but pages is allowed in openurl, check if this will work for iupui
published: 'date' # this is what iupui needs, but in format 1991-07-01 - date format may be a problem
year: 'rft.year' # this is what IUPUI uses
for d of defaults
config[d] = defaults[d] if not config[d]
url = ''
url += config.ill_added_params.replace('?','') + '&' if config.ill_added_params
url += config.sid + '=InstantILL&'
for k of meta
v = ''
if k is 'author'
for author in (if Array.isArray(meta.author) then meta.author else [meta.author])
v += ', ' if v.length
v += if typeof author is 'string' then author else if author.family then author.family + (if author.given then ', ' + author.given else '') else JSON.stringify author
else if k in ['doi','pmid','pmc','pmcid','url','journal','title','year','issn','volume','issue','page','crossref_type','publisher','published','notes']
v = meta[k]
url += (if config[k] then config[k] else k) + '=' + encodeURIComponent(v) + '&' if v
if meta.usermetadata
nfield = if config.notes then config.notes else 'notes'
url = url.replace 'usermetadata=true', ''
if url.indexOf(nfield+'=') is -1
url += '&' + nfield + '=The user provided some metadata.'
else
url = url.replace nfield+'=', nfield+'=The user provided some metadata. '
return url.replace '/&&/g', '&'
P.svc.oaworks.ill.subscription = (config, meta) ->
if not config and not meta and (@params.sub or @params.subscription) # assume values are being passed directly on GET request
config = @copy @params
config.subscription = config.sub if config.sub
if @params.meta
meta = @params.meta
delete config.meta
else if config.doi and @keys(config).length is 2
console.log config.doi
meta = await @svc.oaworks.metadata config.doi
delete config.doi
else
meta = @copy config
delete config.doi
config ?= @params.config ? {}
if typeof config is 'string'
config = await @fetch 'https://api.cottagelabs.com/service/oab/ill/config?uid=' + config
if not config? or JSON.stringify(config) is '{}'
config = await @fetch 'https://dev.api.cottagelabs.com/service/oab/ill/config?uid=' + (opts.from ? config)
meta ?= @params.meta
res = {findings:{}, lookups:[], error:[], contents: []}
if config.subscription?
if config.ill_redirect_params
config.ill_added_params ?= config.ill_redirect_params
# need to get their subscriptions link from their config - and need to know how to build the query string for it
openurl = await @svc.oaworks.ill.openurl config, meta
openurl = openurl.replace(config.ill_added_params.replace('?',''),'') if config.ill_added_params
if typeof config.subscription is 'string'
config.subscription = config.subscription.split(',')
if typeof config.subscription_type is 'string'
config.subscription_type = config.subscription_type.split(',')
config.subscription_type ?= []
for s of config.subscription
sub = config.subscription[s]
if typeof sub is 'object'
subtype = sub.type
sub = sub.url
else
subtype = config.subscription_type[s] ? 'unknown'
sub = sub.trim()
if sub
if subtype is 'serialssolutions' or sub.indexOf('serialssolutions') isnt -1 # and sub.indexOf('.xml.') is -1
tid = sub.split('.search')[0]
tid = tid.split('//')[1] if tid.indexOf('//') isnt -1
#bs = if sub.indexOf('://') isnt -1 then sub.split('://')[0] else 'http' # always use http because https on the xml endpoint fails
sub = 'http://' + tid + '.openurl.xml.serialssolutions.com/openurlxml?version=1.0&genre=article&'
else if (subtype is 'sfx' or sub.indexOf('sfx.') isnt -1) and sub.indexOf('sfx.response_type=simplexml') is -1
sub += (if sub.indexOf('?') is -1 then '?' else '&') + 'sfx.response_type=simplexml'
else if (subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1) and sub.indexOf('response_type') is -1
# https://github.com/OAButton/discussion/issues/1793
#sub = 'https://trails-msu.userservices.exlibrisgroup.com/view/uresolver/01TRAILS_MSU/openurl?svc_dat=CTO&response_type=xml&sid=InstantILL&'
sub = sub.split('?')[0] + '?svc_dat=CTO&response_type=xml&sid=InstantILL&'
#ID=doi:10.1108%2FNFS-09-2019-0293&genre=article&atitle=Impact%20of%20processing%20and%20packaging%20on%20the%20quality%20of%20murici%20jelly%20%5BByrsonima%20crassifolia%20(L.)%20rich%5D%20during%20storage.&title=Nutrition%20&%20Food%20Science&issn=00346659&volume=50&issue=5&date=20200901&au=Da%20Cunha,%20Mariana%20Crivelari&spage=871&pages=871-883
url = sub + (if sub.indexOf('?') is -1 then '?' else '&') + openurl
url = url.split('snc.idm.oclc.org/login?url=')[1] if url.indexOf('snc.idm.oclc.org/login?url=') isnt -1
url = url.replace('cache=true','')
if subtype is 'sfx' or sub.indexOf('sfx.') isnt -1 and url.indexOf('=10.') isnt -1
url = url.replace('=10.','=doi:10.')
if subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1 and url.indexOf('doi=10.') isnt -1
url = url.replace 'doi=10.', 'ID=doi:10.'
pg = ''
spg = ''
error = false
res.lookups.push url
try
# proxy may still be required if our main machine was registered with some of these ILL service providers...
pg = if url.indexOf('.xml.serialssolutions') isnt -1 or url.indexOf('sfx.response_type=simplexml') isnt -1 or url.indexOf('response_type=xml') isnt -1 then await @fetch(url) else await @puppet url
spg = if pg.indexOf('<body') isnt -1 then pg.toLowerCase().split('<body')[1].split('</body')[0] else pg
res.contents.push spg
catch err
error = true
# sfx
# with access:
# https://cricksfx.hosted.exlibrisgroup.com/crick?sid=Elsevier:Scopus&_service_type=getFullTxt&issn=00225193&isbn=&volume=467&issue=&spage=7&epage=14&pages=7-14&artnum=&date=2019&id=doi:10.1016%2fj.jtbi.2019.01.031&title=Journal+of+Theoretical+Biology&atitle=Potential+relations+between+post-spliced+introns+and+mature+mRNAs+in+the+Caenorhabditis+elegans+genome&aufirst=S.&auinit=S.&auinit1=S&aulast=Bo
# which will contain a link like:
# <A title="Navigate to target in new window" HREF="javascript:openSFXMenuLink(this, 'basic1', undefined, '_blank');">Go to Journal website at</A>
# but the content can be different on different sfx language pages, so need to find this link via the tag attributes, then trigger it, then get the page it opens
# can test this with 10.1016/j.jtbi.2019.01.031 on instantill page
# note there is also now an sfx xml endpoint that we have found to check
if subtype is 'sfx' or url.indexOf('sfx.') isnt -1
res.error.push 'sfx' if error
if spg.indexOf('getFullTxt') isnt -1 and spg.indexOf('<target_url>') isnt -1
try
# this will get the first target that has a getFullTxt type and has a target_url element with a value in it, or will error
res.url = spg.split('getFullTxt')[1].split('</target>')[0].split('<target_url>')[1].split('</target_url>')[0].trim()
res.findings.sfx = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'sfx'
return res
else
res.url = undefined
res.findings.sfx = undefined
else
if spg.indexOf('<a title="navigate to target in new window') isnt -1 and spg.split('<a title="navigate to target in new window')[1].split('">')[0].indexOf('basic1') isnt -1
# tried to get the next link after the click through, but was not worth putting more time into it. For now, seems like this will have to do
res.url = url
res.findings.sfx = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'sfx'
return res
else
res.url = undefined
res.findings.sfx = undefined
# eds
# note eds does need a login, but IP address range is supposed to get round that
# our IP is supposed to be registered with the library as being one of their internal ones so should not need login
# however a curl from our IP to it still does not seem to work - will try with puppeteer to see if it is blocking in other ways
# not sure why the links here are via an oclc login - tested, and we will use without it
# with access:
# https://snc.idm.oclc.org/login?url=http://resolver.ebscohost.com/openurl?sid=google&auinit=RE&aulast=Marx&atitle=Platelet-rich+plasma:+growth+factor+enhancement+for+bone+grafts&id=doi:10.1016/S1079-2104(98)90029-4&title=Oral+Surgery,+Oral+Medicine,+Oral+Pathology,+Oral+Radiology,+and+Endodontology&volume=85&issue=6&date=1998&spage=638&issn=1079-2104
# can be tested on instantill page with 10.1016/S1079-2104(98)90029-4
# without:
# https://snc.idm.oclc.org/login?url=http://resolver.ebscohost.com/openurl?sid=google&auinit=MP&aulast=Newton&atitle=Librarian+roles+in+institutional+repository+data+set+collecting:+outcomes+of+a+research+library+task+force&id=doi:10.1080/01462679.2011.530546
else if subtype is 'eds' or url.indexOf('ebscohost.') isnt -1
res.error.push 'eds' if error
if spg.indexOf('view this ') isnt -1 and pg.indexOf('<a data-auto="menu-link" href="') isnt -1
res.url = url.replace('://','______').split('/')[0].replace('______','://') + pg.split('<a data-auto="menu-link" href="')[1].split('" title="')[0]
res.findings.eds = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'eds'
return res
else
res.url = undefined
# serials solutions
# the HTML source code for the No Results page includes a span element with the class SS_NoResults. This class is only found on the No Results page (confirmed by serialssolutions)
# with:
# https://rx8kl6yf4x.search.serialssolutions.com/?genre=article&issn=14085348&title=Annales%3A%20Series%20Historia%20et%20Sociologia&volume=28&issue=1&date=20180101&atitle=HOW%20TO%20UNDERSTAND%20THE%20WAR%20IN%20SYRIA.&spage=13&PAGES=13-28&AUTHOR=%C5%A0TERBENC%2C%20Primo%C5%BE&&aufirst=&aulast=&sid=EBSCO:aph&pid=
# can test this on instantill page with How to understand the war in Syria - Annales Series Historia et Sociologia 2018
# but the with link has a suppressed link that has to be clicked to get the actual page with the content on it
# <a href="?ShowSupressedLinks=yes&SS_LibHash=RX8KL6YF4X&url_ver=Z39.88-2004&rfr_id=info:sid/sersol:RefinerQuery&rft_val_fmt=info:ofi/fmt:kev:mtx:journal&SS_ReferentFormat=JournalFormat&SS_formatselector=radio&rft.genre=article&SS_genreselector=1&rft.aulast=%C5%A0TERBENC&rft.aufirst=Primo%C5%BE&rft.date=2018-01-01&rft.issue=1&rft.volume=28&rft.atitle=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&rft.spage=13&rft.title=Annales%3A+Series+Historia+et+Sociologia&rft.issn=1408-5348&SS_issnh=1408-5348&rft.isbn=&SS_isbnh=&rft.au=%C5%A0TERBENC%2C+Primo%C5%BE&rft.pub=Zgodovinsko+dru%C5%A1tvo+za+ju%C5%BEno+Primorsko¶mdict=en-US&SS_PostParamDict=disableOneClick">Click here</a>
# which is the only link with the showsuppressedlinks param and the clickhere content
# then the page with the content link is like:
# https://rx8kl6yf4x.search.serialssolutions.com/?ShowSupressedLinks=yes&SS_LibHash=RX8KL6YF4X&url_ver=Z39.88-2004&rfr_id=info:sid/sersol:RefinerQuery&rft_val_fmt=info:ofi/fmt:kev:mtx:journal&SS_ReferentFormat=JournalFormat&SS_formatselector=radio&rft.genre=article&SS_genreselector=1&rft.aulast=%C5%A0TERBENC&rft.aufirst=Primo%C5%BE&rft.date=2018-01-01&rft.issue=1&rft.volume=28&rft.atitle=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&rft.spage=13&rft.title=Annales%3A+Series+Historia+et+Sociologia&rft.issn=1408-5348&SS_issnh=1408-5348&rft.isbn=&SS_isbnh=&rft.au=%C5%A0TERBENC%2C+Primo%C5%BE&rft.pub=Zgodovinsko+dru%C5%A1tvo+za+ju%C5%BEno+Primorsko¶mdict=en-US&SS_PostParamDict=disableOneClick
# and the content is found in a link like this:
# <div id="ArticleCL" class="cl">
# <a target="_blank" href="./log?L=RX8KL6YF4X&D=EAP&J=TC0000940997&P=Link&PT=EZProxy&A=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&H=c7306f7121&U=http%3A%2F%2Fwww.ulib.iupui.edu%2Fcgi-bin%2Fproxy.pl%3Furl%3Dhttp%3A%2F%2Fopenurl.ebscohost.com%2Flinksvc%2Flinking.aspx%3Fgenre%3Darticle%26issn%3D1408-5348%26title%3DAnnales%2BSeries%2Bhistoria%2Bet%2Bsociologia%26date%3D2018%26volume%3D28%26issue%3D1%26spage%3D13%26atitle%3DHOW%2BTO%2BUNDERSTAND%2BTHE%2BWAR%2BIN%2BSYRIA.%26aulast%3D%25C5%25A0TERBENC%26aufirst%3DPrimo%C5%BE">Article</a>
# </div>
# without:
# https://rx8kl6yf4x.search.serialssolutions.com/directLink?&atitle=Writing+at+the+Speed+of+Sound%3A+Music+Stenography+and+Recording+beyond+the+Phonograph&author=Pierce%2C+J+Mackenzie&issn=01482076&title=Nineteenth+Century+Music&volume=41&issue=2&date=2017-10-01&spage=121&id=doi:&sid=ProQ_ss&genre=article
# we also have an xml alternative for serials solutions
# see https://journal.code4lib.org/articles/108
else if subtype is 'serialssolutions' or url.indexOf('serialssolutions.') isnt -1
res.error.push 'serialssolutions' if error
if spg.indexOf('<ssopenurl:url type="article">') isnt -1
fnd = spg.split('<ssopenurl:url type="article">')[1].split('</ssopenurl:url>')[0].trim().replace(/&/g, '&') # this gets us something that has an empty accountid param - do we need that for it to work?
if fnd.length
res.url = fnd
res.findings.serials = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'serials'
return res
else
res.url = undefined
res.findings.serials = undefined
# disable journal matching for now until we have time to get it more accurate - some things get journal links but are not subscribed
#else if spg.indexOf('<ssopenurl:result format="journal">') isnt -1
# # we assume if there is a journal result but not a URL that it means the institution has a journal subscription but we don't have a link
# res.journal = true
# res.found = 'serials'
# return res
else
if spg.indexOf('ss_noresults') is -1
try
surl = url.split('?')[0] + '?ShowSupressedLinks' + pg.split('?ShowSupressedLinks')[1].split('">')[0]
npg = await @puppet surl # would this still need proxy?
if npg.indexOf('ArticleCL') isnt -1 and npg.split('DatabaseCL')[0].indexOf('href="./log') isnt -1
res.url = surl.split('?')[0] + npg.split('ArticleCL')[1].split('DatabaseCL')[0].split('href="')[1].split('">')[0].replace(/&/g, '&')
res.findings.serials = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'serials'
return res
else
res.url = undefined
res.findings.serials = undefined
catch
res.error.push 'serialssolutions' if error
else if subtype is 'exlibris' or url.indexOf('.exlibris') isnt -1
res.error.push 'exlibris' if error
if spg.indexOf('full_text_indicator') isnt -1 and spg.split('full_text_indicator')[1].replace('">', '').indexOf('true') is 0 and spg.indexOf('resolution_url') isnt -1
res.url = spg.split('<resolution_url>')[1].split('</resolution_url>')[0].replace(/&/g, '&')
res.findings.exlibris = res.url
res.found = 'exlibris'
return res
return res
| 112681 |
# this should default to a search of ILLs as well... with a restrict
# restrict = @auth.role('openaccessbutton.admin') and this.queryParams.all then [] else [{term:{from:@user?._id}}]
P.svc.oaworks.ill = (opts) -> # only worked on POST with optional auth
if not opts?
opts = @copy @params
if opts.ill
opts.doi = opts.ill
delete opts.ill
opts.metadata ?= await @svc.oaworks.metadata opts
opts.pilot = Date.now() if opts.pilot is true
opts.live = Date.now() if opts.live is true
config = opts.config
try config = JSON.parse config
if typeof config is 'string' or (not config and opts.from)
config = await @fetch 'https://api.cottagelabs.com/service/oab/ill/config?uid=' + (opts.from ? config)
if not config? or JSON.stringify(config) is '{}'
config = await @fetch 'https://dev.api.cottagelabs.com/service/oab/ill/config?uid=' + (opts.from ? config)
config ?= {}
vars = name: 'librarian', details: '' # anywhere to get the user name from config?
ordered = ['title','author','volume','issue','date','pages']
for o of opts
if o is 'metadata'
for m of opts[o]
if m isnt 'email'
opts[m] = opts[o][m]
ordered.push(m) if m not in ordered
delete opts.metadata
else
ordered.push(o) if o not in ordered
for r in ordered
if opts[r]
vars[r] = opts[r]
if r is 'author'
authors = '<p>Authors:<br>'
first = true
ats = []
for a in opts[r]
if a.family
if first
first = false
else
authors += ', '
atidy = a.family + (if a.given then ' ' + a.given else '')
authors += atidy
ats.push atidy
vars[r] = ats
delete opts.author if opts.author? # remove author metadata due to messy provisions causing save issues
vars.illid = opts._id = await @uid()
# such as https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in=in%3A&si0qs=0021-9231&si1in=au%3A&si1op=AND&si2in=kw%3A&si2op=AND&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch
# could be provided as: (unless other params are mandatory)
# https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?si0qs=0021-9231
if config.search and config.search.length and (opts.issn or opts.journal)
if config.search.indexOf('worldcat') isnt -1
su = config.search.split('?')[0] + '?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in='
su += if opts.issn? then 'in%3A' else 'ti%3A'
su += '&si0qs=' + (opts.issn ? opts.journal)
su += '&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch'
else
su = config.search
su += if opts.issn then opts.issn else opts.journal
vars.worldcatsearchurl = su
tmpl = await @svc.oaworks.templates 'instantill_create.html'
tmpl = tmpl.content
if not opts.forwarded and not opts.resolved and (config.email or opts.email)
@mail svc: 'oaworks', vars: vars, template: tmpl, to: (config.email ? opts.email), from: "InstantILL <<EMAIL>>", subject: "ILL request " + opts._id
tmpl = tmpl.replace /Dear.*?\,/, 'Dear <NAME>, here is a copy of what was just sent:'
@waitUntil @mail svc: 'oaworks', vars: vars, template: tmpl, from: "InstantILL <<EMAIL>>", subject: "ILL CREATED " + opts._id, to: '<EMAIL>' # ['<EMAIL>']
return opts
P.svc.oaworks.ill._index = true
P.svc.oaworks.ill.collect = (params) ->
params ?= @copy @params
sid = params.collect # end of the url is an SID
params._id ?= await @uid()
# example AKfycbwPq7xWoTLwnqZHv7gJAwtsHRkreJ1hMJVeeplxDG_MipdIamU6
url = 'https://script.google.com/macros/s/' + sid + '/exec?'
for q of params
url += (if q is '_id' then 'uuid' else q) + '=' + params[q] + '&' if q isnt 'collect'
@waitUntil @fetch url
@waitUntil @svc.rscvd params
return true
P.svc.oaworks.ill.collect._hide = true
P.svc.oaworks.ill.openurl = (config, meta) ->
# Will eventually redirect after reading openurl params passed here, somehow.
# For now a POST of metadata here by a user with an open url registered will build their openurl
config ?= @params.config ? {}
meta ?= @params.meta ? await @svc.oaworks.metadata()
if config.ill_redirect_base_url
config.ill_form ?= config.ill_redirect_base_url
if config.ill_redirect_params
config.ill_added_params ?= config.ill_redirect_params
# add iupui / openURL defaults to config
defaults =
sid: 'sid'
title: 'atitle' # this is what iupui needs (title is also acceptable, but would clash with using title for journal title, which we set below, as iupui do that
doi: 'rft_id' # don't know yet what this should be
pmcid: 'pmcid' # don't know yet what this should be
author: 'aulast' # author should actually be au, but aulast works even if contains the whole author, using aufirst just concatenates
journal: 'title' # this is what iupui needs
page: 'pages' # iupui uses the spage and epage for start and end pages, but pages is allowed in openurl, check if this will work for iupui
published: 'date' # this is what iupui needs, but in format 1991-07-01 - date format may be a problem
year: 'rft.year' # this is what IUPUI uses
for d of defaults
config[d] = defaults[d] if not config[d]
url = ''
url += config.ill_added_params.replace('?','') + '&' if config.ill_added_params
url += config.sid + '=InstantILL&'
for k of meta
v = ''
if k is 'author'
for author in (if Array.isArray(meta.author) then meta.author else [meta.author])
v += ', ' if v.length
v += if typeof author is 'string' then author else if author.family then author.family + (if author.given then ', ' + author.given else '') else JSON.stringify author
else if k in ['doi','pmid','pmc','pmcid','url','journal','title','year','issn','volume','issue','page','crossref_type','publisher','published','notes']
v = meta[k]
url += (if config[k] then config[k] else k) + '=' + encodeURIComponent(v) + '&' if v
if meta.usermetadata
nfield = if config.notes then config.notes else 'notes'
url = url.replace 'usermetadata=true', ''
if url.indexOf(nfield+'=') is -1
url += '&' + nfield + '=The user provided some metadata.'
else
url = url.replace nfield+'=', nfield+'=The user provided some metadata. '
return url.replace '/&&/g', '&'
P.svc.oaworks.ill.subscription = (config, meta) ->
if not config and not meta and (@params.sub or @params.subscription) # assume values are being passed directly on GET request
config = @copy @params
config.subscription = config.sub if config.sub
if @params.meta
meta = @params.meta
delete config.meta
else if config.doi and @keys(config).length is 2
console.log config.doi
meta = await @svc.oaworks.metadata config.doi
delete config.doi
else
meta = @copy config
delete config.doi
config ?= @params.config ? {}
if typeof config is 'string'
config = await @fetch 'https://api.cottagelabs.com/service/oab/ill/config?uid=' + config
if not config? or JSON.stringify(config) is '{}'
config = await @fetch 'https://dev.api.cottagelabs.com/service/oab/ill/config?uid=' + (opts.from ? config)
meta ?= @params.meta
res = {findings:{}, lookups:[], error:[], contents: []}
if config.subscription?
if config.ill_redirect_params
config.ill_added_params ?= config.ill_redirect_params
# need to get their subscriptions link from their config - and need to know how to build the query string for it
openurl = await @svc.oaworks.ill.openurl config, meta
openurl = openurl.replace(config.ill_added_params.replace('?',''),'') if config.ill_added_params
if typeof config.subscription is 'string'
config.subscription = config.subscription.split(',')
if typeof config.subscription_type is 'string'
config.subscription_type = config.subscription_type.split(',')
config.subscription_type ?= []
for s of config.subscription
sub = config.subscription[s]
if typeof sub is 'object'
subtype = sub.type
sub = sub.url
else
subtype = config.subscription_type[s] ? 'unknown'
sub = sub.trim()
if sub
if subtype is 'serialssolutions' or sub.indexOf('serialssolutions') isnt -1 # and sub.indexOf('.xml.') is -1
tid = sub.split('.search')[0]
tid = tid.split('//')[1] if tid.indexOf('//') isnt -1
#bs = if sub.indexOf('://') isnt -1 then sub.split('://')[0] else 'http' # always use http because https on the xml endpoint fails
sub = 'http://' + tid + '.openurl.xml.serialssolutions.com/openurlxml?version=1.0&genre=article&'
else if (subtype is 'sfx' or sub.indexOf('sfx.') isnt -1) and sub.indexOf('sfx.response_type=simplexml') is -1
sub += (if sub.indexOf('?') is -1 then '?' else '&') + 'sfx.response_type=simplexml'
else if (subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1) and sub.indexOf('response_type') is -1
# https://github.com/OAButton/discussion/issues/1793
#sub = 'https://trails-msu.userservices.exlibrisgroup.com/view/uresolver/01TRAILS_MSU/openurl?svc_dat=CTO&response_type=xml&sid=InstantILL&'
sub = sub.split('?')[0] + '?svc_dat=CTO&response_type=xml&sid=InstantILL&'
#ID=doi:10.1108%2FNFS-09-2019-0293&genre=article&atitle=Impact%20of%20processing%20and%20packaging%20on%20the%20quality%20of%20murici%20jelly%20%5BByrsonima%20crassifolia%20(L.)%20rich%5D%20during%20storage.&title=Nutrition%20&%20Food%20Science&issn=00346659&volume=50&issue=5&date=20200901&au=Da%20Cunha,%20Mariana%20Crivelari&spage=871&pages=871-883
url = sub + (if sub.indexOf('?') is -1 then '?' else '&') + openurl
url = url.split('snc.idm.oclc.org/login?url=')[1] if url.indexOf('snc.idm.oclc.org/login?url=') isnt -1
url = url.replace('cache=true','')
if subtype is 'sfx' or sub.indexOf('sfx.') isnt -1 and url.indexOf('=10.') isnt -1
url = url.replace('=10.','=doi:10.')
if subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1 and url.indexOf('doi=10.') isnt -1
url = url.replace 'doi=10.', 'ID=doi:10.'
pg = ''
spg = ''
error = false
res.lookups.push url
try
# proxy may still be required if our main machine was registered with some of these ILL service providers...
pg = if url.indexOf('.xml.serialssolutions') isnt -1 or url.indexOf('sfx.response_type=simplexml') isnt -1 or url.indexOf('response_type=xml') isnt -1 then await @fetch(url) else await @puppet url
spg = if pg.indexOf('<body') isnt -1 then pg.toLowerCase().split('<body')[1].split('</body')[0] else pg
res.contents.push spg
catch err
error = true
# sfx
# with access:
# https://cricksfx.hosted.exlibrisgroup.com/crick?sid=Elsevier:Scopus&_service_type=getFullTxt&issn=00225193&isbn=&volume=467&issue=&spage=7&epage=14&pages=7-14&artnum=&date=2019&id=doi:10.1016%2fj.jtbi.2019.01.031&title=Journal+of+Theoretical+Biology&atitle=Potential+relations+between+post-spliced+introns+and+mature+mRNAs+in+the+Caenorhabditis+elegans+genome&aufirst=S.&auinit=S.&auinit1=S&aulast=Bo
# which will contain a link like:
# <A title="Navigate to target in new window" HREF="javascript:openSFXMenuLink(this, 'basic1', undefined, '_blank');">Go to Journal website at</A>
# but the content can be different on different sfx language pages, so need to find this link via the tag attributes, then trigger it, then get the page it opens
# can test this with 10.1016/j.jtbi.2019.01.031 on instantill page
# note there is also now an sfx xml endpoint that we have found to check
if subtype is 'sfx' or url.indexOf('sfx.') isnt -1
res.error.push 'sfx' if error
if spg.indexOf('getFullTxt') isnt -1 and spg.indexOf('<target_url>') isnt -1
try
# this will get the first target that has a getFullTxt type and has a target_url element with a value in it, or will error
res.url = spg.split('getFullTxt')[1].split('</target>')[0].split('<target_url>')[1].split('</target_url>')[0].trim()
res.findings.sfx = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'sfx'
return res
else
res.url = undefined
res.findings.sfx = undefined
else
if spg.indexOf('<a title="navigate to target in new window') isnt -1 and spg.split('<a title="navigate to target in new window')[1].split('">')[0].indexOf('basic1') isnt -1
# tried to get the next link after the click through, but was not worth putting more time into it. For now, seems like this will have to do
res.url = url
res.findings.sfx = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'sfx'
return res
else
res.url = undefined
res.findings.sfx = undefined
# eds
# note eds does need a login, but IP address range is supposed to get round that
# our IP is supposed to be registered with the library as being one of their internal ones so should not need login
# however a curl from our IP to it still does not seem to work - will try with puppeteer to see if it is blocking in other ways
# not sure why the links here are via an oclc login - tested, and we will use without it
# with access:
# https://snc.idm.oclc.org/login?url=http://resolver.ebscohost.com/openurl?sid=google&auinit=RE&aulast=Marx&atitle=Platelet-rich+plasma:+growth+factor+enhancement+for+bone+grafts&id=doi:10.1016/S1079-2104(98)90029-4&title=Oral+Surgery,+Oral+Medicine,+Oral+Pathology,+Oral+Radiology,+and+Endodontology&volume=85&issue=6&date=1998&spage=638&issn=1079-2104
# can be tested on instantill page with 10.1016/S1079-2104(98)90029-4
# without:
# https://snc.idm.oclc.org/login?url=http://resolver.ebscohost.com/openurl?sid=google&auinit=MP&aulast=Newton&atitle=Librarian+roles+in+institutional+repository+data+set+collecting:+outcomes+of+a+research+library+task+force&id=doi:10.1080/01462679.2011.530546
else if subtype is 'eds' or url.indexOf('ebscohost.') isnt -1
res.error.push 'eds' if error
if spg.indexOf('view this ') isnt -1 and pg.indexOf('<a data-auto="menu-link" href="') isnt -1
res.url = url.replace('://','______').split('/')[0].replace('______','://') + pg.split('<a data-auto="menu-link" href="')[1].split('" title="')[0]
res.findings.eds = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'eds'
return res
else
res.url = undefined
# serials solutions
# the HTML source code for the No Results page includes a span element with the class SS_NoResults. This class is only found on the No Results page (confirmed by serialssolutions)
# with:
# https://rx8kl6yf4x.search.serialssolutions.com/?genre=article&issn=14085348&title=Annales%3A%20Series%20Historia%20et%20Sociologia&volume=28&issue=1&date=20180101&atitle=HOW%20TO%20UNDERSTAND%20THE%20WAR%20IN%20SYRIA.&spage=13&PAGES=13-28&AUTHOR=%C5%A0TERBENC%2C%20Primo%C5%BE&&aufirst=&aulast=&sid=EBSCO:aph&pid=
# can test this on instantill page with How to understand the war in Syria - Annales Series Historia et Sociologia 2018
# but the with link has a suppressed link that has to be clicked to get the actual page with the content on it
# <a href="?ShowSupressedLinks=yes&SS_LibHash=RX8KL6YF4X&url_ver=Z39.88-2004&rfr_id=info:sid/sersol:RefinerQuery&rft_val_fmt=info:ofi/fmt:kev:mtx:journal&SS_ReferentFormat=JournalFormat&SS_formatselector=radio&rft.genre=article&SS_genreselector=1&rft.aulast=%C5%A0TERBENC&rft.aufirst=Primo%C5%BE&rft.date=2018-01-01&rft.issue=1&rft.volume=28&rft.atitle=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&rft.spage=13&rft.title=Annales%3A+Series+Historia+et+Sociologia&rft.issn=1408-5348&SS_issnh=1408-5348&rft.isbn=&SS_isbnh=&rft.au=%C5%A0TERBENC%2C+Primo%C5%BE&rft.pub=Zgodovinsko+dru%C5%A1tvo+za+ju%C5%BEno+Primorsko¶mdict=en-US&SS_PostParamDict=disableOneClick">Click here</a>
# which is the only link with the showsuppressedlinks param and the clickhere content
# then the page with the content link is like:
# https://rx8kl6yf4x.search.serialssolutions.com/?ShowSupressedLinks=yes&SS_LibHash=RX8KL6YF4X&url_ver=Z39.88-2004&rfr_id=info:sid/sersol:RefinerQuery&rft_val_fmt=info:ofi/fmt:kev:mtx:journal&SS_ReferentFormat=JournalFormat&SS_formatselector=radio&rft.genre=article&SS_genreselector=1&rft.aulast=%C5%A0TERBENC&rft.aufirst=Primo%C5%BE&rft.date=2018-01-01&rft.issue=1&rft.volume=28&rft.atitle=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&rft.spage=13&rft.title=Annales%3A+Series+Historia+et+Sociologia&rft.issn=1408-5348&SS_issnh=1408-5348&rft.isbn=&SS_isbnh=&rft.au=%C5%A0TERBENC%2C+Primo%C5%BE&rft.pub=Zgodovinsko+dru%C5%A1tvo+za+ju%C5%BEno+Primorsko¶mdict=en-US&SS_PostParamDict=disableOneClick
# and the content is found in a link like this:
# <div id="ArticleCL" class="cl">
# <a target="_blank" href="./log?L=RX8KL6YF4X&D=EAP&J=TC0000940997&P=Link&PT=EZProxy&A=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&H=c7306f7121&U=http%3A%2F%2Fwww.ulib.iupui.edu%2Fcgi-bin%2Fproxy.pl%3Furl%3Dhttp%3A%2F%2Fopenurl.ebscohost.com%2Flinksvc%2Flinking.aspx%3Fgenre%3Darticle%26issn%3D1408-5348%26title%3DAnnales%2BSeries%2Bhistoria%2Bet%2Bsociologia%26date%3D2018%26volume%3D28%26issue%3D1%26spage%3D13%26atitle%3DHOW%2BTO%2BUNDERSTAND%2BTHE%2BWAR%2BIN%2BSYRIA.%26aulast%3D%25C5%25A0TERBENC%26aufirst%3DPrimo%C5%BE">Article</a>
# </div>
# without:
# https://rx8kl6yf4x.search.serialssolutions.com/directLink?&atitle=Writing+at+the+Speed+of+Sound%3A+Music+Stenography+and+Recording+beyond+the+Phonograph&author=<NAME>%2C+<NAME>+<NAME>&issn=01482076&title=Nineteenth+Century+Music&volume=41&issue=2&date=2017-10-01&spage=121&id=doi:&sid=ProQ_ss&genre=article
# we also have an xml alternative for serials solutions
# see https://journal.code4lib.org/articles/108
else if subtype is 'serialssolutions' or url.indexOf('serialssolutions.') isnt -1
res.error.push 'serialssolutions' if error
if spg.indexOf('<ssopenurl:url type="article">') isnt -1
fnd = spg.split('<ssopenurl:url type="article">')[1].split('</ssopenurl:url>')[0].trim().replace(/&/g, '&') # this gets us something that has an empty accountid param - do we need that for it to work?
if fnd.length
res.url = fnd
res.findings.serials = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'serials'
return res
else
res.url = undefined
res.findings.serials = undefined
# disable journal matching for now until we have time to get it more accurate - some things get journal links but are not subscribed
#else if spg.indexOf('<ssopenurl:result format="journal">') isnt -1
# # we assume if there is a journal result but not a URL that it means the institution has a journal subscription but we don't have a link
# res.journal = true
# res.found = 'serials'
# return res
else
if spg.indexOf('ss_noresults') is -1
try
surl = url.split('?')[0] + '?ShowSupressedLinks' + pg.split('?ShowSupressedLinks')[1].split('">')[0]
npg = await @puppet surl # would this still need proxy?
if npg.indexOf('ArticleCL') isnt -1 and npg.split('DatabaseCL')[0].indexOf('href="./log') isnt -1
res.url = surl.split('?')[0] + npg.split('ArticleCL')[1].split('DatabaseCL')[0].split('href="')[1].split('">')[0].replace(/&/g, '&')
res.findings.serials = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'serials'
return res
else
res.url = undefined
res.findings.serials = undefined
catch
res.error.push 'serialssolutions' if error
else if subtype is 'exlibris' or url.indexOf('.exlibris') isnt -1
res.error.push 'exlibris' if error
if spg.indexOf('full_text_indicator') isnt -1 and spg.split('full_text_indicator')[1].replace('">', '').indexOf('true') is 0 and spg.indexOf('resolution_url') isnt -1
res.url = spg.split('<resolution_url>')[1].split('</resolution_url>')[0].replace(/&/g, '&')
res.findings.exlibris = res.url
res.found = 'exlibris'
return res
return res
| true |
# this should default to a search of ILLs as well... with a restrict
# restrict = @auth.role('openaccessbutton.admin') and this.queryParams.all then [] else [{term:{from:@user?._id}}]
P.svc.oaworks.ill = (opts) -> # only worked on POST with optional auth
if not opts?
opts = @copy @params
if opts.ill
opts.doi = opts.ill
delete opts.ill
opts.metadata ?= await @svc.oaworks.metadata opts
opts.pilot = Date.now() if opts.pilot is true
opts.live = Date.now() if opts.live is true
config = opts.config
try config = JSON.parse config
if typeof config is 'string' or (not config and opts.from)
config = await @fetch 'https://api.cottagelabs.com/service/oab/ill/config?uid=' + (opts.from ? config)
if not config? or JSON.stringify(config) is '{}'
config = await @fetch 'https://dev.api.cottagelabs.com/service/oab/ill/config?uid=' + (opts.from ? config)
config ?= {}
vars = name: 'librarian', details: '' # anywhere to get the user name from config?
ordered = ['title','author','volume','issue','date','pages']
for o of opts
if o is 'metadata'
for m of opts[o]
if m isnt 'email'
opts[m] = opts[o][m]
ordered.push(m) if m not in ordered
delete opts.metadata
else
ordered.push(o) if o not in ordered
for r in ordered
if opts[r]
vars[r] = opts[r]
if r is 'author'
authors = '<p>Authors:<br>'
first = true
ats = []
for a in opts[r]
if a.family
if first
first = false
else
authors += ', '
atidy = a.family + (if a.given then ' ' + a.given else '')
authors += atidy
ats.push atidy
vars[r] = ats
delete opts.author if opts.author? # remove author metadata due to messy provisions causing save issues
vars.illid = opts._id = await @uid()
# such as https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in=in%3A&si0qs=0021-9231&si1in=au%3A&si1op=AND&si2in=kw%3A&si2op=AND&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch
# could be provided as: (unless other params are mandatory)
# https://ambslibrary.share.worldcat.org/wms/cmnd/nd/discover/items/search?si0qs=0021-9231
if config.search and config.search.length and (opts.issn or opts.journal)
if config.search.indexOf('worldcat') isnt -1
su = config.search.split('?')[0] + '?ai0id=level3&ai0type=scope&offset=1&pageSize=10&si0in='
su += if opts.issn? then 'in%3A' else 'ti%3A'
su += '&si0qs=' + (opts.issn ? opts.journal)
su += '&sortDirection=descending&sortKey=librarycount&applicationId=nd&requestType=search&searchType=advancedsearch&eventSource=df-advancedsearch'
else
su = config.search
su += if opts.issn then opts.issn else opts.journal
vars.worldcatsearchurl = su
tmpl = await @svc.oaworks.templates 'instantill_create.html'
tmpl = tmpl.content
if not opts.forwarded and not opts.resolved and (config.email or opts.email)
@mail svc: 'oaworks', vars: vars, template: tmpl, to: (config.email ? opts.email), from: "InstantILL <PI:EMAIL:<EMAIL>END_PI>", subject: "ILL request " + opts._id
tmpl = tmpl.replace /Dear.*?\,/, 'Dear PI:NAME:<NAME>END_PI, here is a copy of what was just sent:'
@waitUntil @mail svc: 'oaworks', vars: vars, template: tmpl, from: "InstantILL <PI:EMAIL:<EMAIL>END_PI>", subject: "ILL CREATED " + opts._id, to: 'PI:EMAIL:<EMAIL>END_PI' # ['PI:EMAIL:<EMAIL>END_PI']
return opts
P.svc.oaworks.ill._index = true
P.svc.oaworks.ill.collect = (params) ->
params ?= @copy @params
sid = params.collect # end of the url is an SID
params._id ?= await @uid()
# example AKfycbwPq7xWoTLwnqZHv7gJAwtsHRkreJ1hMJVeeplxDG_MipdIamU6
url = 'https://script.google.com/macros/s/' + sid + '/exec?'
for q of params
url += (if q is '_id' then 'uuid' else q) + '=' + params[q] + '&' if q isnt 'collect'
@waitUntil @fetch url
@waitUntil @svc.rscvd params
return true
P.svc.oaworks.ill.collect._hide = true
P.svc.oaworks.ill.openurl = (config, meta) ->
# Will eventually redirect after reading openurl params passed here, somehow.
# For now a POST of metadata here by a user with an open url registered will build their openurl
config ?= @params.config ? {}
meta ?= @params.meta ? await @svc.oaworks.metadata()
if config.ill_redirect_base_url
config.ill_form ?= config.ill_redirect_base_url
if config.ill_redirect_params
config.ill_added_params ?= config.ill_redirect_params
# add iupui / openURL defaults to config
defaults =
sid: 'sid'
title: 'atitle' # this is what iupui needs (title is also acceptable, but would clash with using title for journal title, which we set below, as iupui do that
doi: 'rft_id' # don't know yet what this should be
pmcid: 'pmcid' # don't know yet what this should be
author: 'aulast' # author should actually be au, but aulast works even if contains the whole author, using aufirst just concatenates
journal: 'title' # this is what iupui needs
page: 'pages' # iupui uses the spage and epage for start and end pages, but pages is allowed in openurl, check if this will work for iupui
published: 'date' # this is what iupui needs, but in format 1991-07-01 - date format may be a problem
year: 'rft.year' # this is what IUPUI uses
for d of defaults
config[d] = defaults[d] if not config[d]
url = ''
url += config.ill_added_params.replace('?','') + '&' if config.ill_added_params
url += config.sid + '=InstantILL&'
for k of meta
v = ''
if k is 'author'
for author in (if Array.isArray(meta.author) then meta.author else [meta.author])
v += ', ' if v.length
v += if typeof author is 'string' then author else if author.family then author.family + (if author.given then ', ' + author.given else '') else JSON.stringify author
else if k in ['doi','pmid','pmc','pmcid','url','journal','title','year','issn','volume','issue','page','crossref_type','publisher','published','notes']
v = meta[k]
url += (if config[k] then config[k] else k) + '=' + encodeURIComponent(v) + '&' if v
if meta.usermetadata
nfield = if config.notes then config.notes else 'notes'
url = url.replace 'usermetadata=true', ''
if url.indexOf(nfield+'=') is -1
url += '&' + nfield + '=The user provided some metadata.'
else
url = url.replace nfield+'=', nfield+'=The user provided some metadata. '
return url.replace '/&&/g', '&'
P.svc.oaworks.ill.subscription = (config, meta) ->
if not config and not meta and (@params.sub or @params.subscription) # assume values are being passed directly on GET request
config = @copy @params
config.subscription = config.sub if config.sub
if @params.meta
meta = @params.meta
delete config.meta
else if config.doi and @keys(config).length is 2
console.log config.doi
meta = await @svc.oaworks.metadata config.doi
delete config.doi
else
meta = @copy config
delete config.doi
config ?= @params.config ? {}
if typeof config is 'string'
config = await @fetch 'https://api.cottagelabs.com/service/oab/ill/config?uid=' + config
if not config? or JSON.stringify(config) is '{}'
config = await @fetch 'https://dev.api.cottagelabs.com/service/oab/ill/config?uid=' + (opts.from ? config)
meta ?= @params.meta
res = {findings:{}, lookups:[], error:[], contents: []}
if config.subscription?
if config.ill_redirect_params
config.ill_added_params ?= config.ill_redirect_params
# need to get their subscriptions link from their config - and need to know how to build the query string for it
openurl = await @svc.oaworks.ill.openurl config, meta
openurl = openurl.replace(config.ill_added_params.replace('?',''),'') if config.ill_added_params
if typeof config.subscription is 'string'
config.subscription = config.subscription.split(',')
if typeof config.subscription_type is 'string'
config.subscription_type = config.subscription_type.split(',')
config.subscription_type ?= []
for s of config.subscription
sub = config.subscription[s]
if typeof sub is 'object'
subtype = sub.type
sub = sub.url
else
subtype = config.subscription_type[s] ? 'unknown'
sub = sub.trim()
if sub
if subtype is 'serialssolutions' or sub.indexOf('serialssolutions') isnt -1 # and sub.indexOf('.xml.') is -1
tid = sub.split('.search')[0]
tid = tid.split('//')[1] if tid.indexOf('//') isnt -1
#bs = if sub.indexOf('://') isnt -1 then sub.split('://')[0] else 'http' # always use http because https on the xml endpoint fails
sub = 'http://' + tid + '.openurl.xml.serialssolutions.com/openurlxml?version=1.0&genre=article&'
else if (subtype is 'sfx' or sub.indexOf('sfx.') isnt -1) and sub.indexOf('sfx.response_type=simplexml') is -1
sub += (if sub.indexOf('?') is -1 then '?' else '&') + 'sfx.response_type=simplexml'
else if (subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1) and sub.indexOf('response_type') is -1
# https://github.com/OAButton/discussion/issues/1793
#sub = 'https://trails-msu.userservices.exlibrisgroup.com/view/uresolver/01TRAILS_MSU/openurl?svc_dat=CTO&response_type=xml&sid=InstantILL&'
sub = sub.split('?')[0] + '?svc_dat=CTO&response_type=xml&sid=InstantILL&'
#ID=doi:10.1108%2FNFS-09-2019-0293&genre=article&atitle=Impact%20of%20processing%20and%20packaging%20on%20the%20quality%20of%20murici%20jelly%20%5BByrsonima%20crassifolia%20(L.)%20rich%5D%20during%20storage.&title=Nutrition%20&%20Food%20Science&issn=00346659&volume=50&issue=5&date=20200901&au=Da%20Cunha,%20Mariana%20Crivelari&spage=871&pages=871-883
url = sub + (if sub.indexOf('?') is -1 then '?' else '&') + openurl
url = url.split('snc.idm.oclc.org/login?url=')[1] if url.indexOf('snc.idm.oclc.org/login?url=') isnt -1
url = url.replace('cache=true','')
if subtype is 'sfx' or sub.indexOf('sfx.') isnt -1 and url.indexOf('=10.') isnt -1
url = url.replace('=10.','=doi:10.')
if subtype is 'exlibris' or sub.indexOf('.exlibris') isnt -1 and url.indexOf('doi=10.') isnt -1
url = url.replace 'doi=10.', 'ID=doi:10.'
pg = ''
spg = ''
error = false
res.lookups.push url
try
# proxy may still be required if our main machine was registered with some of these ILL service providers...
pg = if url.indexOf('.xml.serialssolutions') isnt -1 or url.indexOf('sfx.response_type=simplexml') isnt -1 or url.indexOf('response_type=xml') isnt -1 then await @fetch(url) else await @puppet url
spg = if pg.indexOf('<body') isnt -1 then pg.toLowerCase().split('<body')[1].split('</body')[0] else pg
res.contents.push spg
catch err
error = true
# sfx
# with access:
# https://cricksfx.hosted.exlibrisgroup.com/crick?sid=Elsevier:Scopus&_service_type=getFullTxt&issn=00225193&isbn=&volume=467&issue=&spage=7&epage=14&pages=7-14&artnum=&date=2019&id=doi:10.1016%2fj.jtbi.2019.01.031&title=Journal+of+Theoretical+Biology&atitle=Potential+relations+between+post-spliced+introns+and+mature+mRNAs+in+the+Caenorhabditis+elegans+genome&aufirst=S.&auinit=S.&auinit1=S&aulast=Bo
# which will contain a link like:
# <A title="Navigate to target in new window" HREF="javascript:openSFXMenuLink(this, 'basic1', undefined, '_blank');">Go to Journal website at</A>
# but the content can be different on different sfx language pages, so need to find this link via the tag attributes, then trigger it, then get the page it opens
# can test this with 10.1016/j.jtbi.2019.01.031 on instantill page
# note there is also now an sfx xml endpoint that we have found to check
if subtype is 'sfx' or url.indexOf('sfx.') isnt -1
res.error.push 'sfx' if error
if spg.indexOf('getFullTxt') isnt -1 and spg.indexOf('<target_url>') isnt -1
try
# this will get the first target that has a getFullTxt type and has a target_url element with a value in it, or will error
res.url = spg.split('getFullTxt')[1].split('</target>')[0].split('<target_url>')[1].split('</target_url>')[0].trim()
res.findings.sfx = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'sfx'
return res
else
res.url = undefined
res.findings.sfx = undefined
else
if spg.indexOf('<a title="navigate to target in new window') isnt -1 and spg.split('<a title="navigate to target in new window')[1].split('">')[0].indexOf('basic1') isnt -1
# tried to get the next link after the click through, but was not worth putting more time into it. For now, seems like this will have to do
res.url = url
res.findings.sfx = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'sfx'
return res
else
res.url = undefined
res.findings.sfx = undefined
# eds
# note eds does need a login, but IP address range is supposed to get round that
# our IP is supposed to be registered with the library as being one of their internal ones so should not need login
# however a curl from our IP to it still does not seem to work - will try with puppeteer to see if it is blocking in other ways
# not sure why the links here are via an oclc login - tested, and we will use without it
# with access:
# https://snc.idm.oclc.org/login?url=http://resolver.ebscohost.com/openurl?sid=google&auinit=RE&aulast=Marx&atitle=Platelet-rich+plasma:+growth+factor+enhancement+for+bone+grafts&id=doi:10.1016/S1079-2104(98)90029-4&title=Oral+Surgery,+Oral+Medicine,+Oral+Pathology,+Oral+Radiology,+and+Endodontology&volume=85&issue=6&date=1998&spage=638&issn=1079-2104
# can be tested on instantill page with 10.1016/S1079-2104(98)90029-4
# without:
# https://snc.idm.oclc.org/login?url=http://resolver.ebscohost.com/openurl?sid=google&auinit=MP&aulast=Newton&atitle=Librarian+roles+in+institutional+repository+data+set+collecting:+outcomes+of+a+research+library+task+force&id=doi:10.1080/01462679.2011.530546
else if subtype is 'eds' or url.indexOf('ebscohost.') isnt -1
res.error.push 'eds' if error
if spg.indexOf('view this ') isnt -1 and pg.indexOf('<a data-auto="menu-link" href="') isnt -1
res.url = url.replace('://','______').split('/')[0].replace('______','://') + pg.split('<a data-auto="menu-link" href="')[1].split('" title="')[0]
res.findings.eds = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'eds'
return res
else
res.url = undefined
# serials solutions
# the HTML source code for the No Results page includes a span element with the class SS_NoResults. This class is only found on the No Results page (confirmed by serialssolutions)
# with:
# https://rx8kl6yf4x.search.serialssolutions.com/?genre=article&issn=14085348&title=Annales%3A%20Series%20Historia%20et%20Sociologia&volume=28&issue=1&date=20180101&atitle=HOW%20TO%20UNDERSTAND%20THE%20WAR%20IN%20SYRIA.&spage=13&PAGES=13-28&AUTHOR=%C5%A0TERBENC%2C%20Primo%C5%BE&&aufirst=&aulast=&sid=EBSCO:aph&pid=
# can test this on instantill page with How to understand the war in Syria - Annales Series Historia et Sociologia 2018
# but the with link has a suppressed link that has to be clicked to get the actual page with the content on it
# <a href="?ShowSupressedLinks=yes&SS_LibHash=RX8KL6YF4X&url_ver=Z39.88-2004&rfr_id=info:sid/sersol:RefinerQuery&rft_val_fmt=info:ofi/fmt:kev:mtx:journal&SS_ReferentFormat=JournalFormat&SS_formatselector=radio&rft.genre=article&SS_genreselector=1&rft.aulast=%C5%A0TERBENC&rft.aufirst=Primo%C5%BE&rft.date=2018-01-01&rft.issue=1&rft.volume=28&rft.atitle=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&rft.spage=13&rft.title=Annales%3A+Series+Historia+et+Sociologia&rft.issn=1408-5348&SS_issnh=1408-5348&rft.isbn=&SS_isbnh=&rft.au=%C5%A0TERBENC%2C+Primo%C5%BE&rft.pub=Zgodovinsko+dru%C5%A1tvo+za+ju%C5%BEno+Primorsko¶mdict=en-US&SS_PostParamDict=disableOneClick">Click here</a>
# which is the only link with the showsuppressedlinks param and the clickhere content
# then the page with the content link is like:
# https://rx8kl6yf4x.search.serialssolutions.com/?ShowSupressedLinks=yes&SS_LibHash=RX8KL6YF4X&url_ver=Z39.88-2004&rfr_id=info:sid/sersol:RefinerQuery&rft_val_fmt=info:ofi/fmt:kev:mtx:journal&SS_ReferentFormat=JournalFormat&SS_formatselector=radio&rft.genre=article&SS_genreselector=1&rft.aulast=%C5%A0TERBENC&rft.aufirst=Primo%C5%BE&rft.date=2018-01-01&rft.issue=1&rft.volume=28&rft.atitle=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&rft.spage=13&rft.title=Annales%3A+Series+Historia+et+Sociologia&rft.issn=1408-5348&SS_issnh=1408-5348&rft.isbn=&SS_isbnh=&rft.au=%C5%A0TERBENC%2C+Primo%C5%BE&rft.pub=Zgodovinsko+dru%C5%A1tvo+za+ju%C5%BEno+Primorsko¶mdict=en-US&SS_PostParamDict=disableOneClick
# and the content is found in a link like this:
# <div id="ArticleCL" class="cl">
# <a target="_blank" href="./log?L=RX8KL6YF4X&D=EAP&J=TC0000940997&P=Link&PT=EZProxy&A=HOW+TO+UNDERSTAND+THE+WAR+IN+SYRIA.&H=c7306f7121&U=http%3A%2F%2Fwww.ulib.iupui.edu%2Fcgi-bin%2Fproxy.pl%3Furl%3Dhttp%3A%2F%2Fopenurl.ebscohost.com%2Flinksvc%2Flinking.aspx%3Fgenre%3Darticle%26issn%3D1408-5348%26title%3DAnnales%2BSeries%2Bhistoria%2Bet%2Bsociologia%26date%3D2018%26volume%3D28%26issue%3D1%26spage%3D13%26atitle%3DHOW%2BTO%2BUNDERSTAND%2BTHE%2BWAR%2BIN%2BSYRIA.%26aulast%3D%25C5%25A0TERBENC%26aufirst%3DPrimo%C5%BE">Article</a>
# </div>
# without:
# https://rx8kl6yf4x.search.serialssolutions.com/directLink?&atitle=Writing+at+the+Speed+of+Sound%3A+Music+Stenography+and+Recording+beyond+the+Phonograph&author=PI:NAME:<NAME>END_PI%2C+PI:NAME:<NAME>END_PI+PI:NAME:<NAME>END_PI&issn=01482076&title=Nineteenth+Century+Music&volume=41&issue=2&date=2017-10-01&spage=121&id=doi:&sid=ProQ_ss&genre=article
# we also have an xml alternative for serials solutions
# see https://journal.code4lib.org/articles/108
else if subtype is 'serialssolutions' or url.indexOf('serialssolutions.') isnt -1
res.error.push 'serialssolutions' if error
if spg.indexOf('<ssopenurl:url type="article">') isnt -1
fnd = spg.split('<ssopenurl:url type="article">')[1].split('</ssopenurl:url>')[0].trim().replace(/&/g, '&') # this gets us something that has an empty accountid param - do we need that for it to work?
if fnd.length
res.url = fnd
res.findings.serials = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'serials'
return res
else
res.url = undefined
res.findings.serials = undefined
# disable journal matching for now until we have time to get it more accurate - some things get journal links but are not subscribed
#else if spg.indexOf('<ssopenurl:result format="journal">') isnt -1
# # we assume if there is a journal result but not a URL that it means the institution has a journal subscription but we don't have a link
# res.journal = true
# res.found = 'serials'
# return res
else
if spg.indexOf('ss_noresults') is -1
try
surl = url.split('?')[0] + '?ShowSupressedLinks' + pg.split('?ShowSupressedLinks')[1].split('">')[0]
npg = await @puppet surl # would this still need proxy?
if npg.indexOf('ArticleCL') isnt -1 and npg.split('DatabaseCL')[0].indexOf('href="./log') isnt -1
res.url = surl.split('?')[0] + npg.split('ArticleCL')[1].split('DatabaseCL')[0].split('href="')[1].split('">')[0].replace(/&/g, '&')
res.findings.serials = res.url
if res.url?
if res.url.indexOf('getitnow') is -1
res.found = 'serials'
return res
else
res.url = undefined
res.findings.serials = undefined
catch
res.error.push 'serialssolutions' if error
else if subtype is 'exlibris' or url.indexOf('.exlibris') isnt -1
res.error.push 'exlibris' if error
if spg.indexOf('full_text_indicator') isnt -1 and spg.split('full_text_indicator')[1].replace('">', '').indexOf('true') is 0 and spg.indexOf('resolution_url') isnt -1
res.url = spg.split('<resolution_url>')[1].split('</resolution_url>')[0].replace(/&/g, '&')
res.findings.exlibris = res.url
res.found = 'exlibris'
return res
return res
|
[
{
"context": "r()', () ->\n data = null\n example = \n name: \"Simple name\"\n description: \"Very clear description\"\n re",
"end": 199,
"score": 0.8866887092590332,
"start": 188,
"tag": "NAME",
"value": "Simple name"
},
{
"context": " before () -> \n example = \n... | test/unit/example-to-http-payload-pair-test.coffee | HBOCodeLabs/dredd | 0 | {assert} = require 'chai'
exampleToHttpPayloadPairs = require '../../src/example-to-http-payload-pair'
describe 'exampleToHttpPayloadPair()', () ->
data = null
example =
name: "Simple name"
description: "Very clear description"
requests: [
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
]
responses: [
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
]
before () ->
data = exampleToHttpPayloadPairs example
it 'should return an object', () ->
assert.isObject data
it 'should set response status', () ->
assert.isNotNull data['pair']['response']['status']
describe 'when single request and response per example', () ->
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return no warnings', () ->
assert.equal data['errors'].length, 0
it 'should return example request and response pair', () ->
assert.notEqual Object.keys(data['pair']), 0
describe 'when response schema is empty string', () ->
before () ->
example['responses'][0]['schema'] = ""
data = exampleToHttpPayloadPairs example
it 'should remove schema key from response', () ->
assert.isUndefined data['pair']['response']['schema']
describe 'when response schema is not empty string', () ->
before () ->
example['responses'][0]['schema'] = "{}"
data = exampleToHttpPayloadPairs example
it 'should add schema key to response', () ->
assert.isDefined data['pair']['response']['schema']
describe 'when multiple requests per example', () ->
before () ->
example =
name: "Simple name"
description: "Very clear description"
requests: [
{
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
}, {
name: "Text request"
headers:
'Content-Type': 'text/plain'
'Accept': 'text/plain'
body: 'Foo is bar'
}
]
responses: [
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return some warnings', () ->
assert.equal data['warnings'].length, 1
describe 'returned warning', () ->
warning = ''
before () ->
warning = data['warnings'][data['warnings'].length - 1]
it 'sohuld contain proper text', () ->
text = "Multiple requests, using first."
assert.include warning, text
describe 'returned payload pair', () ->
it 'should contain first request', () ->
assert.equal example['requests'][0]['body'], data['pair']['request']['body']
describe 'when multiple responses per example', () ->
before () ->
example =
name: "Simple name"
description: "Very clear description"
requests: [
{
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
}
]
responses: [
{
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
},
{
name: '404'
headers:
'Content-Type': 'application/json'
body: '{"message": "Not found"}'
}
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return some warnings', () ->
assert.equal data['warnings'].length, 1
describe 'returned warning', () ->
warning = ''
before () ->
warning = data['warnings'][data['warnings'].length - 1]
it 'sohuld contain proper text', () ->
text = "Multiple responses, using first."
assert.include warning, text
describe 'returned payload pair', () ->
it 'should contain first response', () ->
assert.equal example['responses'][0]['body'], data['pair']['response']['body']
describe 'when no request', () ->
before () ->
example =
name: "Simple name"
description: "Very clear description"
requests: []
responses: [
{
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
}
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return no warnings', () ->
assert.equal data['warnings'].length, 0
describe 'returned payload pair request', () ->
request = {}
before () ->
request = data['pair']['request']
it 'should have body with empty string', () ->
assert.equal request['body'], ''
describe 'when no response', () ->
before () ->
example =
name: "Simple name"
description: "Very clear description"
requests: [
{
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
}
]
responses: [
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return some warnings', () ->
assert.equal data['warnings'].length, 1
describe 'returned warning', () ->
warning = ''
before () ->
warning = data['warnings'][data['warnings'].length - 1]
it 'sohuld contain proper text', () ->
text = "No response available."
assert.include warning, text
it 'should not return any response pair', () ->
assert.deepEqual data['pair'], {}
| 157694 | {assert} = require 'chai'
exampleToHttpPayloadPairs = require '../../src/example-to-http-payload-pair'
describe 'exampleToHttpPayloadPair()', () ->
data = null
example =
name: "<NAME>"
description: "Very clear description"
requests: [
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
]
responses: [
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
]
before () ->
data = exampleToHttpPayloadPairs example
it 'should return an object', () ->
assert.isObject data
it 'should set response status', () ->
assert.isNotNull data['pair']['response']['status']
describe 'when single request and response per example', () ->
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return no warnings', () ->
assert.equal data['errors'].length, 0
it 'should return example request and response pair', () ->
assert.notEqual Object.keys(data['pair']), 0
describe 'when response schema is empty string', () ->
before () ->
example['responses'][0]['schema'] = ""
data = exampleToHttpPayloadPairs example
it 'should remove schema key from response', () ->
assert.isUndefined data['pair']['response']['schema']
describe 'when response schema is not empty string', () ->
before () ->
example['responses'][0]['schema'] = "{}"
data = exampleToHttpPayloadPairs example
it 'should add schema key to response', () ->
assert.isDefined data['pair']['response']['schema']
describe 'when multiple requests per example', () ->
before () ->
example =
name: "<NAME>"
description: "Very clear description"
requests: [
{
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
}, {
name: "Text request"
headers:
'Content-Type': 'text/plain'
'Accept': 'text/plain'
body: 'Foo is bar'
}
]
responses: [
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return some warnings', () ->
assert.equal data['warnings'].length, 1
describe 'returned warning', () ->
warning = ''
before () ->
warning = data['warnings'][data['warnings'].length - 1]
it 'sohuld contain proper text', () ->
text = "Multiple requests, using first."
assert.include warning, text
describe 'returned payload pair', () ->
it 'should contain first request', () ->
assert.equal example['requests'][0]['body'], data['pair']['request']['body']
describe 'when multiple responses per example', () ->
before () ->
example =
name: "Simple name"
description: "Very clear description"
requests: [
{
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
}
]
responses: [
{
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
},
{
name: '404'
headers:
'Content-Type': 'application/json'
body: '{"message": "Not found"}'
}
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return some warnings', () ->
assert.equal data['warnings'].length, 1
describe 'returned warning', () ->
warning = ''
before () ->
warning = data['warnings'][data['warnings'].length - 1]
it 'sohuld contain proper text', () ->
text = "Multiple responses, using first."
assert.include warning, text
describe 'returned payload pair', () ->
it 'should contain first response', () ->
assert.equal example['responses'][0]['body'], data['pair']['response']['body']
describe 'when no request', () ->
before () ->
example =
name: "<NAME>"
description: "Very clear description"
requests: []
responses: [
{
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
}
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return no warnings', () ->
assert.equal data['warnings'].length, 0
describe 'returned payload pair request', () ->
request = {}
before () ->
request = data['pair']['request']
it 'should have body with empty string', () ->
assert.equal request['body'], ''
describe 'when no response', () ->
before () ->
example =
name: "<NAME>"
description: "Very clear description"
requests: [
{
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
}
]
responses: [
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return some warnings', () ->
assert.equal data['warnings'].length, 1
describe 'returned warning', () ->
warning = ''
before () ->
warning = data['warnings'][data['warnings'].length - 1]
it 'sohuld contain proper text', () ->
text = "No response available."
assert.include warning, text
it 'should not return any response pair', () ->
assert.deepEqual data['pair'], {}
| true | {assert} = require 'chai'
exampleToHttpPayloadPairs = require '../../src/example-to-http-payload-pair'
describe 'exampleToHttpPayloadPair()', () ->
data = null
example =
name: "PI:NAME:<NAME>END_PI"
description: "Very clear description"
requests: [
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
]
responses: [
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
]
before () ->
data = exampleToHttpPayloadPairs example
it 'should return an object', () ->
assert.isObject data
it 'should set response status', () ->
assert.isNotNull data['pair']['response']['status']
describe 'when single request and response per example', () ->
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return no warnings', () ->
assert.equal data['errors'].length, 0
it 'should return example request and response pair', () ->
assert.notEqual Object.keys(data['pair']), 0
describe 'when response schema is empty string', () ->
before () ->
example['responses'][0]['schema'] = ""
data = exampleToHttpPayloadPairs example
it 'should remove schema key from response', () ->
assert.isUndefined data['pair']['response']['schema']
describe 'when response schema is not empty string', () ->
before () ->
example['responses'][0]['schema'] = "{}"
data = exampleToHttpPayloadPairs example
it 'should add schema key to response', () ->
assert.isDefined data['pair']['response']['schema']
describe 'when multiple requests per example', () ->
before () ->
example =
name: "PI:NAME:<NAME>END_PI"
description: "Very clear description"
requests: [
{
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
}, {
name: "Text request"
headers:
'Content-Type': 'text/plain'
'Accept': 'text/plain'
body: 'Foo is bar'
}
]
responses: [
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return some warnings', () ->
assert.equal data['warnings'].length, 1
describe 'returned warning', () ->
warning = ''
before () ->
warning = data['warnings'][data['warnings'].length - 1]
it 'sohuld contain proper text', () ->
text = "Multiple requests, using first."
assert.include warning, text
describe 'returned payload pair', () ->
it 'should contain first request', () ->
assert.equal example['requests'][0]['body'], data['pair']['request']['body']
describe 'when multiple responses per example', () ->
before () ->
example =
name: "Simple name"
description: "Very clear description"
requests: [
{
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
}
]
responses: [
{
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
},
{
name: '404'
headers:
'Content-Type': 'application/json'
body: '{"message": "Not found"}'
}
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return some warnings', () ->
assert.equal data['warnings'].length, 1
describe 'returned warning', () ->
warning = ''
before () ->
warning = data['warnings'][data['warnings'].length - 1]
it 'sohuld contain proper text', () ->
text = "Multiple responses, using first."
assert.include warning, text
describe 'returned payload pair', () ->
it 'should contain first response', () ->
assert.equal example['responses'][0]['body'], data['pair']['response']['body']
describe 'when no request', () ->
before () ->
example =
name: "PI:NAME:<NAME>END_PI"
description: "Very clear description"
requests: []
responses: [
{
name: '200'
headers:
'Content-Type': 'application/json'
body: '{"foo": "bar"}'
}
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return no warnings', () ->
assert.equal data['warnings'].length, 0
describe 'returned payload pair request', () ->
request = {}
before () ->
request = data['pair']['request']
it 'should have body with empty string', () ->
assert.equal request['body'], ''
describe 'when no response', () ->
before () ->
example =
name: "PI:NAME:<NAME>END_PI"
description: "Very clear description"
requests: [
{
name: "JSON request"
headers:
'Content-Type': 'application/json'
'Accept': 'application/json'
body: '{"foo": "bar"}'
}
]
responses: [
]
data = exampleToHttpPayloadPairs example
it 'should return no error', () ->
assert.equal data['errors'].length, 0
it 'should return some warnings', () ->
assert.equal data['warnings'].length, 1
describe 'returned warning', () ->
warning = ''
before () ->
warning = data['warnings'][data['warnings'].length - 1]
it 'sohuld contain proper text', () ->
text = "No response available."
assert.include warning, text
it 'should not return any response pair', () ->
assert.deepEqual data['pair'], {}
|
[
{
"context": "###\n# Copyright jtlebi.fr <admin@jtlebi.fr> and other contributors.",
"end": 17,
"score": 0.9726555347442627,
"start": 16,
"tag": "EMAIL",
"value": "j"
},
{
"context": "###\n# Copyright jtlebi.fr <admin@jtlebi.fr> and other contributors.\n#\n# ",
"end": 22,
"sc... | src/lib/configurator.coffee | yadutaf/Weathermap-archive | 1 | ###
# Copyright jtlebi.fr <admin@jtlebi.fr> and other contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
###
# This file abstracts config files load. Only JSON is supported at the moment
# but it allows comments to be included to ease the configuration. We also
# watch the file so that it is automagically reloaded every time a change is done
# Each time config is reloaded, a callback will be called for
# further processing
Fs = require 'fs'
parse = (blob) ->
# Remove commnts to get *valid* json
blob = blob.replace(/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/gm,"").replace(/#.*/g,"").replace(/\/\/.*/g,"")
# Try to parse it
try
config = JSON.parse blob
catch e
console.error "There is a syntax error in your config file. Not reloading !"
console.error e.message
false
loadFile = (path, cb, sync=false) ->
if sync
data = Fs.readFileSync path
cb parse data.toString()
else
Fs.readFile path, (err, data) ->
cb parse data.toString() if not err else cb false
module.exports = (path, cb) ->
Fs.watchFile path, (c, p) =>
if c.mtime < p.mtime #reload only if modified
loadFile path, cb
loadFile path, cb, true
| 110657 | ###
# Copyright <EMAIL>tlebi<EMAIL>.fr <<EMAIL>> and other contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
###
# This file abstracts config files load. Only JSON is supported at the moment
# but it allows comments to be included to ease the configuration. We also
# watch the file so that it is automagically reloaded every time a change is done
# Each time config is reloaded, a callback will be called for
# further processing
Fs = require 'fs'
parse = (blob) ->
# Remove commnts to get *valid* json
blob = blob.replace(/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/gm,"").replace(/#.*/g,"").replace(/\/\/.*/g,"")
# Try to parse it
try
config = JSON.parse blob
catch e
console.error "There is a syntax error in your config file. Not reloading !"
console.error e.message
false
loadFile = (path, cb, sync=false) ->
if sync
data = Fs.readFileSync path
cb parse data.toString()
else
Fs.readFile path, (err, data) ->
cb parse data.toString() if not err else cb false
module.exports = (path, cb) ->
Fs.watchFile path, (c, p) =>
if c.mtime < p.mtime #reload only if modified
loadFile path, cb
loadFile path, cb, true
| true | ###
# Copyright PI:EMAIL:<EMAIL>END_PItlebiPI:EMAIL:<EMAIL>END_PI.fr <PI:EMAIL:<EMAIL>END_PI> and other contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
###
# This file abstracts config files load. Only JSON is supported at the moment
# but it allows comments to be included to ease the configuration. We also
# watch the file so that it is automagically reloaded every time a change is done
# Each time config is reloaded, a callback will be called for
# further processing
Fs = require 'fs'
parse = (blob) ->
# Remove commnts to get *valid* json
blob = blob.replace(/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/gm,"").replace(/#.*/g,"").replace(/\/\/.*/g,"")
# Try to parse it
try
config = JSON.parse blob
catch e
console.error "There is a syntax error in your config file. Not reloading !"
console.error e.message
false
loadFile = (path, cb, sync=false) ->
if sync
data = Fs.readFileSync path
cb parse data.toString()
else
Fs.readFile path, (err, data) ->
cb parse data.toString() if not err else cb false
module.exports = (path, cb) ->
Fs.watchFile path, (c, p) =>
if c.mtime < p.mtime #reload only if modified
loadFile path, cb
loadFile path, cb, true
|
[
{
"context": "╯°□°)╯︵ ┻━┻)'\n keywords = 'Zorium'\n name = 'Zorium Seed'\n twitterHandle = '@ZoriumJS'\n themeColor =",
"end": 496,
"score": 0.7866171002388,
"start": 485,
"tag": "NAME",
"value": "Zorium Seed"
},
{
"context": "rium'\n name = 'Zorium Seed'\n twitterH... | src/components/head/index.coffee | rachelmcquirk/hiking | 0 | z = require 'zorium'
paperColors = require 'zorium-paper/colors.json'
config = require '../../config'
gulpConfig = require '../../../gulp_config'
module.exports = class Head
render: ({styles, bundlePath, title}) ->
isInliningSource = config.ENV is config.ENVS.PROD
webpackDevHostname = gulpConfig.WEBPACK_DEV_HOSTNAME
webpackDevPort = gulpConfig.WEBPACK_DEV_PORT
title ?= 'Hiking'
description = 'Zorium Seed - (╯°□°)╯︵ ┻━┻)'
keywords = 'Zorium'
name = 'Zorium Seed'
twitterHandle = '@ZoriumJS'
themeColor = paperColors.$cyan700
favicon = '/images/zorium_icon_32.png'
icon1024 = '/images/zorium_icon_1024.png'
icon256 = '/images/zorium_icon_256.png'
url = 'https://zorium.org'
z 'head',
z 'title', "#{title}"
z 'meta', {name: 'description', content: "#{description}"}
z 'meta', {name: 'keywords', content: "#{keywords}"}
# mobile
z 'meta',
name: 'viewport'
content: 'initial-scale=1.0, width=device-width, minimum-scale=1.0,
maximum-scale=1.0, user-scalable=0, minimal-ui'
z 'meta', {name: 'msapplication-tap-highlight', content: 'no'}
z 'meta', {name: 'apple-mobile-web-app-capable', content: 'yes'}
# Schema.org markup for Google+
z 'meta', {itemprop: 'name', content: "#{name}"}
z 'meta', {itemprop: 'description', content: "#{description}"}
z 'meta', {itemprop: 'image', content: "#{icon256}"}
# Twitter card
z 'meta', {name: 'twitter:card', content: 'summary_large_image'}
z 'meta', {name: 'twitter:site', content: "#{twitterHandle}"}
z 'meta', {name: 'twitter:creator', content: "#{twitterHandle}"}
z 'meta', {name: 'twitter:title', content: "#{title}"}
z 'meta', {name: 'twitter:description', content: "#{description}"}
z 'meta', {name: 'twitter:image:src', content: "#{icon1024}"}
# Open Graph
z 'meta', {property: 'og:title', content: "#{name}"}
z 'meta', {property: 'og:type', content: 'website'}
z 'meta', {property: 'og:url', content: "#{url}"}
z 'meta', {property: 'og:image', content: "#{icon1024}"}
z 'meta', {property: 'og:description', content: "#{description}"}
z 'meta', {property: 'og:site_name', content: "#{name}"}
# iOS
z 'link', {rel: 'apple-touch-icon', href: "#{icon256}"}
# misc
z 'meta', {name: 'theme-color', content: "#{themeColor}"}
z 'link', {rel: 'shortcut icon', href: "#{favicon}"}
# fonts
z 'style',
innerHTML: '
@font-face {
font-family: "Roboto";
font-style: normal;
font-weight: 300;
src:
local("Roboto Light"),
local("Roboto-Light"),
url(https://fonts.gstatic.com/s/roboto/v15/Hgo13k-tfSpn0qi1S' +
'FdUfZBw1xU1rKptJj_0jans920.woff2) format("woff2"),
url(https://fonts.gstatic.com/s/roboto/v15/Hgo13k-tfSpn0qi1S' +
'FdUfbO3LdcAZYWl9Si6vvxL-qU.woff) format("woff"),
url(https://fonts.gstatic.com/s/roboto/v15/Hgo13k-tfSpn0qi1S' +
'FdUfSZ2oysoEQEeKwjgmXLRnTc.ttf) format("truetype");
}
@font-face {
font-family: "Roboto";
font-style: normal;
font-weight: 400;
src:
local("Roboto"),
local("Roboto-Regular"),
url(https://fonts.gstatic.com/s/roboto/v15/oMMgfZMQthOryQo9n' +
'22dcuvvDin1pK8aKteLpeZ5c0A.woff2) format("woff2"),
url(https://fonts.gstatic.com/s/roboto/v15/CrYjSnGjrRCn0pd9V' +
'QsnFOvvDin1pK8aKteLpeZ5c0A.woff) format("woff"),
url(https://fonts.gstatic.com/s/roboto/v15/QHD8zigcbDB8aPfIo' +
'aupKOvvDin1pK8aKteLpeZ5c0A.ttf) format("truetype");
}
@font-face {
font-family: "Roboto";
font-style: normal;
font-weight: 500;
src:
local("Roboto Medium"),
local("Roboto-Medium"),
url(https://fonts.gstatic.com/s/roboto/v15/RxZJdnzeo3R5zSexg' +
'e8UUZBw1xU1rKptJj_0jans920.woff2) format("woff2"),
url(https://fonts.gstatic.com/s/roboto/v15/RxZJdnzeo3R5zSexg' +
'e8UUbO3LdcAZYWl9Si6vvxL-qU.woff) format("woff"),
url(https://fonts.gstatic.com/s/roboto/v15/RxZJdnzeo3R5zSexg' +
'e8UUSZ2oysoEQEeKwjgmXLRnTc.ttf) format("truetype");
}
'
# styles
if isInliningSource
z 'style.styles',
innerHTML: styles
else
null
# scripts
z 'script',
src: 'https://maps.googleapis.com/maps/api/js?key=AIzaSyAhdBkbeqCvLyGS41_sZDhZuzlyu45al8w'
z 'script.bundle',
async: true
src: if isInliningSource then bundlePath \
else "//#{webpackDevHostname}:#{webpackDevPort}/bundle.js"
| 166274 | z = require 'zorium'
paperColors = require 'zorium-paper/colors.json'
config = require '../../config'
gulpConfig = require '../../../gulp_config'
module.exports = class Head
render: ({styles, bundlePath, title}) ->
isInliningSource = config.ENV is config.ENVS.PROD
webpackDevHostname = gulpConfig.WEBPACK_DEV_HOSTNAME
webpackDevPort = gulpConfig.WEBPACK_DEV_PORT
title ?= 'Hiking'
description = 'Zorium Seed - (╯°□°)╯︵ ┻━┻)'
keywords = 'Zorium'
name = '<NAME>'
twitterHandle = '@ZoriumJS'
themeColor = paperColors.$cyan700
favicon = '/images/zorium_icon_32.png'
icon1024 = '/images/zorium_icon_1024.png'
icon256 = '/images/zorium_icon_256.png'
url = 'https://zorium.org'
z 'head',
z 'title', "#{title}"
z 'meta', {name: 'description', content: "#{description}"}
z 'meta', {name: 'keywords', content: "#{keywords}"}
# mobile
z 'meta',
name: 'viewport'
content: 'initial-scale=1.0, width=device-width, minimum-scale=1.0,
maximum-scale=1.0, user-scalable=0, minimal-ui'
z 'meta', {name: 'msapplication-tap-highlight', content: 'no'}
z 'meta', {name: 'apple-mobile-web-app-capable', content: 'yes'}
# Schema.org markup for Google+
z 'meta', {itemprop: 'name', content: "#{name}"}
z 'meta', {itemprop: 'description', content: "#{description}"}
z 'meta', {itemprop: 'image', content: "#{icon256}"}
# Twitter card
z 'meta', {name: 'twitter:card', content: 'summary_large_image'}
z 'meta', {name: 'twitter:site', content: "#{twitterHandle}"}
z 'meta', {name: 'twitter:creator', content: "#{twitterHandle}"}
z 'meta', {name: 'twitter:title', content: "#{title}"}
z 'meta', {name: 'twitter:description', content: "#{description}"}
z 'meta', {name: 'twitter:image:src', content: "#{icon1024}"}
# Open Graph
z 'meta', {property: 'og:title', content: "#{name}"}
z 'meta', {property: 'og:type', content: 'website'}
z 'meta', {property: 'og:url', content: "#{url}"}
z 'meta', {property: 'og:image', content: "#{icon1024}"}
z 'meta', {property: 'og:description', content: "#{description}"}
z 'meta', {property: 'og:site_name', content: "#{name}"}
# iOS
z 'link', {rel: 'apple-touch-icon', href: "#{icon256}"}
# misc
z 'meta', {name: 'theme-color', content: "#{themeColor}"}
z 'link', {rel: 'shortcut icon', href: "#{favicon}"}
# fonts
z 'style',
innerHTML: '
@font-face {
font-family: "Roboto";
font-style: normal;
font-weight: 300;
src:
local("Roboto Light"),
local("Roboto-Light"),
url(https://fonts.gstatic.com/s/roboto/v15/Hgo13k-tfSpn0qi1S' +
'FdUfZBw1xU1rKptJj_0jans920.woff2) format("woff2"),
url(https://fonts.gstatic.com/s/roboto/v15/Hgo13k-tfSpn0qi1S' +
'FdUfbO3LdcAZYWl9Si6vvxL-qU.woff) format("woff"),
url(https://fonts.gstatic.com/s/roboto/v15/Hgo13k-tfSpn0qi1S' +
'FdUfSZ2oysoEQEeKwjgmXLRnTc.ttf) format("truetype");
}
@font-face {
font-family: "Roboto";
font-style: normal;
font-weight: 400;
src:
local("Roboto"),
local("Roboto-Regular"),
url(https://fonts.gstatic.com/s/roboto/v15/oMMgfZMQthOryQo9n' +
'22dcuvvDin1pK8aKteLpeZ5c0A.woff2) format("woff2"),
url(https://fonts.gstatic.com/s/roboto/v15/CrYjSnGjrRCn0pd9V' +
'QsnFOvvDin1pK8aKteLpeZ5c0A.woff) format("woff"),
url(https://fonts.gstatic.com/s/roboto/v15/QHD8zigcbDB8aPfIo' +
'aupKOvvDin1pK8aKteLpeZ5c0A.ttf) format("truetype");
}
@font-face {
font-family: "Roboto";
font-style: normal;
font-weight: 500;
src:
local("Roboto Medium"),
local("Roboto-Medium"),
url(https://fonts.gstatic.com/s/roboto/v15/RxZJdnzeo3R5zSexg' +
'e8UUZBw1xU1rKptJj_0jans920.woff2) format("woff2"),
url(https://fonts.gstatic.com/s/roboto/v15/RxZJdnzeo3R5zSexg' +
'e8UUbO3LdcAZYWl9Si6vvxL-qU.woff) format("woff"),
url(https://fonts.gstatic.com/s/roboto/v15/RxZJdnzeo3R5zSexg' +
'e8UUSZ2oysoEQEeKwjgmXLRnTc.ttf) format("truetype");
}
'
# styles
if isInliningSource
z 'style.styles',
innerHTML: styles
else
null
# scripts
z 'script',
src: 'https://maps.googleapis.com/maps/api/js?key=<KEY>'
z 'script.bundle',
async: true
src: if isInliningSource then bundlePath \
else "//#{webpackDevHostname}:#{webpackDevPort}/bundle.js"
| true | z = require 'zorium'
paperColors = require 'zorium-paper/colors.json'
config = require '../../config'
gulpConfig = require '../../../gulp_config'
module.exports = class Head
render: ({styles, bundlePath, title}) ->
isInliningSource = config.ENV is config.ENVS.PROD
webpackDevHostname = gulpConfig.WEBPACK_DEV_HOSTNAME
webpackDevPort = gulpConfig.WEBPACK_DEV_PORT
title ?= 'Hiking'
description = 'Zorium Seed - (╯°□°)╯︵ ┻━┻)'
keywords = 'Zorium'
name = 'PI:NAME:<NAME>END_PI'
twitterHandle = '@ZoriumJS'
themeColor = paperColors.$cyan700
favicon = '/images/zorium_icon_32.png'
icon1024 = '/images/zorium_icon_1024.png'
icon256 = '/images/zorium_icon_256.png'
url = 'https://zorium.org'
z 'head',
z 'title', "#{title}"
z 'meta', {name: 'description', content: "#{description}"}
z 'meta', {name: 'keywords', content: "#{keywords}"}
# mobile
z 'meta',
name: 'viewport'
content: 'initial-scale=1.0, width=device-width, minimum-scale=1.0,
maximum-scale=1.0, user-scalable=0, minimal-ui'
z 'meta', {name: 'msapplication-tap-highlight', content: 'no'}
z 'meta', {name: 'apple-mobile-web-app-capable', content: 'yes'}
# Schema.org markup for Google+
z 'meta', {itemprop: 'name', content: "#{name}"}
z 'meta', {itemprop: 'description', content: "#{description}"}
z 'meta', {itemprop: 'image', content: "#{icon256}"}
# Twitter card
z 'meta', {name: 'twitter:card', content: 'summary_large_image'}
z 'meta', {name: 'twitter:site', content: "#{twitterHandle}"}
z 'meta', {name: 'twitter:creator', content: "#{twitterHandle}"}
z 'meta', {name: 'twitter:title', content: "#{title}"}
z 'meta', {name: 'twitter:description', content: "#{description}"}
z 'meta', {name: 'twitter:image:src', content: "#{icon1024}"}
# Open Graph
z 'meta', {property: 'og:title', content: "#{name}"}
z 'meta', {property: 'og:type', content: 'website'}
z 'meta', {property: 'og:url', content: "#{url}"}
z 'meta', {property: 'og:image', content: "#{icon1024}"}
z 'meta', {property: 'og:description', content: "#{description}"}
z 'meta', {property: 'og:site_name', content: "#{name}"}
# iOS
z 'link', {rel: 'apple-touch-icon', href: "#{icon256}"}
# misc
z 'meta', {name: 'theme-color', content: "#{themeColor}"}
z 'link', {rel: 'shortcut icon', href: "#{favicon}"}
# fonts
z 'style',
innerHTML: '
@font-face {
font-family: "Roboto";
font-style: normal;
font-weight: 300;
src:
local("Roboto Light"),
local("Roboto-Light"),
url(https://fonts.gstatic.com/s/roboto/v15/Hgo13k-tfSpn0qi1S' +
'FdUfZBw1xU1rKptJj_0jans920.woff2) format("woff2"),
url(https://fonts.gstatic.com/s/roboto/v15/Hgo13k-tfSpn0qi1S' +
'FdUfbO3LdcAZYWl9Si6vvxL-qU.woff) format("woff"),
url(https://fonts.gstatic.com/s/roboto/v15/Hgo13k-tfSpn0qi1S' +
'FdUfSZ2oysoEQEeKwjgmXLRnTc.ttf) format("truetype");
}
@font-face {
font-family: "Roboto";
font-style: normal;
font-weight: 400;
src:
local("Roboto"),
local("Roboto-Regular"),
url(https://fonts.gstatic.com/s/roboto/v15/oMMgfZMQthOryQo9n' +
'22dcuvvDin1pK8aKteLpeZ5c0A.woff2) format("woff2"),
url(https://fonts.gstatic.com/s/roboto/v15/CrYjSnGjrRCn0pd9V' +
'QsnFOvvDin1pK8aKteLpeZ5c0A.woff) format("woff"),
url(https://fonts.gstatic.com/s/roboto/v15/QHD8zigcbDB8aPfIo' +
'aupKOvvDin1pK8aKteLpeZ5c0A.ttf) format("truetype");
}
@font-face {
font-family: "Roboto";
font-style: normal;
font-weight: 500;
src:
local("Roboto Medium"),
local("Roboto-Medium"),
url(https://fonts.gstatic.com/s/roboto/v15/RxZJdnzeo3R5zSexg' +
'e8UUZBw1xU1rKptJj_0jans920.woff2) format("woff2"),
url(https://fonts.gstatic.com/s/roboto/v15/RxZJdnzeo3R5zSexg' +
'e8UUbO3LdcAZYWl9Si6vvxL-qU.woff) format("woff"),
url(https://fonts.gstatic.com/s/roboto/v15/RxZJdnzeo3R5zSexg' +
'e8UUSZ2oysoEQEeKwjgmXLRnTc.ttf) format("truetype");
}
'
# styles
if isInliningSource
z 'style.styles',
innerHTML: styles
else
null
# scripts
z 'script',
src: 'https://maps.googleapis.com/maps/api/js?key=PI:KEY:<KEY>END_PI'
z 'script.bundle',
async: true
src: if isInliningSource then bundlePath \
else "//#{webpackDevHostname}:#{webpackDevPort}/bundle.js"
|
[
{
"context": ":\n user: process.env.NODEMAILER_EMAIL\n pass: process.env.NODEMAILER_PASS\n\napp.post '/email', (req, res, next) ->\n ",
"end": 408,
"score": 0.9190142750740051,
"start": 388,
"tag": "PASSWORD",
"value": "process.env.NODEMAIL"
},
{
"context": ".NODEMAILER_EMAIL\... | app.coffee | TerryWareNet/terry-ware-net | 0 | express = require 'express'
bodyParser = require 'body-parser'
nodemailer = require 'nodemailer'
app = express()
app.use bodyParser.json()
app.use bodyParser.urlencoded extended: true
app.use express.static __dirname + '/old-site'
transporter = nodemailer.createTransport
service: process.env.NODEMAILER_SERVICE
secure: true
auth:
user: process.env.NODEMAILER_EMAIL
pass: process.env.NODEMAILER_PASS
app.post '/email', (req, res, next) ->
orderedFields = [ 'Subject', 'CustomerName', 'EmailAddress', 'Comments' ]
text = "Received web inquiry:\n\n"
for field in orderedFields when value = req.body[field]
value = value.replace /\r\n/g, '\n'
if -1 isnt value.indexOf '\n'
value = '\n' + value.replace /^/mg, ' '
text += "#{field}: #{value}\n"
transporter.sendMail
from: process.env.NODEMAILER_EMAIL
to: process.env.NODEMAILER_TO_EMAIL || process.env.NODEMAILER_EMAIL
subject: req.body.subject || 'inquiries'
text: text
(err, info) ->
if err?
console.error "Error sending email: ", err
return res.status(500).send()
console.log "Sent email with req body", req.body
res.redirect req.body.redirect || '/'
app.listen (port = process.env.PORT or 8080), ->
console.log "Listening on port #{port}"
| 34504 | express = require 'express'
bodyParser = require 'body-parser'
nodemailer = require 'nodemailer'
app = express()
app.use bodyParser.json()
app.use bodyParser.urlencoded extended: true
app.use express.static __dirname + '/old-site'
transporter = nodemailer.createTransport
service: process.env.NODEMAILER_SERVICE
secure: true
auth:
user: process.env.NODEMAILER_EMAIL
pass: <PASSWORD>ER_<PASSWORD>
app.post '/email', (req, res, next) ->
orderedFields = [ 'Subject', 'CustomerName', 'EmailAddress', 'Comments' ]
text = "Received web inquiry:\n\n"
for field in orderedFields when value = req.body[field]
value = value.replace /\r\n/g, '\n'
if -1 isnt value.indexOf '\n'
value = '\n' + value.replace /^/mg, ' '
text += "#{field}: #{value}\n"
transporter.sendMail
from: process.env.NODEMAILER_EMAIL
to: process.env.NODEMAILER_TO_EMAIL || process.env.NODEMAILER_EMAIL
subject: req.body.subject || 'inquiries'
text: text
(err, info) ->
if err?
console.error "Error sending email: ", err
return res.status(500).send()
console.log "Sent email with req body", req.body
res.redirect req.body.redirect || '/'
app.listen (port = process.env.PORT or 8080), ->
console.log "Listening on port #{port}"
| true | express = require 'express'
bodyParser = require 'body-parser'
nodemailer = require 'nodemailer'
app = express()
app.use bodyParser.json()
app.use bodyParser.urlencoded extended: true
app.use express.static __dirname + '/old-site'
transporter = nodemailer.createTransport
service: process.env.NODEMAILER_SERVICE
secure: true
auth:
user: process.env.NODEMAILER_EMAIL
pass: PI:PASSWORD:<PASSWORD>END_PIER_PI:PASSWORD:<PASSWORD>END_PI
app.post '/email', (req, res, next) ->
orderedFields = [ 'Subject', 'CustomerName', 'EmailAddress', 'Comments' ]
text = "Received web inquiry:\n\n"
for field in orderedFields when value = req.body[field]
value = value.replace /\r\n/g, '\n'
if -1 isnt value.indexOf '\n'
value = '\n' + value.replace /^/mg, ' '
text += "#{field}: #{value}\n"
transporter.sendMail
from: process.env.NODEMAILER_EMAIL
to: process.env.NODEMAILER_TO_EMAIL || process.env.NODEMAILER_EMAIL
subject: req.body.subject || 'inquiries'
text: text
(err, info) ->
if err?
console.error "Error sending email: ", err
return res.status(500).send()
console.log "Sent email with req body", req.body
res.redirect req.body.redirect || '/'
app.listen (port = process.env.PORT or 8080), ->
console.log "Listening on port #{port}"
|
[
{
"context": "# Copyright 2012, 2014, 2015 Dominik Heier\n#\n# This file is part of coffee-snake.\n#\n# coffee",
"end": 42,
"score": 0.9998148083686829,
"start": 29,
"tag": "NAME",
"value": "Dominik Heier"
}
] | stake.coffee | EmployeeEnrichment/Snake_07.20 | 0 | # Copyright 2012, 2014, 2015 Dominik Heier
#
# This file is part of coffee-snake.
#
# coffee-snake is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Game extends atom.Game
constructor: (h, w, ps) ->
super
atom.input.bind atom.key.LEFT_ARROW, 'move_left'
atom.input.bind atom.key.RIGHT_ARROW, 'move_right'
atom.input.bind atom.key.UP_ARROW, 'move_up'
atom.input.bind atom.key.DOWN_ARROW, 'move_down'
atom.input.bind atom.key.SPACE, 'toggle_pause'
@height = h
@width = w
@pixelsize = ps
# Style the canvas
window.onresize = (e) -> return
canvas_container = document.getElementById('canvas_container')
canvas_container.style.width = @width * @pixelsize + "px"
atom.canvas.style.border = "#fff 1px solid"
atom.canvas.style.position = "relative"
atom.canvas.height = @height * @pixelsize
atom.canvas.width = @width * @pixelsize
#Start the game
@startGame()
startGame: ->
# Initialize
_x = Math.floor(@width / 2)
_y = Math.floor(@height / 2)
@snake = [ [ _x, _y ], [ --_x, _y ], [ --_x, _y ], [ --_x, _y ] ]
@dir = ""
@newdir = "right"
@score = 0
@gstarted = true
@gpaused = false
@food = []
@last_dt = 0.00
@delay = 0.08
@noshow = true
@gpaused = true
[@tx , @ty] = [@width * @pixelsize, @height * @pixelsize]
@genFood() # generate food pixel
@showIntro() # show intro screen
genFood: ->
x = undefined
y = undefined
loop
x = Math.floor(Math.random() * (@width - 1))
y = Math.floor(Math.random() * (@height - 1))
break unless @testCollision(x, y)
@food = [ x, y ]
drawFood: ->
atom.context.beginPath()
atom.context.arc (@food[0] * @pixelsize) + @pixelsize / 2, (@food[1] * @pixelsize) + @pixelsize / 2, @pixelsize / 2, 0, Math.PI * 2, false
atom.context.fill()
drawSnake: ->
i = 0
l = @snake.length
while i < l
x = @snake[i][0]
y = @snake[i][1]
atom.context.fillRect x * @pixelsize, y * @pixelsize, @pixelsize, @pixelsize
i++
testCollision: (x, y) ->
return true if x < 0 or x > @width - 1
return true if y < 0 or y > @height - 1
i = 0
l = @snake.length
while i < l
return true if x is @snake[i][0] and y is @snake[i][1]
i++
false
endGame: ->
@gstarted = false
@noshow = true
atom.context.fillStyle = "#fff"
atom.context.strokeStyle = '#000'
# Game over
[mess, x , y] = ["Game Over", @tx / 2 , @ty / 2.4]
atom.context.font = "bold 30px monospace"
atom.context.textAlign = "center"
atom.context.fillText mess, x, y
atom.context.strokeText mess, x, y
# score
atom.context.font = "bold 25px monospace"
[mess, x , y] = ["Score: " + @score, @tx / 2 , @ty / 1.7]
atom.context.fillText mess, x, y
atom.context.strokeText mess, x, y
togglePause: ->
unless @gpaused
@noshow = true
@gpaused = true
[mess, x , y] = ["Paused", @tx / 2, @ty / 2]
atom.context.fillStyle = "#fff"
atom.context.font = "bold 30px monospace"
atom.context.textAlign = "center"
atom.context.fillText mess, x, y
atom.context.strokeText mess, x, y
else
@gpaused = false
@noshow = false
showIntro: ->
atom.context.fillStyle = "#000000"
atom.context.font = "30px sans-serif"
atom.context.textAlign = "center"
atom.context.textAlign = "left"
atom.context.font = "30px monospace"
atom.context.fillText "Instructions:", 2 * @pixelsize, @ty / 3
atom.context.font = "18px monospace"
atom.context.fillText "Use arrow keys to change direction.", 2 * @pixelsize, @ty / 2.3
atom.context.fillText "Press space to start/pause.", 2 * @pixelsize, @ty / 2.1
atom.context.fillText "Pro-tip: Press space now!", 2 * @pixelsize, @ty / 1.7
update: (dt) ->
# Check keyboard input
if atom.input.pressed 'move_left'
@newdir = "left" unless @dir is "right"
console.log "left"
else if atom.input.pressed 'move_up'
@newdir = "up" unless @dir is "down"
else if atom.input.pressed 'move_right'
@newdir = "right" unless @dir is "left"
else if atom.input.pressed 'move_down'
@newdir = "down" unless @dir is "up"
else if atom.input.pressed 'toggle_pause'
unless @gstarted
@eraseCanvas()
@startGame()
else
@togglePause()
# Slow down the game
if @last_dt < @delay
@last_dt += dt
return
else
@last_dt = 0.00
# Don't do anything if game is paused or stopped
return if not @gstarted or @gpaused
# Update snake
x = @snake[0][0]
y = @snake[0][1]
switch @newdir
when "up"
y--
when "right"
x++
when "down"
y++
when "left"
x--
# Check for collision with self or wall
if @testCollision(x, y)
@endGame()
return
# Move the snake
@snake.unshift [ x, y ]
if x is @food[0] and y is @food[1]
@score++
@genFood()
else
@snake.pop()
@dir = @newdir
eraseCanvas: ->
atom.context.fillStyle = "#000"
atom.context.fillRect 0, 0, @width * @pixelsize, @height * @pixelsize
atom.context.fillStyle = "#fff"
draw: ->
unless @noshow
@eraseCanvas()
@drawFood()
@drawSnake()
game = new Game(15, 20, 30)
game.run()
| 46291 | # Copyright 2012, 2014, 2015 <NAME>
#
# This file is part of coffee-snake.
#
# coffee-snake is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Game extends atom.Game
constructor: (h, w, ps) ->
super
atom.input.bind atom.key.LEFT_ARROW, 'move_left'
atom.input.bind atom.key.RIGHT_ARROW, 'move_right'
atom.input.bind atom.key.UP_ARROW, 'move_up'
atom.input.bind atom.key.DOWN_ARROW, 'move_down'
atom.input.bind atom.key.SPACE, 'toggle_pause'
@height = h
@width = w
@pixelsize = ps
# Style the canvas
window.onresize = (e) -> return
canvas_container = document.getElementById('canvas_container')
canvas_container.style.width = @width * @pixelsize + "px"
atom.canvas.style.border = "#fff 1px solid"
atom.canvas.style.position = "relative"
atom.canvas.height = @height * @pixelsize
atom.canvas.width = @width * @pixelsize
#Start the game
@startGame()
startGame: ->
# Initialize
_x = Math.floor(@width / 2)
_y = Math.floor(@height / 2)
@snake = [ [ _x, _y ], [ --_x, _y ], [ --_x, _y ], [ --_x, _y ] ]
@dir = ""
@newdir = "right"
@score = 0
@gstarted = true
@gpaused = false
@food = []
@last_dt = 0.00
@delay = 0.08
@noshow = true
@gpaused = true
[@tx , @ty] = [@width * @pixelsize, @height * @pixelsize]
@genFood() # generate food pixel
@showIntro() # show intro screen
genFood: ->
x = undefined
y = undefined
loop
x = Math.floor(Math.random() * (@width - 1))
y = Math.floor(Math.random() * (@height - 1))
break unless @testCollision(x, y)
@food = [ x, y ]
drawFood: ->
atom.context.beginPath()
atom.context.arc (@food[0] * @pixelsize) + @pixelsize / 2, (@food[1] * @pixelsize) + @pixelsize / 2, @pixelsize / 2, 0, Math.PI * 2, false
atom.context.fill()
drawSnake: ->
i = 0
l = @snake.length
while i < l
x = @snake[i][0]
y = @snake[i][1]
atom.context.fillRect x * @pixelsize, y * @pixelsize, @pixelsize, @pixelsize
i++
testCollision: (x, y) ->
return true if x < 0 or x > @width - 1
return true if y < 0 or y > @height - 1
i = 0
l = @snake.length
while i < l
return true if x is @snake[i][0] and y is @snake[i][1]
i++
false
endGame: ->
@gstarted = false
@noshow = true
atom.context.fillStyle = "#fff"
atom.context.strokeStyle = '#000'
# Game over
[mess, x , y] = ["Game Over", @tx / 2 , @ty / 2.4]
atom.context.font = "bold 30px monospace"
atom.context.textAlign = "center"
atom.context.fillText mess, x, y
atom.context.strokeText mess, x, y
# score
atom.context.font = "bold 25px monospace"
[mess, x , y] = ["Score: " + @score, @tx / 2 , @ty / 1.7]
atom.context.fillText mess, x, y
atom.context.strokeText mess, x, y
togglePause: ->
unless @gpaused
@noshow = true
@gpaused = true
[mess, x , y] = ["Paused", @tx / 2, @ty / 2]
atom.context.fillStyle = "#fff"
atom.context.font = "bold 30px monospace"
atom.context.textAlign = "center"
atom.context.fillText mess, x, y
atom.context.strokeText mess, x, y
else
@gpaused = false
@noshow = false
showIntro: ->
atom.context.fillStyle = "#000000"
atom.context.font = "30px sans-serif"
atom.context.textAlign = "center"
atom.context.textAlign = "left"
atom.context.font = "30px monospace"
atom.context.fillText "Instructions:", 2 * @pixelsize, @ty / 3
atom.context.font = "18px monospace"
atom.context.fillText "Use arrow keys to change direction.", 2 * @pixelsize, @ty / 2.3
atom.context.fillText "Press space to start/pause.", 2 * @pixelsize, @ty / 2.1
atom.context.fillText "Pro-tip: Press space now!", 2 * @pixelsize, @ty / 1.7
update: (dt) ->
# Check keyboard input
if atom.input.pressed 'move_left'
@newdir = "left" unless @dir is "right"
console.log "left"
else if atom.input.pressed 'move_up'
@newdir = "up" unless @dir is "down"
else if atom.input.pressed 'move_right'
@newdir = "right" unless @dir is "left"
else if atom.input.pressed 'move_down'
@newdir = "down" unless @dir is "up"
else if atom.input.pressed 'toggle_pause'
unless @gstarted
@eraseCanvas()
@startGame()
else
@togglePause()
# Slow down the game
if @last_dt < @delay
@last_dt += dt
return
else
@last_dt = 0.00
# Don't do anything if game is paused or stopped
return if not @gstarted or @gpaused
# Update snake
x = @snake[0][0]
y = @snake[0][1]
switch @newdir
when "up"
y--
when "right"
x++
when "down"
y++
when "left"
x--
# Check for collision with self or wall
if @testCollision(x, y)
@endGame()
return
# Move the snake
@snake.unshift [ x, y ]
if x is @food[0] and y is @food[1]
@score++
@genFood()
else
@snake.pop()
@dir = @newdir
eraseCanvas: ->
atom.context.fillStyle = "#000"
atom.context.fillRect 0, 0, @width * @pixelsize, @height * @pixelsize
atom.context.fillStyle = "#fff"
draw: ->
unless @noshow
@eraseCanvas()
@drawFood()
@drawSnake()
game = new Game(15, 20, 30)
game.run()
| true | # Copyright 2012, 2014, 2015 PI:NAME:<NAME>END_PI
#
# This file is part of coffee-snake.
#
# coffee-snake is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
class Game extends atom.Game
constructor: (h, w, ps) ->
super
atom.input.bind atom.key.LEFT_ARROW, 'move_left'
atom.input.bind atom.key.RIGHT_ARROW, 'move_right'
atom.input.bind atom.key.UP_ARROW, 'move_up'
atom.input.bind atom.key.DOWN_ARROW, 'move_down'
atom.input.bind atom.key.SPACE, 'toggle_pause'
@height = h
@width = w
@pixelsize = ps
# Style the canvas
window.onresize = (e) -> return
canvas_container = document.getElementById('canvas_container')
canvas_container.style.width = @width * @pixelsize + "px"
atom.canvas.style.border = "#fff 1px solid"
atom.canvas.style.position = "relative"
atom.canvas.height = @height * @pixelsize
atom.canvas.width = @width * @pixelsize
#Start the game
@startGame()
startGame: ->
# Initialize
_x = Math.floor(@width / 2)
_y = Math.floor(@height / 2)
@snake = [ [ _x, _y ], [ --_x, _y ], [ --_x, _y ], [ --_x, _y ] ]
@dir = ""
@newdir = "right"
@score = 0
@gstarted = true
@gpaused = false
@food = []
@last_dt = 0.00
@delay = 0.08
@noshow = true
@gpaused = true
[@tx , @ty] = [@width * @pixelsize, @height * @pixelsize]
@genFood() # generate food pixel
@showIntro() # show intro screen
genFood: ->
x = undefined
y = undefined
loop
x = Math.floor(Math.random() * (@width - 1))
y = Math.floor(Math.random() * (@height - 1))
break unless @testCollision(x, y)
@food = [ x, y ]
drawFood: ->
atom.context.beginPath()
atom.context.arc (@food[0] * @pixelsize) + @pixelsize / 2, (@food[1] * @pixelsize) + @pixelsize / 2, @pixelsize / 2, 0, Math.PI * 2, false
atom.context.fill()
drawSnake: ->
i = 0
l = @snake.length
while i < l
x = @snake[i][0]
y = @snake[i][1]
atom.context.fillRect x * @pixelsize, y * @pixelsize, @pixelsize, @pixelsize
i++
testCollision: (x, y) ->
return true if x < 0 or x > @width - 1
return true if y < 0 or y > @height - 1
i = 0
l = @snake.length
while i < l
return true if x is @snake[i][0] and y is @snake[i][1]
i++
false
endGame: ->
@gstarted = false
@noshow = true
atom.context.fillStyle = "#fff"
atom.context.strokeStyle = '#000'
# Game over
[mess, x , y] = ["Game Over", @tx / 2 , @ty / 2.4]
atom.context.font = "bold 30px monospace"
atom.context.textAlign = "center"
atom.context.fillText mess, x, y
atom.context.strokeText mess, x, y
# score
atom.context.font = "bold 25px monospace"
[mess, x , y] = ["Score: " + @score, @tx / 2 , @ty / 1.7]
atom.context.fillText mess, x, y
atom.context.strokeText mess, x, y
togglePause: ->
unless @gpaused
@noshow = true
@gpaused = true
[mess, x , y] = ["Paused", @tx / 2, @ty / 2]
atom.context.fillStyle = "#fff"
atom.context.font = "bold 30px monospace"
atom.context.textAlign = "center"
atom.context.fillText mess, x, y
atom.context.strokeText mess, x, y
else
@gpaused = false
@noshow = false
showIntro: ->
atom.context.fillStyle = "#000000"
atom.context.font = "30px sans-serif"
atom.context.textAlign = "center"
atom.context.textAlign = "left"
atom.context.font = "30px monospace"
atom.context.fillText "Instructions:", 2 * @pixelsize, @ty / 3
atom.context.font = "18px monospace"
atom.context.fillText "Use arrow keys to change direction.", 2 * @pixelsize, @ty / 2.3
atom.context.fillText "Press space to start/pause.", 2 * @pixelsize, @ty / 2.1
atom.context.fillText "Pro-tip: Press space now!", 2 * @pixelsize, @ty / 1.7
update: (dt) ->
# Check keyboard input
if atom.input.pressed 'move_left'
@newdir = "left" unless @dir is "right"
console.log "left"
else if atom.input.pressed 'move_up'
@newdir = "up" unless @dir is "down"
else if atom.input.pressed 'move_right'
@newdir = "right" unless @dir is "left"
else if atom.input.pressed 'move_down'
@newdir = "down" unless @dir is "up"
else if atom.input.pressed 'toggle_pause'
unless @gstarted
@eraseCanvas()
@startGame()
else
@togglePause()
# Slow down the game
if @last_dt < @delay
@last_dt += dt
return
else
@last_dt = 0.00
# Don't do anything if game is paused or stopped
return if not @gstarted or @gpaused
# Update snake
x = @snake[0][0]
y = @snake[0][1]
switch @newdir
when "up"
y--
when "right"
x++
when "down"
y++
when "left"
x--
# Check for collision with self or wall
if @testCollision(x, y)
@endGame()
return
# Move the snake
@snake.unshift [ x, y ]
if x is @food[0] and y is @food[1]
@score++
@genFood()
else
@snake.pop()
@dir = @newdir
eraseCanvas: ->
atom.context.fillStyle = "#000"
atom.context.fillRect 0, 0, @width * @pixelsize, @height * @pixelsize
atom.context.fillStyle = "#fff"
draw: ->
unless @noshow
@eraseCanvas()
@drawFood()
@drawSnake()
game = new Game(15, 20, 30)
game.run()
|
[
{
"context": "# @shojib\n\ndefine [], () ->\n\n \"use strict\"\n\n # Service\n ",
"end": 9,
"score": 0.9996089339256287,
"start": 2,
"tag": "USERNAME",
"value": "@shojib"
},
{
"context": "Cweb_url%2Cmultimedia%2Cpub_date&page=100&api-key=5445ed010346db7ab31fc33e55049350:8:68807489... | src/modules/article/js/factory.coffee | shojib/ngPhonegap | 1 | # @shojib
define [], () ->
"use strict"
# Service
Factory = (resource) ->
resource "http://api.nytimes.com/svc/search/v2/articlesearch.json?begin_date=20000101&end_date=20140214&sort=newest&fl=headline%2Clead_paragraph%2Cweb_url%2Cmultimedia%2Cpub_date&page=100&api-key=5445ed010346db7ab31fc33e55049350:8:68807489",
get:
method: "GET"
# Params injection
Factory.$inject = ["$resource"]
# Return
Factory
| 162597 | # @shojib
define [], () ->
"use strict"
# Service
Factory = (resource) ->
resource "http://api.nytimes.com/svc/search/v2/articlesearch.json?begin_date=20000101&end_date=20140214&sort=newest&fl=headline%2Clead_paragraph%2Cweb_url%2Cmultimedia%2Cpub_date&page=100&api-key=<KEY>",
get:
method: "GET"
# Params injection
Factory.$inject = ["$resource"]
# Return
Factory
| true | # @shojib
define [], () ->
"use strict"
# Service
Factory = (resource) ->
resource "http://api.nytimes.com/svc/search/v2/articlesearch.json?begin_date=20000101&end_date=20140214&sort=newest&fl=headline%2Clead_paragraph%2Cweb_url%2Cmultimedia%2Cpub_date&page=100&api-key=PI:KEY:<KEY>END_PI",
get:
method: "GET"
# Params injection
Factory.$inject = ["$resource"]
# Return
Factory
|
[
{
"context": "# @file rand.coffee\n# @Copyright (c) 2016 Taylor Siviter\n# This source code is licensed under the MIT Lice",
"end": 56,
"score": 0.9997962713241577,
"start": 42,
"tag": "NAME",
"value": "Taylor Siviter"
}
] | src/maths/rand.coffee | siviter-t/lampyridae.coffee | 4 | # @file rand.coffee
# @Copyright (c) 2016 Taylor Siviter
# This source code is licensed under the MIT License.
# For full information, see the LICENSE file in the project root.
require 'lampyridae'
### Generates a random float from the given interval [l, u) where u > l.
#
# @param l [Number] The lower bound of the interval
# @param u [Number] The upper bound of the interval
# @return [Number] Random number from the interval [l, u)
###
Lampyridae.rand = (l, u) ->
if arguments.length == 0 then return Math.random()
return (u - l) * Math.random() + l
# end function Lampyridae.rand
module.exports = Lampyridae.rand | 79558 | # @file rand.coffee
# @Copyright (c) 2016 <NAME>
# This source code is licensed under the MIT License.
# For full information, see the LICENSE file in the project root.
require 'lampyridae'
### Generates a random float from the given interval [l, u) where u > l.
#
# @param l [Number] The lower bound of the interval
# @param u [Number] The upper bound of the interval
# @return [Number] Random number from the interval [l, u)
###
Lampyridae.rand = (l, u) ->
if arguments.length == 0 then return Math.random()
return (u - l) * Math.random() + l
# end function Lampyridae.rand
module.exports = Lampyridae.rand | true | # @file rand.coffee
# @Copyright (c) 2016 PI:NAME:<NAME>END_PI
# This source code is licensed under the MIT License.
# For full information, see the LICENSE file in the project root.
require 'lampyridae'
### Generates a random float from the given interval [l, u) where u > l.
#
# @param l [Number] The lower bound of the interval
# @param u [Number] The upper bound of the interval
# @return [Number] Random number from the interval [l, u)
###
Lampyridae.rand = (l, u) ->
if arguments.length == 0 then return Math.random()
return (u - l) * Math.random() + l
# end function Lampyridae.rand
module.exports = Lampyridae.rand |
[
{
"context": "encoded and loaded from localstorage\n#\n# @author Olivier Bossel <olivier.bossel@gmail.com>\n# @created 23.11.15\n#",
"end": 147,
"score": 0.9998769164085388,
"start": 133,
"tag": "NAME",
"value": "Olivier Bossel"
},
{
"context": "d from localstorage\n#\n# @author ... | node_modules/sugarcss/coffee/sugar-webfonts.coffee | hagsey/nlpt2 | 0 | ###
# Sugar-webfonts.js
#
# This little js file allow you to use webfonts based64 encoded and loaded from localstorage
#
# @author Olivier Bossel <olivier.bossel@gmail.com>
# @created 23.11.15
# @updated 23.11.15
# @version 1.0.0
###
((factory) ->
if typeof define == 'function' and define.amd
# AMD. Register as an anonymous module.
define [ ], factory
else if typeof exports == 'object'
# Node/CommonJS
factory()
else
# Browser globals
factory()
return
) () ->
window.SugarWebfonts =
# variables
_key : 'sugar-webfonts'
_cache : null
# track if already inited
_inited : false
# default settings that can be overrided on init
_settings :
version : '581fea09a1e08e3770d777ca504608ee'
json_path : '/fonts/fonts.json'
debug : false
###
Init
###
init : (settings) ->
# extend settings
@_settings = @_extend @_settings, settings
# update inited state
@_inited = true
# check if a cachebuster is set
cb_split = @_settings.json_path.split '#'
@_settings.version = cb_split[1] if cb_split.length == 2
@_settings.json_path = cb_split[0] if cb_split.length == 2
try
@_cache = window.localStorage.getItem(@_key)
if @_cache
@_cache = JSON.parse @_cache
if @_cache.version == @_settings.version
@_debug 'No new version of your fonts.'
@_insertFont @_cache.value
else
@_debug 'new version of your fonts.'
# Busting cache when version doesn't match
window.localStorage.removeItem @_key
@_cache = null
catch e
# Most likely LocalStorage disabled
@_debug 'your browser seems to not support the localStorage api'
if not @_cache
# Fonts not in LocalStorage or version did not match
window.addEventListener 'load', =>
request = new XMLHttpRequest
response = undefined
request.open 'GET', @_settings.json_path, true
_this = @
request.onload = ->
if @status == 200
try
response = JSON.parse @responseText
fontface = '';
for index, font of response.fonts
fontface += '@font-face{'
for prop, value of font
value = '"'+value+'"' if prop == 'font-family'
fontface += prop + ':' + value + ';'
fontface += '}';
_this._insertFont fontface
window.localStorage.setItem _this._key, JSON.stringify
version : _this._settings.version
value : fontface
catch e
# LocalStorage is probably full
request.send()
###
Extend settingd
###
_extend : (obj, mixin) ->
obj[name] = method for name, method of mixin
obj
###
Insert font
###
_insertFont : (value) ->
@_debug 'inserting fonts'
style = document.createElement('style')
style.innerHTML = value
document.head.appendChild style
###
Debug
###
_debug : ->
console.log 'SUGAR-WEBFONTS', arguments if @_settings.debug
# return the Sugar object
SugarWebfonts | 39256 | ###
# Sugar-webfonts.js
#
# This little js file allow you to use webfonts based64 encoded and loaded from localstorage
#
# @author <NAME> <<EMAIL>>
# @created 23.11.15
# @updated 23.11.15
# @version 1.0.0
###
((factory) ->
if typeof define == 'function' and define.amd
# AMD. Register as an anonymous module.
define [ ], factory
else if typeof exports == 'object'
# Node/CommonJS
factory()
else
# Browser globals
factory()
return
) () ->
window.SugarWebfonts =
# variables
_key : '<KEY>'
_cache : null
# track if already inited
_inited : false
# default settings that can be overrided on init
_settings :
version : '581fea09a1e08e3770d777ca504608ee'
json_path : '/fonts/fonts.json'
debug : false
###
Init
###
init : (settings) ->
# extend settings
@_settings = @_extend @_settings, settings
# update inited state
@_inited = true
# check if a cachebuster is set
cb_split = @_settings.json_path.split '#'
@_settings.version = cb_split[1] if cb_split.length == 2
@_settings.json_path = cb_split[0] if cb_split.length == 2
try
@_cache = window.localStorage.getItem(@_key)
if @_cache
@_cache = JSON.parse @_cache
if @_cache.version == @_settings.version
@_debug 'No new version of your fonts.'
@_insertFont @_cache.value
else
@_debug 'new version of your fonts.'
# Busting cache when version doesn't match
window.localStorage.removeItem @_key
@_cache = null
catch e
# Most likely LocalStorage disabled
@_debug 'your browser seems to not support the localStorage api'
if not @_cache
# Fonts not in LocalStorage or version did not match
window.addEventListener 'load', =>
request = new XMLHttpRequest
response = undefined
request.open 'GET', @_settings.json_path, true
_this = @
request.onload = ->
if @status == 200
try
response = JSON.parse @responseText
fontface = '';
for index, font of response.fonts
fontface += '@font-face{'
for prop, value of font
value = '"'+value+'"' if prop == 'font-family'
fontface += prop + ':' + value + ';'
fontface += '}';
_this._insertFont fontface
window.localStorage.setItem _this._key, JSON.stringify
version : _this._settings.version
value : fontface
catch e
# LocalStorage is probably full
request.send()
###
Extend settingd
###
_extend : (obj, mixin) ->
obj[name] = method for name, method of mixin
obj
###
Insert font
###
_insertFont : (value) ->
@_debug 'inserting fonts'
style = document.createElement('style')
style.innerHTML = value
document.head.appendChild style
###
Debug
###
_debug : ->
console.log 'SUGAR-WEBFONTS', arguments if @_settings.debug
# return the Sugar object
SugarWebfonts | true | ###
# Sugar-webfonts.js
#
# This little js file allow you to use webfonts based64 encoded and loaded from localstorage
#
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# @created 23.11.15
# @updated 23.11.15
# @version 1.0.0
###
((factory) ->
if typeof define == 'function' and define.amd
# AMD. Register as an anonymous module.
define [ ], factory
else if typeof exports == 'object'
# Node/CommonJS
factory()
else
# Browser globals
factory()
return
) () ->
window.SugarWebfonts =
# variables
_key : 'PI:KEY:<KEY>END_PI'
_cache : null
# track if already inited
_inited : false
# default settings that can be overrided on init
_settings :
version : '581fea09a1e08e3770d777ca504608ee'
json_path : '/fonts/fonts.json'
debug : false
###
Init
###
init : (settings) ->
# extend settings
@_settings = @_extend @_settings, settings
# update inited state
@_inited = true
# check if a cachebuster is set
cb_split = @_settings.json_path.split '#'
@_settings.version = cb_split[1] if cb_split.length == 2
@_settings.json_path = cb_split[0] if cb_split.length == 2
try
@_cache = window.localStorage.getItem(@_key)
if @_cache
@_cache = JSON.parse @_cache
if @_cache.version == @_settings.version
@_debug 'No new version of your fonts.'
@_insertFont @_cache.value
else
@_debug 'new version of your fonts.'
# Busting cache when version doesn't match
window.localStorage.removeItem @_key
@_cache = null
catch e
# Most likely LocalStorage disabled
@_debug 'your browser seems to not support the localStorage api'
if not @_cache
# Fonts not in LocalStorage or version did not match
window.addEventListener 'load', =>
request = new XMLHttpRequest
response = undefined
request.open 'GET', @_settings.json_path, true
_this = @
request.onload = ->
if @status == 200
try
response = JSON.parse @responseText
fontface = '';
for index, font of response.fonts
fontface += '@font-face{'
for prop, value of font
value = '"'+value+'"' if prop == 'font-family'
fontface += prop + ':' + value + ';'
fontface += '}';
_this._insertFont fontface
window.localStorage.setItem _this._key, JSON.stringify
version : _this._settings.version
value : fontface
catch e
# LocalStorage is probably full
request.send()
###
Extend settingd
###
_extend : (obj, mixin) ->
obj[name] = method for name, method of mixin
obj
###
Insert font
###
_insertFont : (value) ->
@_debug 'inserting fonts'
style = document.createElement('style')
style.innerHTML = value
document.head.appendChild style
###
Debug
###
_debug : ->
console.log 'SUGAR-WEBFONTS', arguments if @_settings.debug
# return the Sugar object
SugarWebfonts |
[
{
"context": "ult: 12 }\n name: { type: \"string\", default: 'John Doe' }\n category: { type: \"string\", default: 'am",
"end": 584,
"score": 0.9998292326927185,
"start": 576,
"tag": "NAME",
"value": "John Doe"
}
] | test/client.coffee | coderofsalvation/ohmygraph | 2 | ohmygraph = require 'ohmygraph'
graph =
repositories:
type: "array"
items: [{"$ref":"#/repository"}]
data: { sort:'stars',q:'ohmy',order:'desc' }
request:
get:
config:
method: 'get'
url: '/search/repositories'
payload:
q: '{repositories.data.q}'
sort: '{repositories.data.sort}'
order: '{repositories.data.order}'
data: "{response.items}"
repository:
type: "object"
properties:
id: { type:"number", default: 12 }
name: { type: "string", default: 'John Doe' }
category: { type: "string", default: 'amsterdam' }
data: {}
request:
get:
config:
method: 'get'
url: '/repos/{repository.data.full_name}'
payload: {}
data: "{response}"
post:
type: "request"
config:
method: "post"
url: '/book'
payload:
'fullname': '{book.name}'
'firstname': '{firstname}'
'category': '{book.category}'
schema: {"$ref":"#/book"}
data: "{response}"
omg = ohmygraph.create graph, {baseurl: "https://api.github.com",verbose:2}
#console.log omg.export_functions(true)
#process.exit()
omg.init.client()
client = omg.graph
client.repositories.on 'data', (repositories) ->
console.log "on repositories"
repositories[0].get()
client.repository.on 'data', (repository) ->
console.log "on repository"
console.dir repository
client.repositories.get()
#client.repositories.get {q:"foo"}
#omg.graph.repository.data.get (data) ->
# console.log "\n->receive: "+data
| 119229 | ohmygraph = require 'ohmygraph'
graph =
repositories:
type: "array"
items: [{"$ref":"#/repository"}]
data: { sort:'stars',q:'ohmy',order:'desc' }
request:
get:
config:
method: 'get'
url: '/search/repositories'
payload:
q: '{repositories.data.q}'
sort: '{repositories.data.sort}'
order: '{repositories.data.order}'
data: "{response.items}"
repository:
type: "object"
properties:
id: { type:"number", default: 12 }
name: { type: "string", default: '<NAME>' }
category: { type: "string", default: 'amsterdam' }
data: {}
request:
get:
config:
method: 'get'
url: '/repos/{repository.data.full_name}'
payload: {}
data: "{response}"
post:
type: "request"
config:
method: "post"
url: '/book'
payload:
'fullname': '{book.name}'
'firstname': '{firstname}'
'category': '{book.category}'
schema: {"$ref":"#/book"}
data: "{response}"
omg = ohmygraph.create graph, {baseurl: "https://api.github.com",verbose:2}
#console.log omg.export_functions(true)
#process.exit()
omg.init.client()
client = omg.graph
client.repositories.on 'data', (repositories) ->
console.log "on repositories"
repositories[0].get()
client.repository.on 'data', (repository) ->
console.log "on repository"
console.dir repository
client.repositories.get()
#client.repositories.get {q:"foo"}
#omg.graph.repository.data.get (data) ->
# console.log "\n->receive: "+data
| true | ohmygraph = require 'ohmygraph'
graph =
repositories:
type: "array"
items: [{"$ref":"#/repository"}]
data: { sort:'stars',q:'ohmy',order:'desc' }
request:
get:
config:
method: 'get'
url: '/search/repositories'
payload:
q: '{repositories.data.q}'
sort: '{repositories.data.sort}'
order: '{repositories.data.order}'
data: "{response.items}"
repository:
type: "object"
properties:
id: { type:"number", default: 12 }
name: { type: "string", default: 'PI:NAME:<NAME>END_PI' }
category: { type: "string", default: 'amsterdam' }
data: {}
request:
get:
config:
method: 'get'
url: '/repos/{repository.data.full_name}'
payload: {}
data: "{response}"
post:
type: "request"
config:
method: "post"
url: '/book'
payload:
'fullname': '{book.name}'
'firstname': '{firstname}'
'category': '{book.category}'
schema: {"$ref":"#/book"}
data: "{response}"
omg = ohmygraph.create graph, {baseurl: "https://api.github.com",verbose:2}
#console.log omg.export_functions(true)
#process.exit()
omg.init.client()
client = omg.graph
client.repositories.on 'data', (repositories) ->
console.log "on repositories"
repositories[0].get()
client.repository.on 'data', (repository) ->
console.log "on repository"
console.dir repository
client.repositories.get()
#client.repositories.get {q:"foo"}
#omg.graph.repository.data.get (data) ->
# console.log "\n->receive: "+data
|
[
{
"context": " project: atom.project.getPaths()[0]\n name: 'Test 1'\n command: 'echo test'\n wd: '.'\n m",
"end": 373,
"score": 0.9510395526885986,
"start": 367,
"tag": "NAME",
"value": "Test 1"
},
{
"context": "and = new Command(command)\n command.oldname = 'T... | spec/command-edit-pane-spec.coffee | fstiewitz/build-tools-cpp | 3 | CommandEditPane = require '../lib/view/command-edit-pane'
Command = require '../lib/provider/command'
describe 'Command Edit Pane', ->
view = null
accept = null
cancel = null
command = null
beforeEach ->
accept = jasmine.createSpy('accept')
cancel = jasmine.createSpy('cancel')
command =
project: atom.project.getPaths()[0]
name: 'Test 1'
command: 'echo test'
wd: '.'
modifier:
save_all: {}
stdout:
highlighting: 'nh'
stderr:
highlighting: 'hc'
profile: 'python'
output:
console:
close_success: true
command = new Command(command)
command.oldname = 'Test 1'
view = new CommandEditPane(command)
view.setCallbacks accept, cancel
jasmine.attachToDOM(view.element)
it 'has a pane', ->
expect(view.element).toBeDefined()
it 'has 12 edit panes', ->
expect(view.find('.inset-panel').length).toBe 12
it 'has the correct values', ->
expect(view.panes[0].view.command_name.getModel().getText()).toBe 'Test 1'
expect(view.panes[1].pane.find('#save_all').prop('checked')).toBe true
expect(view.panes[6].view._stderr.panes[0].view.profile[0].selectedIndex).toBe 3
expect(view.panes[7].view.find('#close_success').prop('checked')).toBe true
describe 'On accept', ->
beforeEach ->
view.panes[1].pane.find('#save_all').prop('checked', false)
view.find('.btn-primary').click()
it 'returns the correct values', ->
res = accept.mostRecentCall.args[0]
oldname = accept.mostRecentCall.args[1]
expect(accept).toHaveBeenCalled()
expect(oldname).toBe 'Test 1'
expect(res.project).toBe atom.project.getPaths()[0]
expect(res.command).toBe 'echo test'
expect(res.modifier.save_all).toBeUndefined()
expect(res.stdout.pipeline).toEqual []
expect(res.stderr.pipeline).toEqual [
{
name: 'profile'
config:
profile: 'python'
}
]
expect(res.output.console.close_success).toBe true
expect(res.output.linter).toBeUndefined()
it 'calls the cancel callback', ->
expect(cancel).toHaveBeenCalled()
describe 'Pane can be created with atom.views.getView', ->
[c, p] = []
execute = (callback) ->
waitsForPromise -> atom.packages.activatePackage('build-tools')
runs -> callback()
it 'On getView with default command', ->
execute ->
c = new Command
p = atom.views.getView(c)
jasmine.attachToDOM(p.element)
expect(p.panes[0].view.command_name.getModel().getText()).toBe ''
expect(p.command.oldname).toBeUndefined()
it 'on getView with a valid command', ->
execute ->
command.oldname = undefined
c = new Command(command)
p = atom.views.getView(c)
jasmine.attachToDOM(p.element)
expect(p.panes[0].view.command_name.getModel().getText()).toBe 'Test 1'
expect(p.command.oldname).toBe 'Test 1'
describe 'use blacklist to hide modules', ->
beforeEach ->
view.remove()
view = new CommandEditPane(command)
view.setCallbacks accept, cancel
view.setBlacklist ['general', 'console']
jasmine.attachToDOM(view.element)
it 'shows all views minus the blacklisted ones', ->
expect(view.find('.inset-panel').length).toBe 10
expect(view.panes[0].view.command_name).toBeUndefined()
| 124049 | CommandEditPane = require '../lib/view/command-edit-pane'
Command = require '../lib/provider/command'
describe 'Command Edit Pane', ->
view = null
accept = null
cancel = null
command = null
beforeEach ->
accept = jasmine.createSpy('accept')
cancel = jasmine.createSpy('cancel')
command =
project: atom.project.getPaths()[0]
name: '<NAME>'
command: 'echo test'
wd: '.'
modifier:
save_all: {}
stdout:
highlighting: 'nh'
stderr:
highlighting: 'hc'
profile: 'python'
output:
console:
close_success: true
command = new Command(command)
command.oldname = '<NAME>'
view = new CommandEditPane(command)
view.setCallbacks accept, cancel
jasmine.attachToDOM(view.element)
it 'has a pane', ->
expect(view.element).toBeDefined()
it 'has 12 edit panes', ->
expect(view.find('.inset-panel').length).toBe 12
it 'has the correct values', ->
expect(view.panes[0].view.command_name.getModel().getText()).toBe 'Test 1'
expect(view.panes[1].pane.find('#save_all').prop('checked')).toBe true
expect(view.panes[6].view._stderr.panes[0].view.profile[0].selectedIndex).toBe 3
expect(view.panes[7].view.find('#close_success').prop('checked')).toBe true
describe 'On accept', ->
beforeEach ->
view.panes[1].pane.find('#save_all').prop('checked', false)
view.find('.btn-primary').click()
it 'returns the correct values', ->
res = accept.mostRecentCall.args[0]
oldname = accept.mostRecentCall.args[1]
expect(accept).toHaveBeenCalled()
expect(oldname).toBe 'Test 1'
expect(res.project).toBe atom.project.getPaths()[0]
expect(res.command).toBe 'echo test'
expect(res.modifier.save_all).toBeUndefined()
expect(res.stdout.pipeline).toEqual []
expect(res.stderr.pipeline).toEqual [
{
name: 'profile'
config:
profile: 'python'
}
]
expect(res.output.console.close_success).toBe true
expect(res.output.linter).toBeUndefined()
it 'calls the cancel callback', ->
expect(cancel).toHaveBeenCalled()
describe 'Pane can be created with atom.views.getView', ->
[c, p] = []
execute = (callback) ->
waitsForPromise -> atom.packages.activatePackage('build-tools')
runs -> callback()
it 'On getView with default command', ->
execute ->
c = new Command
p = atom.views.getView(c)
jasmine.attachToDOM(p.element)
expect(p.panes[0].view.command_name.getModel().getText()).toBe ''
expect(p.command.oldname).toBeUndefined()
it 'on getView with a valid command', ->
execute ->
command.oldname = undefined
c = new Command(command)
p = atom.views.getView(c)
jasmine.attachToDOM(p.element)
expect(p.panes[0].view.command_name.getModel().getText()).toBe 'Test 1'
expect(p.command.oldname).toBe 'Test 1'
describe 'use blacklist to hide modules', ->
beforeEach ->
view.remove()
view = new CommandEditPane(command)
view.setCallbacks accept, cancel
view.setBlacklist ['general', 'console']
jasmine.attachToDOM(view.element)
it 'shows all views minus the blacklisted ones', ->
expect(view.find('.inset-panel').length).toBe 10
expect(view.panes[0].view.command_name).toBeUndefined()
| true | CommandEditPane = require '../lib/view/command-edit-pane'
Command = require '../lib/provider/command'
describe 'Command Edit Pane', ->
view = null
accept = null
cancel = null
command = null
beforeEach ->
accept = jasmine.createSpy('accept')
cancel = jasmine.createSpy('cancel')
command =
project: atom.project.getPaths()[0]
name: 'PI:NAME:<NAME>END_PI'
command: 'echo test'
wd: '.'
modifier:
save_all: {}
stdout:
highlighting: 'nh'
stderr:
highlighting: 'hc'
profile: 'python'
output:
console:
close_success: true
command = new Command(command)
command.oldname = 'PI:NAME:<NAME>END_PI'
view = new CommandEditPane(command)
view.setCallbacks accept, cancel
jasmine.attachToDOM(view.element)
it 'has a pane', ->
expect(view.element).toBeDefined()
it 'has 12 edit panes', ->
expect(view.find('.inset-panel').length).toBe 12
it 'has the correct values', ->
expect(view.panes[0].view.command_name.getModel().getText()).toBe 'Test 1'
expect(view.panes[1].pane.find('#save_all').prop('checked')).toBe true
expect(view.panes[6].view._stderr.panes[0].view.profile[0].selectedIndex).toBe 3
expect(view.panes[7].view.find('#close_success').prop('checked')).toBe true
describe 'On accept', ->
beforeEach ->
view.panes[1].pane.find('#save_all').prop('checked', false)
view.find('.btn-primary').click()
it 'returns the correct values', ->
res = accept.mostRecentCall.args[0]
oldname = accept.mostRecentCall.args[1]
expect(accept).toHaveBeenCalled()
expect(oldname).toBe 'Test 1'
expect(res.project).toBe atom.project.getPaths()[0]
expect(res.command).toBe 'echo test'
expect(res.modifier.save_all).toBeUndefined()
expect(res.stdout.pipeline).toEqual []
expect(res.stderr.pipeline).toEqual [
{
name: 'profile'
config:
profile: 'python'
}
]
expect(res.output.console.close_success).toBe true
expect(res.output.linter).toBeUndefined()
it 'calls the cancel callback', ->
expect(cancel).toHaveBeenCalled()
describe 'Pane can be created with atom.views.getView', ->
[c, p] = []
execute = (callback) ->
waitsForPromise -> atom.packages.activatePackage('build-tools')
runs -> callback()
it 'On getView with default command', ->
execute ->
c = new Command
p = atom.views.getView(c)
jasmine.attachToDOM(p.element)
expect(p.panes[0].view.command_name.getModel().getText()).toBe ''
expect(p.command.oldname).toBeUndefined()
it 'on getView with a valid command', ->
execute ->
command.oldname = undefined
c = new Command(command)
p = atom.views.getView(c)
jasmine.attachToDOM(p.element)
expect(p.panes[0].view.command_name.getModel().getText()).toBe 'Test 1'
expect(p.command.oldname).toBe 'Test 1'
describe 'use blacklist to hide modules', ->
beforeEach ->
view.remove()
view = new CommandEditPane(command)
view.setCallbacks accept, cancel
view.setBlacklist ['general', 'console']
jasmine.attachToDOM(view.element)
it 'shows all views minus the blacklisted ones', ->
expect(view.find('.inset-panel').length).toBe 10
expect(view.panes[0].view.command_name).toBeUndefined()
|
[
{
"context": " if dcodeIO.bcrypt.compareSync req.body.password, user.local.password\n LocalSettings.setGlobal 'loggedInUs",
"end": 16177,
"score": 0.7963927984237671,
"start": 16158,
"tag": "PASSWORD",
"value": "user.local.password"
}
] | src/index.coffee | ndxbxrme/ndx-local-server | 0 | module = null
try
module = angular.module 'ndx'
catch e
module =angular.module 'ndx', []
module.provider 'Server', ->
config =
sharedAll: true
$get: ($http, $q, $rootElement, $window, LocalSettings, Auth, ndxdb, socket, rest) ->
autoId = LocalSettings.getGlobal('endpoints')?.autoId or '_id'
offline = LocalSettings.getGlobal('offline')
endpoints = null
original =
$post: $http.post
$get: $http.get
$put: $http.put
$delete: $http.delete
makeRegex = (str) ->
params = []
regex = new RegExp '^' + str.replace(/(:[^\/]+)/gi, (all, param) ->
params.push param.replace(':', '')
'([^\/]*)'
) + '$'
return
regex: regex
params: params
fns: []
isOnline = ->
not offline
Req = (method, uri, config, params, endpoint, restrict) ->
uri: uri
method: method
endpoint: endpoint
body: config or {}
params: params
restrict: restrict
Res = (method, uri, config, defer) ->
status = 200
method: method
data: config
status: (_status) ->
status = _status
@
end: (str) ->
defer.resolve
status: status
data: str
json: (data) ->
defer.resolve
status: status
data: data
reject: (data) ->
defer.reject data
Ndx = ->
routes =
get: []
post: []
put: []
delete: []
makeRoute = (method, route, args) ->
myroute = makeRegex route
i = 1
while i++ < args.length - 1
myroute.fns.push args[i]
myroute.endpoint = args[0]
routes[method].push myroute
routeRequest = (method, uri, config) ->
route = null
for testroute in routes[method]
if testroute.regex.test(uri)
route = testroute
break
if route
restrict = getRestrict route.endpoint
if restrict.local
return original['$' + method] uri, config
defer = $q.defer()
callFn = (index, req, res) ->
if route.fns[index]
route.fns[index] req, res, ->
index++
callFn index, req, res
ex = route.regex.exec uri
params = {}
for param, i in route.params
console.log decodeURIComponent(ex[i+1])
params[param] = decodeURIComponent(ex[i+1])
req = Req method, uri, config, params, route.endpoint, restrict
res = Res method, uri, config, defer
callFn 0, req, res
return defer.promise
else
return original['$' + method] uri, config
app:
routeRequest: routeRequest
get: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'get', r, arguments
else
makeRoute 'get', route, arguments
post: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'post', r, arguments
else
makeRoute 'post', route, arguments
put: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'put', r, arguments
else
makeRoute 'put', route, arguments
delete: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'delete', r, arguments
else
makeRoute 'delete', route, arguments
database: ndxdb
settings:
AUTO_ID: autoId
SOFT_DELETE: true
ndx = Ndx()
## REST FUNCTIONS
hasDeleted = (obj) ->
truth = false
if typeof(obj) is 'object'
for key of obj
if key is 'deleted'
return true
else
if truth = hasDeleted obj[key]
return true
truth
getRestrict = (tableName) ->
if endpoints
if endpoints.restrict
role = null
restrict = null
if user = Auth.getUser()
if user.roles
for key of user.roles
if user.roles[key]
role = key
break
tableRestrict = endpoints.restrict[tableName] or endpoints.restrict.default
if tableRestrict
return tableRestrict[role] or tableRestrict.default or {}
return {}
selectFn = (tableName, all) ->
(req, res, next) ->
myTableName = tableName
restrict = req.restrict
if all and restrict.all
return res.json
total: 0
page: 1
pageSize: 0
items: []
if not all or not restrict.sharedAll
myTableName += "_#{Auth.getUser()._id}"
if all
myTableName += "_all"
if req.params and req.params.id
where = {}
if req.params.id.indexOf('{') is 0
where = JSON.parse req.params.id
else
where[ndx.settings.AUTO_ID] = req.params.id
if ndx.settings.SOFT_DELETE and not req.body.showDeleted and not hasDeleted(where)
where.deleted = null
if all
elevateUser ndx.user
ndx.database.select myTableName,
where: where
, (items) ->
if items and items.length
res.json items[0]
else
res.json {}
else
req.body.where = req.body.where or {}
if ndx.settings.SOFT_DELETE and not req.body.showDeleted and not hasDeleted(req.body.where)
req.body.where.deleted = null
if req.body.all or all
elevateUser ndx.user
ndx.database.select myTableName, req.body, (items, total) ->
res.json
total: total
page: req.body.page or 1
pageSize: req.body.pageSize or 0
items: items
upsertFn = (tableName) ->
(req, res, next) ->
myTableName = "#{tableName}_#{Auth.getUser()._id}"
op = if req.params.id then 'update' else 'insert'
where = {}
if req.params.id
where[ndx.settings.AUTO_ID] = req.params.id
req.body.modifiedAt = 0
req.body.insertedAt = req.body.insertedAt or new Date().valueOf()
ndx.database.upsert myTableName, req.body, where, (err, r) ->
res.json(err or r)
if isOnline()
original.$post req.uri, req.body
.then ->
true
, ->
false
deleteFn = (tableName) ->
(req, res, next) ->
myTableName = "#{tableName}_#{Auth.getUser()._id}"
if req.params.id
where = {}
where[ndx.settings.AUTO_ID] = req.params.id
if ndx.settings.SOFT_DELETE
ndx.database.update tableName,
deleted:
by:ndx.user[ndx.settings.AUTO_ID]
at:new Date().valueOf()
modifiedAt: 0
, where
else
ndx.database.delete myTableName, where
if isOnline()
original.$delete req.uri
res.end 'OK'
makeEndpointRoutes = ->
for endpoint in endpoints.endpoints
ndx.app.get endpoint, ["/api/#{endpoint}", "/api/#{endpoint}/:id"], selectFn(endpoint)
ndx.app.post endpoint, "/api/#{endpoint}/search", selectFn(endpoint)
ndx.app.get endpoint, "/api/#{endpoint}/:id/all", selectFn(endpoint, true)
ndx.app.post endpoint, "/api/#{endpoint}/search/all", selectFn(endpoint, true)
#ndx.app.post endpoint, "/api/#{endpoint}/modified", modifiedFn(endpoint)
ndx.app.post endpoint, ["/api/#{endpoint}", "/api/#{endpoint}/:id"], upsertFn(endpoint)
ndx.app.put endpoint, ["/api/#{endpoint}", "/api/#{endpoint}/:id"], upsertFn(endpoint)
ndx.app.delete endpoint, "/api/#{endpoint}/:id", deleteFn(endpoint)
makeTables = ->
if endpoints and endpoints.endpoints
for endpoint in endpoints.endpoints
myTableName = endpoint
restrict = getRestrict myTableName
if restrict.all or restrict.localAll
continue
if not restrict.sharedAll
myTableName += "_#{Auth.getUser()._id}"
myTableName += "_all"
ndx.database.makeTable myTableName
if endpoints and endpoints.endpoints and Auth.getUser()
for endpoint in endpoints.endpoints
restrict = getRestrict endpoint
if restrict.local
continue
ndx.database.makeTable "#{endpoint}_#{Auth.getUser()._id}"
uploadEndpoints = (cb) ->
if endpoints and endpoints.endpoints and Auth.getUser()
async.each endpoints.endpoints, (endpoint, endpointCb) ->
restrict = getRestrict endpoint
if restrict.local
return endpointCb()
myTableName = "#{endpoint}_#{Auth.getUser()._id}"
ndx.database.getDocsToUpload myTableName, (docs) ->
if docs
async.each docs, (doc, docCb) ->
original.$post "/api/#{endpoint}", doc
, ->
endpointCb()
else
endpointCb()
, ->
cb?()
totalFetched = 0
fetchNewForEndpoint = (endpoint, all, endpointCb) ->
if not Auth.getUser()
return endpointCb?()
localEndpoint = endpoint
restrict = getRestrict localEndpoint
if restrict.local
return endpointCb?()
if all and (restrict.all or restrict.localAll)
return endpointCb?()
if not all or not config.sharedAll
localEndpoint += "_#{Auth.getUser()._id}"
if all
localEndpoint += "_all"
PAGE_SIZE = 10
fetchPage = (firstPage) ->
ndx.database.maxModified localEndpoint, (localMaxModified) ->
where =
modifiedAt: {}
if firstPage
where.modifiedAt.$gt = localMaxModified
else
where.modifiedAt.$gte = localMaxModified
original.$post "/api/#{endpoint}/search#{if all then '/all' else ''}",
where: where
sort: 'modifiedAt'
sortDir: 'ASC'
page: 1
pageSize: PAGE_SIZE
.then (modifiedDocs) ->
console.log modifiedDocs.data.total, 'total'
if modifiedDocs.data and modifiedDocs.data.total
async.each modifiedDocs.data.items, (modifiedDoc, upsertCb) ->
ndx.database.upsert localEndpoint, modifiedDoc
upsertCb()
, ->
totalFetched += modifiedDocs.data?.total or 0
if modifiedDocs.data.total > PAGE_SIZE
fetchPage()
else
endpointCb?()
else
endpointCb?()
, ->
endpointCb?()
fetchPage true
fetchNewData = (cb) ->
if endpoints and endpoints.endpoints
async.each endpoints.endpoints, (endpoint, endpointCb) ->
fetchNewForEndpoint endpoint, true, ->
fetchNewForEndpoint endpoint, false, ->
uploadEndpoints endpointCb
, ->
cb?()
fetchCount = 0
fetchAndUpload = (data) ->
totalFetched = 0
if data
fetchNewForEndpoint data.table, true, ->
fetchNewForEndpoint data.table, false, ->
uploadEndpoints ->
if totalFetched > 0
rest.socketRefresh data
else
if fetchCount++ > 0
fetchNewData ->
if totalFetched > 0
rest.socketRefresh data
deleteEndpoint = (endpoint, all) ->
localEndpoint = endpoint
if not all or not config.sharedAll
localEndpoint += "_#{Auth.getUser()._id}"
if all
localEndpoint += "_all"
ndx.database.delete localEndpoint
checkRefresh = ->
if endpoints and endpoints.endpoints and user = Auth.getUser()
lastRefresh = LocalSettings.getGlobal('lastRefresh') or 0
if user.ndxRefresh
for endpoint of user.ndxRefresh
refreshed = false
if lastRefresh < user.ndxRefresh[endpoint] < new Date().valueOf()
deleteEndpoint endpoint, true
deleteEndpoint endpoint, false
if refreshed
LocalSettings.setGlobal 'lastRefresh', new Date().valueOf()
$http.post = (uri, config) ->
ndx.app.routeRequest 'post', uri, config
$http.get = (uri, config) ->
ndx.app.routeRequest 'get', uri, config
$http.put = (uri, config) ->
ndx.app.routeRequest 'put', uri, config
$http.delete = (uri, config) ->
ndx.app.routeRequest 'delete', uri, config
socket.on 'connect', fetchAndUpload
socket.on 'update', fetchAndUpload
socket.on 'insert', fetchAndUpload
socket.on 'delete', fetchAndUpload
Auth.onUser ->
makeTables()
#check for refresh
checkRefresh()
fetchNewData()
ndx.app.get null, '/rest/endpoints', (req, res, next) ->
if isOnline()
original.$get '/rest/endpoints', req.data
.then (response) ->
LocalSettings.setGlobal 'endpoints', response.data
endpoints = response.data
console.log 'endpoints', endpoints
makeEndpointRoutes()
makeTables()
checkRefresh()
fetchAndUpload()
res.json response.data
, ->
endpoints = LocalSettings.getGlobal 'endpoints'
if endpoints
makeEndpointRoutes()
makeTables()
res.json endpoints
else
res.json {}
else
endpoints = LocalSettings.getGlobal 'endpoints'
makeEndpointRoutes()
makeTables()
res.json endpoints
ndx.app.post null, '/api/refresh-login', (req, res, next) ->
if isOnline()
original.$post '/api/refresh-login', req.data
.then (response) ->
if response.status is 200
globalUsers = LocalSettings.getGlobal('users') or {}
globalUsers[response.data[autoId]] = response.data
LocalSettings.setGlobal 'users', globalUsers
LocalSettings.setGlobal 'loggedInUser',
user: response.data
until: new Date().valueOf() + (5 * 60 * 60 * 1000)
res.json response.data
else
res.status(response.status).json response.data
, ->
loggedInUser = LocalSettings.getGlobal 'loggedInUser'
if loggedInUser and loggedInUser.until and loggedInUser.until > new Date().valueOf()
loggedInUser.until = new Date().valueOf() + (5 * 60 * 60 * 1000)
LocalSettings.setGlobal 'loggedInUser', loggedInUser
res.json loggedInUser.user
else
res.status(401).json {}
else
loggedInUser = LocalSettings.getGlobal 'loggedInUser'
if loggedInUser and loggedInUser.until and loggedInUser.until > new Date().valueOf()
loggedInUser.until = new Date().valueOf() + (5 * 60 * 60 * 1000)
LocalSettings.setGlobal 'loggedInUser', loggedInUser
res.json loggedInUser.user
else
res.status(401).json {}
ndx.app.get null, '/api/logout', (req, res, next) ->
LocalSettings.setGlobal 'loggedInUser', null
original.$get req.uri, req.data
.then ->
true
, ->
false
res.end 'OK'
ndx.app.post null, '/api/login', (req, res, next) ->
original.$post req.uri, req.body
.then (response) ->
res.json response.data
, (err) ->
if err.status is 401
res.reject err
else
users = LocalSettings.getGlobal 'users'
user = null
for key of users
user = users[key]
if user.local?.email?.toLowerCase() is req.body.email?.toLowerCase()
break
if user
if dcodeIO.bcrypt.compareSync req.body.password, user.local.password
LocalSettings.setGlobal 'loggedInUser',
user: user
until: new Date().valueOf() + (5 * 60 * 60 * 1000)
res.json user
else
res.reject err
else
res.reject err
setOffline: (val) ->
offline = val
LocalSettings.setGlobal 'offline', offline
isOnline: isOnline
original: original
config: (_config) ->
config = _config
.run (Server) ->
Server.setOffline false
| 71670 | module = null
try
module = angular.module 'ndx'
catch e
module =angular.module 'ndx', []
module.provider 'Server', ->
config =
sharedAll: true
$get: ($http, $q, $rootElement, $window, LocalSettings, Auth, ndxdb, socket, rest) ->
autoId = LocalSettings.getGlobal('endpoints')?.autoId or '_id'
offline = LocalSettings.getGlobal('offline')
endpoints = null
original =
$post: $http.post
$get: $http.get
$put: $http.put
$delete: $http.delete
makeRegex = (str) ->
params = []
regex = new RegExp '^' + str.replace(/(:[^\/]+)/gi, (all, param) ->
params.push param.replace(':', '')
'([^\/]*)'
) + '$'
return
regex: regex
params: params
fns: []
isOnline = ->
not offline
Req = (method, uri, config, params, endpoint, restrict) ->
uri: uri
method: method
endpoint: endpoint
body: config or {}
params: params
restrict: restrict
Res = (method, uri, config, defer) ->
status = 200
method: method
data: config
status: (_status) ->
status = _status
@
end: (str) ->
defer.resolve
status: status
data: str
json: (data) ->
defer.resolve
status: status
data: data
reject: (data) ->
defer.reject data
Ndx = ->
routes =
get: []
post: []
put: []
delete: []
makeRoute = (method, route, args) ->
myroute = makeRegex route
i = 1
while i++ < args.length - 1
myroute.fns.push args[i]
myroute.endpoint = args[0]
routes[method].push myroute
routeRequest = (method, uri, config) ->
route = null
for testroute in routes[method]
if testroute.regex.test(uri)
route = testroute
break
if route
restrict = getRestrict route.endpoint
if restrict.local
return original['$' + method] uri, config
defer = $q.defer()
callFn = (index, req, res) ->
if route.fns[index]
route.fns[index] req, res, ->
index++
callFn index, req, res
ex = route.regex.exec uri
params = {}
for param, i in route.params
console.log decodeURIComponent(ex[i+1])
params[param] = decodeURIComponent(ex[i+1])
req = Req method, uri, config, params, route.endpoint, restrict
res = Res method, uri, config, defer
callFn 0, req, res
return defer.promise
else
return original['$' + method] uri, config
app:
routeRequest: routeRequest
get: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'get', r, arguments
else
makeRoute 'get', route, arguments
post: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'post', r, arguments
else
makeRoute 'post', route, arguments
put: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'put', r, arguments
else
makeRoute 'put', route, arguments
delete: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'delete', r, arguments
else
makeRoute 'delete', route, arguments
database: ndxdb
settings:
AUTO_ID: autoId
SOFT_DELETE: true
ndx = Ndx()
## REST FUNCTIONS
hasDeleted = (obj) ->
truth = false
if typeof(obj) is 'object'
for key of obj
if key is 'deleted'
return true
else
if truth = hasDeleted obj[key]
return true
truth
getRestrict = (tableName) ->
if endpoints
if endpoints.restrict
role = null
restrict = null
if user = Auth.getUser()
if user.roles
for key of user.roles
if user.roles[key]
role = key
break
tableRestrict = endpoints.restrict[tableName] or endpoints.restrict.default
if tableRestrict
return tableRestrict[role] or tableRestrict.default or {}
return {}
selectFn = (tableName, all) ->
(req, res, next) ->
myTableName = tableName
restrict = req.restrict
if all and restrict.all
return res.json
total: 0
page: 1
pageSize: 0
items: []
if not all or not restrict.sharedAll
myTableName += "_#{Auth.getUser()._id}"
if all
myTableName += "_all"
if req.params and req.params.id
where = {}
if req.params.id.indexOf('{') is 0
where = JSON.parse req.params.id
else
where[ndx.settings.AUTO_ID] = req.params.id
if ndx.settings.SOFT_DELETE and not req.body.showDeleted and not hasDeleted(where)
where.deleted = null
if all
elevateUser ndx.user
ndx.database.select myTableName,
where: where
, (items) ->
if items and items.length
res.json items[0]
else
res.json {}
else
req.body.where = req.body.where or {}
if ndx.settings.SOFT_DELETE and not req.body.showDeleted and not hasDeleted(req.body.where)
req.body.where.deleted = null
if req.body.all or all
elevateUser ndx.user
ndx.database.select myTableName, req.body, (items, total) ->
res.json
total: total
page: req.body.page or 1
pageSize: req.body.pageSize or 0
items: items
upsertFn = (tableName) ->
(req, res, next) ->
myTableName = "#{tableName}_#{Auth.getUser()._id}"
op = if req.params.id then 'update' else 'insert'
where = {}
if req.params.id
where[ndx.settings.AUTO_ID] = req.params.id
req.body.modifiedAt = 0
req.body.insertedAt = req.body.insertedAt or new Date().valueOf()
ndx.database.upsert myTableName, req.body, where, (err, r) ->
res.json(err or r)
if isOnline()
original.$post req.uri, req.body
.then ->
true
, ->
false
deleteFn = (tableName) ->
(req, res, next) ->
myTableName = "#{tableName}_#{Auth.getUser()._id}"
if req.params.id
where = {}
where[ndx.settings.AUTO_ID] = req.params.id
if ndx.settings.SOFT_DELETE
ndx.database.update tableName,
deleted:
by:ndx.user[ndx.settings.AUTO_ID]
at:new Date().valueOf()
modifiedAt: 0
, where
else
ndx.database.delete myTableName, where
if isOnline()
original.$delete req.uri
res.end 'OK'
makeEndpointRoutes = ->
for endpoint in endpoints.endpoints
ndx.app.get endpoint, ["/api/#{endpoint}", "/api/#{endpoint}/:id"], selectFn(endpoint)
ndx.app.post endpoint, "/api/#{endpoint}/search", selectFn(endpoint)
ndx.app.get endpoint, "/api/#{endpoint}/:id/all", selectFn(endpoint, true)
ndx.app.post endpoint, "/api/#{endpoint}/search/all", selectFn(endpoint, true)
#ndx.app.post endpoint, "/api/#{endpoint}/modified", modifiedFn(endpoint)
ndx.app.post endpoint, ["/api/#{endpoint}", "/api/#{endpoint}/:id"], upsertFn(endpoint)
ndx.app.put endpoint, ["/api/#{endpoint}", "/api/#{endpoint}/:id"], upsertFn(endpoint)
ndx.app.delete endpoint, "/api/#{endpoint}/:id", deleteFn(endpoint)
makeTables = ->
if endpoints and endpoints.endpoints
for endpoint in endpoints.endpoints
myTableName = endpoint
restrict = getRestrict myTableName
if restrict.all or restrict.localAll
continue
if not restrict.sharedAll
myTableName += "_#{Auth.getUser()._id}"
myTableName += "_all"
ndx.database.makeTable myTableName
if endpoints and endpoints.endpoints and Auth.getUser()
for endpoint in endpoints.endpoints
restrict = getRestrict endpoint
if restrict.local
continue
ndx.database.makeTable "#{endpoint}_#{Auth.getUser()._id}"
uploadEndpoints = (cb) ->
if endpoints and endpoints.endpoints and Auth.getUser()
async.each endpoints.endpoints, (endpoint, endpointCb) ->
restrict = getRestrict endpoint
if restrict.local
return endpointCb()
myTableName = "#{endpoint}_#{Auth.getUser()._id}"
ndx.database.getDocsToUpload myTableName, (docs) ->
if docs
async.each docs, (doc, docCb) ->
original.$post "/api/#{endpoint}", doc
, ->
endpointCb()
else
endpointCb()
, ->
cb?()
totalFetched = 0
fetchNewForEndpoint = (endpoint, all, endpointCb) ->
if not Auth.getUser()
return endpointCb?()
localEndpoint = endpoint
restrict = getRestrict localEndpoint
if restrict.local
return endpointCb?()
if all and (restrict.all or restrict.localAll)
return endpointCb?()
if not all or not config.sharedAll
localEndpoint += "_#{Auth.getUser()._id}"
if all
localEndpoint += "_all"
PAGE_SIZE = 10
fetchPage = (firstPage) ->
ndx.database.maxModified localEndpoint, (localMaxModified) ->
where =
modifiedAt: {}
if firstPage
where.modifiedAt.$gt = localMaxModified
else
where.modifiedAt.$gte = localMaxModified
original.$post "/api/#{endpoint}/search#{if all then '/all' else ''}",
where: where
sort: 'modifiedAt'
sortDir: 'ASC'
page: 1
pageSize: PAGE_SIZE
.then (modifiedDocs) ->
console.log modifiedDocs.data.total, 'total'
if modifiedDocs.data and modifiedDocs.data.total
async.each modifiedDocs.data.items, (modifiedDoc, upsertCb) ->
ndx.database.upsert localEndpoint, modifiedDoc
upsertCb()
, ->
totalFetched += modifiedDocs.data?.total or 0
if modifiedDocs.data.total > PAGE_SIZE
fetchPage()
else
endpointCb?()
else
endpointCb?()
, ->
endpointCb?()
fetchPage true
fetchNewData = (cb) ->
if endpoints and endpoints.endpoints
async.each endpoints.endpoints, (endpoint, endpointCb) ->
fetchNewForEndpoint endpoint, true, ->
fetchNewForEndpoint endpoint, false, ->
uploadEndpoints endpointCb
, ->
cb?()
fetchCount = 0
fetchAndUpload = (data) ->
totalFetched = 0
if data
fetchNewForEndpoint data.table, true, ->
fetchNewForEndpoint data.table, false, ->
uploadEndpoints ->
if totalFetched > 0
rest.socketRefresh data
else
if fetchCount++ > 0
fetchNewData ->
if totalFetched > 0
rest.socketRefresh data
deleteEndpoint = (endpoint, all) ->
localEndpoint = endpoint
if not all or not config.sharedAll
localEndpoint += "_#{Auth.getUser()._id}"
if all
localEndpoint += "_all"
ndx.database.delete localEndpoint
checkRefresh = ->
if endpoints and endpoints.endpoints and user = Auth.getUser()
lastRefresh = LocalSettings.getGlobal('lastRefresh') or 0
if user.ndxRefresh
for endpoint of user.ndxRefresh
refreshed = false
if lastRefresh < user.ndxRefresh[endpoint] < new Date().valueOf()
deleteEndpoint endpoint, true
deleteEndpoint endpoint, false
if refreshed
LocalSettings.setGlobal 'lastRefresh', new Date().valueOf()
$http.post = (uri, config) ->
ndx.app.routeRequest 'post', uri, config
$http.get = (uri, config) ->
ndx.app.routeRequest 'get', uri, config
$http.put = (uri, config) ->
ndx.app.routeRequest 'put', uri, config
$http.delete = (uri, config) ->
ndx.app.routeRequest 'delete', uri, config
socket.on 'connect', fetchAndUpload
socket.on 'update', fetchAndUpload
socket.on 'insert', fetchAndUpload
socket.on 'delete', fetchAndUpload
Auth.onUser ->
makeTables()
#check for refresh
checkRefresh()
fetchNewData()
ndx.app.get null, '/rest/endpoints', (req, res, next) ->
if isOnline()
original.$get '/rest/endpoints', req.data
.then (response) ->
LocalSettings.setGlobal 'endpoints', response.data
endpoints = response.data
console.log 'endpoints', endpoints
makeEndpointRoutes()
makeTables()
checkRefresh()
fetchAndUpload()
res.json response.data
, ->
endpoints = LocalSettings.getGlobal 'endpoints'
if endpoints
makeEndpointRoutes()
makeTables()
res.json endpoints
else
res.json {}
else
endpoints = LocalSettings.getGlobal 'endpoints'
makeEndpointRoutes()
makeTables()
res.json endpoints
ndx.app.post null, '/api/refresh-login', (req, res, next) ->
if isOnline()
original.$post '/api/refresh-login', req.data
.then (response) ->
if response.status is 200
globalUsers = LocalSettings.getGlobal('users') or {}
globalUsers[response.data[autoId]] = response.data
LocalSettings.setGlobal 'users', globalUsers
LocalSettings.setGlobal 'loggedInUser',
user: response.data
until: new Date().valueOf() + (5 * 60 * 60 * 1000)
res.json response.data
else
res.status(response.status).json response.data
, ->
loggedInUser = LocalSettings.getGlobal 'loggedInUser'
if loggedInUser and loggedInUser.until and loggedInUser.until > new Date().valueOf()
loggedInUser.until = new Date().valueOf() + (5 * 60 * 60 * 1000)
LocalSettings.setGlobal 'loggedInUser', loggedInUser
res.json loggedInUser.user
else
res.status(401).json {}
else
loggedInUser = LocalSettings.getGlobal 'loggedInUser'
if loggedInUser and loggedInUser.until and loggedInUser.until > new Date().valueOf()
loggedInUser.until = new Date().valueOf() + (5 * 60 * 60 * 1000)
LocalSettings.setGlobal 'loggedInUser', loggedInUser
res.json loggedInUser.user
else
res.status(401).json {}
ndx.app.get null, '/api/logout', (req, res, next) ->
LocalSettings.setGlobal 'loggedInUser', null
original.$get req.uri, req.data
.then ->
true
, ->
false
res.end 'OK'
ndx.app.post null, '/api/login', (req, res, next) ->
original.$post req.uri, req.body
.then (response) ->
res.json response.data
, (err) ->
if err.status is 401
res.reject err
else
users = LocalSettings.getGlobal 'users'
user = null
for key of users
user = users[key]
if user.local?.email?.toLowerCase() is req.body.email?.toLowerCase()
break
if user
if dcodeIO.bcrypt.compareSync req.body.password, <PASSWORD>
LocalSettings.setGlobal 'loggedInUser',
user: user
until: new Date().valueOf() + (5 * 60 * 60 * 1000)
res.json user
else
res.reject err
else
res.reject err
setOffline: (val) ->
offline = val
LocalSettings.setGlobal 'offline', offline
isOnline: isOnline
original: original
config: (_config) ->
config = _config
.run (Server) ->
Server.setOffline false
| true | module = null
try
module = angular.module 'ndx'
catch e
module =angular.module 'ndx', []
module.provider 'Server', ->
config =
sharedAll: true
$get: ($http, $q, $rootElement, $window, LocalSettings, Auth, ndxdb, socket, rest) ->
autoId = LocalSettings.getGlobal('endpoints')?.autoId or '_id'
offline = LocalSettings.getGlobal('offline')
endpoints = null
original =
$post: $http.post
$get: $http.get
$put: $http.put
$delete: $http.delete
makeRegex = (str) ->
params = []
regex = new RegExp '^' + str.replace(/(:[^\/]+)/gi, (all, param) ->
params.push param.replace(':', '')
'([^\/]*)'
) + '$'
return
regex: regex
params: params
fns: []
isOnline = ->
not offline
Req = (method, uri, config, params, endpoint, restrict) ->
uri: uri
method: method
endpoint: endpoint
body: config or {}
params: params
restrict: restrict
Res = (method, uri, config, defer) ->
status = 200
method: method
data: config
status: (_status) ->
status = _status
@
end: (str) ->
defer.resolve
status: status
data: str
json: (data) ->
defer.resolve
status: status
data: data
reject: (data) ->
defer.reject data
Ndx = ->
routes =
get: []
post: []
put: []
delete: []
makeRoute = (method, route, args) ->
myroute = makeRegex route
i = 1
while i++ < args.length - 1
myroute.fns.push args[i]
myroute.endpoint = args[0]
routes[method].push myroute
routeRequest = (method, uri, config) ->
route = null
for testroute in routes[method]
if testroute.regex.test(uri)
route = testroute
break
if route
restrict = getRestrict route.endpoint
if restrict.local
return original['$' + method] uri, config
defer = $q.defer()
callFn = (index, req, res) ->
if route.fns[index]
route.fns[index] req, res, ->
index++
callFn index, req, res
ex = route.regex.exec uri
params = {}
for param, i in route.params
console.log decodeURIComponent(ex[i+1])
params[param] = decodeURIComponent(ex[i+1])
req = Req method, uri, config, params, route.endpoint, restrict
res = Res method, uri, config, defer
callFn 0, req, res
return defer.promise
else
return original['$' + method] uri, config
app:
routeRequest: routeRequest
get: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'get', r, arguments
else
makeRoute 'get', route, arguments
post: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'post', r, arguments
else
makeRoute 'post', route, arguments
put: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'put', r, arguments
else
makeRoute 'put', route, arguments
delete: (endpoint, route) ->
if Object.prototype.toString.call(route) is '[object Array]'
for r in route
makeRoute 'delete', r, arguments
else
makeRoute 'delete', route, arguments
database: ndxdb
settings:
AUTO_ID: autoId
SOFT_DELETE: true
ndx = Ndx()
## REST FUNCTIONS
hasDeleted = (obj) ->
truth = false
if typeof(obj) is 'object'
for key of obj
if key is 'deleted'
return true
else
if truth = hasDeleted obj[key]
return true
truth
getRestrict = (tableName) ->
if endpoints
if endpoints.restrict
role = null
restrict = null
if user = Auth.getUser()
if user.roles
for key of user.roles
if user.roles[key]
role = key
break
tableRestrict = endpoints.restrict[tableName] or endpoints.restrict.default
if tableRestrict
return tableRestrict[role] or tableRestrict.default or {}
return {}
selectFn = (tableName, all) ->
(req, res, next) ->
myTableName = tableName
restrict = req.restrict
if all and restrict.all
return res.json
total: 0
page: 1
pageSize: 0
items: []
if not all or not restrict.sharedAll
myTableName += "_#{Auth.getUser()._id}"
if all
myTableName += "_all"
if req.params and req.params.id
where = {}
if req.params.id.indexOf('{') is 0
where = JSON.parse req.params.id
else
where[ndx.settings.AUTO_ID] = req.params.id
if ndx.settings.SOFT_DELETE and not req.body.showDeleted and not hasDeleted(where)
where.deleted = null
if all
elevateUser ndx.user
ndx.database.select myTableName,
where: where
, (items) ->
if items and items.length
res.json items[0]
else
res.json {}
else
req.body.where = req.body.where or {}
if ndx.settings.SOFT_DELETE and not req.body.showDeleted and not hasDeleted(req.body.where)
req.body.where.deleted = null
if req.body.all or all
elevateUser ndx.user
ndx.database.select myTableName, req.body, (items, total) ->
res.json
total: total
page: req.body.page or 1
pageSize: req.body.pageSize or 0
items: items
upsertFn = (tableName) ->
(req, res, next) ->
myTableName = "#{tableName}_#{Auth.getUser()._id}"
op = if req.params.id then 'update' else 'insert'
where = {}
if req.params.id
where[ndx.settings.AUTO_ID] = req.params.id
req.body.modifiedAt = 0
req.body.insertedAt = req.body.insertedAt or new Date().valueOf()
ndx.database.upsert myTableName, req.body, where, (err, r) ->
res.json(err or r)
if isOnline()
original.$post req.uri, req.body
.then ->
true
, ->
false
deleteFn = (tableName) ->
(req, res, next) ->
myTableName = "#{tableName}_#{Auth.getUser()._id}"
if req.params.id
where = {}
where[ndx.settings.AUTO_ID] = req.params.id
if ndx.settings.SOFT_DELETE
ndx.database.update tableName,
deleted:
by:ndx.user[ndx.settings.AUTO_ID]
at:new Date().valueOf()
modifiedAt: 0
, where
else
ndx.database.delete myTableName, where
if isOnline()
original.$delete req.uri
res.end 'OK'
makeEndpointRoutes = ->
for endpoint in endpoints.endpoints
ndx.app.get endpoint, ["/api/#{endpoint}", "/api/#{endpoint}/:id"], selectFn(endpoint)
ndx.app.post endpoint, "/api/#{endpoint}/search", selectFn(endpoint)
ndx.app.get endpoint, "/api/#{endpoint}/:id/all", selectFn(endpoint, true)
ndx.app.post endpoint, "/api/#{endpoint}/search/all", selectFn(endpoint, true)
#ndx.app.post endpoint, "/api/#{endpoint}/modified", modifiedFn(endpoint)
ndx.app.post endpoint, ["/api/#{endpoint}", "/api/#{endpoint}/:id"], upsertFn(endpoint)
ndx.app.put endpoint, ["/api/#{endpoint}", "/api/#{endpoint}/:id"], upsertFn(endpoint)
ndx.app.delete endpoint, "/api/#{endpoint}/:id", deleteFn(endpoint)
makeTables = ->
if endpoints and endpoints.endpoints
for endpoint in endpoints.endpoints
myTableName = endpoint
restrict = getRestrict myTableName
if restrict.all or restrict.localAll
continue
if not restrict.sharedAll
myTableName += "_#{Auth.getUser()._id}"
myTableName += "_all"
ndx.database.makeTable myTableName
if endpoints and endpoints.endpoints and Auth.getUser()
for endpoint in endpoints.endpoints
restrict = getRestrict endpoint
if restrict.local
continue
ndx.database.makeTable "#{endpoint}_#{Auth.getUser()._id}"
uploadEndpoints = (cb) ->
if endpoints and endpoints.endpoints and Auth.getUser()
async.each endpoints.endpoints, (endpoint, endpointCb) ->
restrict = getRestrict endpoint
if restrict.local
return endpointCb()
myTableName = "#{endpoint}_#{Auth.getUser()._id}"
ndx.database.getDocsToUpload myTableName, (docs) ->
if docs
async.each docs, (doc, docCb) ->
original.$post "/api/#{endpoint}", doc
, ->
endpointCb()
else
endpointCb()
, ->
cb?()
totalFetched = 0
fetchNewForEndpoint = (endpoint, all, endpointCb) ->
if not Auth.getUser()
return endpointCb?()
localEndpoint = endpoint
restrict = getRestrict localEndpoint
if restrict.local
return endpointCb?()
if all and (restrict.all or restrict.localAll)
return endpointCb?()
if not all or not config.sharedAll
localEndpoint += "_#{Auth.getUser()._id}"
if all
localEndpoint += "_all"
PAGE_SIZE = 10
fetchPage = (firstPage) ->
ndx.database.maxModified localEndpoint, (localMaxModified) ->
where =
modifiedAt: {}
if firstPage
where.modifiedAt.$gt = localMaxModified
else
where.modifiedAt.$gte = localMaxModified
original.$post "/api/#{endpoint}/search#{if all then '/all' else ''}",
where: where
sort: 'modifiedAt'
sortDir: 'ASC'
page: 1
pageSize: PAGE_SIZE
.then (modifiedDocs) ->
console.log modifiedDocs.data.total, 'total'
if modifiedDocs.data and modifiedDocs.data.total
async.each modifiedDocs.data.items, (modifiedDoc, upsertCb) ->
ndx.database.upsert localEndpoint, modifiedDoc
upsertCb()
, ->
totalFetched += modifiedDocs.data?.total or 0
if modifiedDocs.data.total > PAGE_SIZE
fetchPage()
else
endpointCb?()
else
endpointCb?()
, ->
endpointCb?()
fetchPage true
fetchNewData = (cb) ->
if endpoints and endpoints.endpoints
async.each endpoints.endpoints, (endpoint, endpointCb) ->
fetchNewForEndpoint endpoint, true, ->
fetchNewForEndpoint endpoint, false, ->
uploadEndpoints endpointCb
, ->
cb?()
fetchCount = 0
fetchAndUpload = (data) ->
totalFetched = 0
if data
fetchNewForEndpoint data.table, true, ->
fetchNewForEndpoint data.table, false, ->
uploadEndpoints ->
if totalFetched > 0
rest.socketRefresh data
else
if fetchCount++ > 0
fetchNewData ->
if totalFetched > 0
rest.socketRefresh data
deleteEndpoint = (endpoint, all) ->
localEndpoint = endpoint
if not all or not config.sharedAll
localEndpoint += "_#{Auth.getUser()._id}"
if all
localEndpoint += "_all"
ndx.database.delete localEndpoint
checkRefresh = ->
if endpoints and endpoints.endpoints and user = Auth.getUser()
lastRefresh = LocalSettings.getGlobal('lastRefresh') or 0
if user.ndxRefresh
for endpoint of user.ndxRefresh
refreshed = false
if lastRefresh < user.ndxRefresh[endpoint] < new Date().valueOf()
deleteEndpoint endpoint, true
deleteEndpoint endpoint, false
if refreshed
LocalSettings.setGlobal 'lastRefresh', new Date().valueOf()
$http.post = (uri, config) ->
ndx.app.routeRequest 'post', uri, config
$http.get = (uri, config) ->
ndx.app.routeRequest 'get', uri, config
$http.put = (uri, config) ->
ndx.app.routeRequest 'put', uri, config
$http.delete = (uri, config) ->
ndx.app.routeRequest 'delete', uri, config
socket.on 'connect', fetchAndUpload
socket.on 'update', fetchAndUpload
socket.on 'insert', fetchAndUpload
socket.on 'delete', fetchAndUpload
Auth.onUser ->
makeTables()
#check for refresh
checkRefresh()
fetchNewData()
ndx.app.get null, '/rest/endpoints', (req, res, next) ->
if isOnline()
original.$get '/rest/endpoints', req.data
.then (response) ->
LocalSettings.setGlobal 'endpoints', response.data
endpoints = response.data
console.log 'endpoints', endpoints
makeEndpointRoutes()
makeTables()
checkRefresh()
fetchAndUpload()
res.json response.data
, ->
endpoints = LocalSettings.getGlobal 'endpoints'
if endpoints
makeEndpointRoutes()
makeTables()
res.json endpoints
else
res.json {}
else
endpoints = LocalSettings.getGlobal 'endpoints'
makeEndpointRoutes()
makeTables()
res.json endpoints
ndx.app.post null, '/api/refresh-login', (req, res, next) ->
if isOnline()
original.$post '/api/refresh-login', req.data
.then (response) ->
if response.status is 200
globalUsers = LocalSettings.getGlobal('users') or {}
globalUsers[response.data[autoId]] = response.data
LocalSettings.setGlobal 'users', globalUsers
LocalSettings.setGlobal 'loggedInUser',
user: response.data
until: new Date().valueOf() + (5 * 60 * 60 * 1000)
res.json response.data
else
res.status(response.status).json response.data
, ->
loggedInUser = LocalSettings.getGlobal 'loggedInUser'
if loggedInUser and loggedInUser.until and loggedInUser.until > new Date().valueOf()
loggedInUser.until = new Date().valueOf() + (5 * 60 * 60 * 1000)
LocalSettings.setGlobal 'loggedInUser', loggedInUser
res.json loggedInUser.user
else
res.status(401).json {}
else
loggedInUser = LocalSettings.getGlobal 'loggedInUser'
if loggedInUser and loggedInUser.until and loggedInUser.until > new Date().valueOf()
loggedInUser.until = new Date().valueOf() + (5 * 60 * 60 * 1000)
LocalSettings.setGlobal 'loggedInUser', loggedInUser
res.json loggedInUser.user
else
res.status(401).json {}
ndx.app.get null, '/api/logout', (req, res, next) ->
LocalSettings.setGlobal 'loggedInUser', null
original.$get req.uri, req.data
.then ->
true
, ->
false
res.end 'OK'
ndx.app.post null, '/api/login', (req, res, next) ->
original.$post req.uri, req.body
.then (response) ->
res.json response.data
, (err) ->
if err.status is 401
res.reject err
else
users = LocalSettings.getGlobal 'users'
user = null
for key of users
user = users[key]
if user.local?.email?.toLowerCase() is req.body.email?.toLowerCase()
break
if user
if dcodeIO.bcrypt.compareSync req.body.password, PI:PASSWORD:<PASSWORD>END_PI
LocalSettings.setGlobal 'loggedInUser',
user: user
until: new Date().valueOf() + (5 * 60 * 60 * 1000)
res.json user
else
res.reject err
else
res.reject err
setOffline: (val) ->
offline = val
LocalSettings.setGlobal 'offline', offline
isOnline: isOnline
original: original
config: (_config) ->
config = _config
.run (Server) ->
Server.setOffline false
|
[
{
"context": "]': [\n {type: 'separator'}\n {\n label: 'Juno',\n submenu: [\n {label: 'Run Block', c",
"end": 117,
"score": 0.8369765281677246,
"start": 113,
"tag": "NAME",
"value": "Juno"
},
{
"context": "y': [\n {type: 'separator'}\n {\n label: 'Ju... | menus/julia-client.cson | blegat/atom-julia-client | 0 | 'context-menu':
'atom-text-editor[data-grammar="source julia"]': [
{type: 'separator'}
{
label: 'Juno',
submenu: [
{label: 'Run Block', command: 'julia-client:run-block'}
{label: 'Select Block', command: 'julia-client:select-block'}
{type: 'separator'}
{label: 'Go to Definition', command: 'julia-client:goto-symbol'}
{label: 'Show Documentation', command: 'julia-client:show-documentation'}
{label: 'Format Code', command: 'julia-client:format-code'}
{type: 'separator'}
{label: 'Toggle Breakpoint', command: 'julia-debug:toggle-breakpoint'}
{label: 'Toggle Conditional Breakpoint', command: 'julia-debug:toggle-conditional-breakpoint'}
]
}
{type: 'separator'}
]
'.tree-view li.directory': [
{type: 'separator'}
{
label: 'Juno',
submenu: [
{ label: 'Work in Folder', command: 'julia-client:work-in-current-folder' }
{ label: 'Activate Environment in Folder', command: 'julia-client:activate-environment-in-current-folder' }
{ label: 'New Terminal from Folder', command: 'julia-client:new-terminal-from-current-folder'}
]
}
{type: 'separator'}
]
| 225706 | 'context-menu':
'atom-text-editor[data-grammar="source julia"]': [
{type: 'separator'}
{
label: '<NAME>',
submenu: [
{label: 'Run Block', command: 'julia-client:run-block'}
{label: 'Select Block', command: 'julia-client:select-block'}
{type: 'separator'}
{label: 'Go to Definition', command: 'julia-client:goto-symbol'}
{label: 'Show Documentation', command: 'julia-client:show-documentation'}
{label: 'Format Code', command: 'julia-client:format-code'}
{type: 'separator'}
{label: 'Toggle Breakpoint', command: 'julia-debug:toggle-breakpoint'}
{label: 'Toggle Conditional Breakpoint', command: 'julia-debug:toggle-conditional-breakpoint'}
]
}
{type: 'separator'}
]
'.tree-view li.directory': [
{type: 'separator'}
{
label: '<NAME>',
submenu: [
{ label: 'Work in Folder', command: 'julia-client:work-in-current-folder' }
{ label: 'Activate Environment in Folder', command: 'julia-client:activate-environment-in-current-folder' }
{ label: 'New Terminal from Folder', command: 'julia-client:new-terminal-from-current-folder'}
]
}
{type: 'separator'}
]
| true | 'context-menu':
'atom-text-editor[data-grammar="source julia"]': [
{type: 'separator'}
{
label: 'PI:NAME:<NAME>END_PI',
submenu: [
{label: 'Run Block', command: 'julia-client:run-block'}
{label: 'Select Block', command: 'julia-client:select-block'}
{type: 'separator'}
{label: 'Go to Definition', command: 'julia-client:goto-symbol'}
{label: 'Show Documentation', command: 'julia-client:show-documentation'}
{label: 'Format Code', command: 'julia-client:format-code'}
{type: 'separator'}
{label: 'Toggle Breakpoint', command: 'julia-debug:toggle-breakpoint'}
{label: 'Toggle Conditional Breakpoint', command: 'julia-debug:toggle-conditional-breakpoint'}
]
}
{type: 'separator'}
]
'.tree-view li.directory': [
{type: 'separator'}
{
label: 'PI:NAME:<NAME>END_PI',
submenu: [
{ label: 'Work in Folder', command: 'julia-client:work-in-current-folder' }
{ label: 'Activate Environment in Folder', command: 'julia-client:activate-environment-in-current-folder' }
{ label: 'New Terminal from Folder', command: 'julia-client:new-terminal-from-current-folder'}
]
}
{type: 'separator'}
]
|
[
{
"context": "ivate variables / constants\n #\n\n CACHE_KEY = \"catalogCache\"\n EXPIRE_MS = 12 * 60 * 60 * 1000 # 12 hours\n ",
"end": 1017,
"score": 0.9603187441825867,
"start": 1005,
"tag": "KEY",
"value": "catalogCache"
},
{
"context": "= 12 * 60 * 60 * 1000 # 12 hours\n ... | src/firmware/catalogcache.coffee | kimushu/rubic-chrome | 1 | "use strict"
# Pre dependencies
require("util/primitive")
require("util/map2json")
###*
@class CatalogCache
Catalog cache manager
###
module.exports = class CatalogCache
null
#--------------------------------------------------------------------------------
# Public properties
#
###*
@property {number} lastModified
Timestamp of last modified date (in milliseconds from epoch, UTC)
@readonly
###
@property("lastModified", get: -> @_lastModified)
###*
@property {number} lastFetched
Timestamp of last fetched date (in milliseconds from epoch, UTC)
@readonly
###
@property("lastFetched", get: -> @_lastFetched)
###*
@property {string[]} boards
Array of board IDs
@readonly
###
@property("boards", get: ->
result = []
@_boards.forEach((value, key) =>
result.push(key)
)
return result
)
#--------------------------------------------------------------------------------
# Private variables / constants
#
CACHE_KEY = "catalogCache"
EXPIRE_MS = 12 * 60 * 60 * 1000 # 12 hours
ROOT_OWNER = "kimushu"
ROOT_REPO = "rubic-catalog"
ROOT_BRANCH = "master"
ROOT_PATH = "catalog.json"
#--------------------------------------------------------------------------------
# Public methods
#
###*
@static
@method
Generate instance by loading data from cache
@return {Promise}
Promise object
@return {CatalogCache} return.PromiseValue
Generated instance
###
@load: ->
return Promise.resolve(
).then(=>
return Preferences.get(CACHE_KEY)
).then((values) =>
return new this(values[CACHE_KEY])
)
###*
@method
Get board cache data
@param {string} boardId
Board ID
@return {Object}
JSON object
###
getData: (boardId) ->
return @_boards.get("#{boardId}")
###*
@method
Update cache contents
@param {boolean} [force=false]
Force update (Ignore timestamp)
@param {number} [timeout=Infinity]
Timeout in milliseconds
@return {Promise}
Promise object
@return {boolean} updated
Result (true:yes, false:skipped)
###
update: (force = false, timeout = Infinity) ->
now = Date.now()
return Promise.resolve(false) if !force and now < ((@_lastFetched or 0) + EXPIRE_MS)
return Promise.resolve(
).then(=>
return Preferences.get({confirm_net: true})
).then((values) =>
return unless values.confirm_net
return new Promise((resolve, reject) =>
global.bootbox.dialog({
title: I18n.getMessage("Refresh_catalog")
message: I18n.translateText("""
{Confirm_before_catalog_update}
<div class="checkbox">
<label><input type="checkbox" class="bootbox-input">{Do_not_ask_again}</label>
</div>
""")
inputType: "checkbox"
closeButton: false
buttons: {
cancel: {
label: I18n.getMessage("Cancel")
className: "btn-default"
callback: -> # thin arrow
reject(Error("Cancelled"))
}
confirm: {
label: I18n.getMessage("OK")
className: "btn-primary"
callback: -> # thin arrow
resolve(this.find("input.bootbox-input").prop("checked"))
}
}
})
).then((always) =>
return Preferences.set({confirm_net: false}) if always
) # return new Promise().then()
).then(=>
root = new GitHubFetcher(ROOT_OWNER, ROOT_REPO, ROOT_BRANCH)
obj = null
boards = new Map()
return Promise.resolve(
).then(=>
return root.getAsJSON(ROOT_PATH)
).then((data) =>
obj = data
return (obj.boards or []).reduce(
(promise, b) =>
v = b[1]
boards.set("#{b[0]}", v)
if v.content?
v.lastFetched = now
return promise
return promise unless v.path?
now2 = Date.now()
return promise.then(=>
fetch = new GitHubFetcher(
v.owner or ROOT_OWNER
v.repo or ROOT_REPO
v.branch or ROOT_BRANCH
)
return fetch.getAsJSON(v.path)
).then((data) =>
v.content = data
v.content.lastFetched = now2
return
)
Promise.resolve()
)
).timeout(timeout).then(=>
@_lastModified = obj.lastModified
@_lastFetched = now
@_boards = boards
return @_store()
).then(=>
return true # Last PromiseValue
) # return Promise.resolve().then()...
) # return Promise.resolve().then()...
#--------------------------------------------------------------------------------
# Private methods
#
###*
@private
@method constructor
Constructor of CatalogCache class
@param {Object} [obj={}]
JSON object
###
constructor: (obj = {}) ->
@_lastModified = parseInt(obj.lastModified or 0)
@_lastFetched = parseInt(obj.lastFetched or 0)
@_boards = Map.fromJSON(obj.boards)
return
###*
@private
@method
Store data to cache
@return {Promise}
Promise object
###
_store: ->
obj = {
lastModified: @_lastModified
lastFetched: @_lastFetched
boards: @_boards.toJSON()
}
return Preferences.set({"#{CACHE_KEY}": obj})
# Post dependencies
Preferences = require("app/preferences")
GitHubFetcher = require("util/githubfetcher")
I18n = require("util/i18n")
| 4842 | "use strict"
# Pre dependencies
require("util/primitive")
require("util/map2json")
###*
@class CatalogCache
Catalog cache manager
###
module.exports = class CatalogCache
null
#--------------------------------------------------------------------------------
# Public properties
#
###*
@property {number} lastModified
Timestamp of last modified date (in milliseconds from epoch, UTC)
@readonly
###
@property("lastModified", get: -> @_lastModified)
###*
@property {number} lastFetched
Timestamp of last fetched date (in milliseconds from epoch, UTC)
@readonly
###
@property("lastFetched", get: -> @_lastFetched)
###*
@property {string[]} boards
Array of board IDs
@readonly
###
@property("boards", get: ->
result = []
@_boards.forEach((value, key) =>
result.push(key)
)
return result
)
#--------------------------------------------------------------------------------
# Private variables / constants
#
CACHE_KEY = "<KEY>"
EXPIRE_MS = 12 * 60 * 60 * 1000 # 12 hours
ROOT_OWNER = "kimushu"
ROOT_REPO = "rubic-catalog"
ROOT_BRANCH = "master"
ROOT_PATH = "catalog.json"
#--------------------------------------------------------------------------------
# Public methods
#
###*
@static
@method
Generate instance by loading data from cache
@return {Promise}
Promise object
@return {CatalogCache} return.PromiseValue
Generated instance
###
@load: ->
return Promise.resolve(
).then(=>
return Preferences.get(CACHE_KEY)
).then((values) =>
return new this(values[CACHE_KEY])
)
###*
@method
Get board cache data
@param {string} boardId
Board ID
@return {Object}
JSON object
###
getData: (boardId) ->
return @_boards.get("#{boardId}")
###*
@method
Update cache contents
@param {boolean} [force=false]
Force update (Ignore timestamp)
@param {number} [timeout=Infinity]
Timeout in milliseconds
@return {Promise}
Promise object
@return {boolean} updated
Result (true:yes, false:skipped)
###
update: (force = false, timeout = Infinity) ->
now = Date.now()
return Promise.resolve(false) if !force and now < ((@_lastFetched or 0) + EXPIRE_MS)
return Promise.resolve(
).then(=>
return Preferences.get({confirm_net: true})
).then((values) =>
return unless values.confirm_net
return new Promise((resolve, reject) =>
global.bootbox.dialog({
title: I18n.getMessage("Refresh_catalog")
message: I18n.translateText("""
{Confirm_before_catalog_update}
<div class="checkbox">
<label><input type="checkbox" class="bootbox-input">{Do_not_ask_again}</label>
</div>
""")
inputType: "checkbox"
closeButton: false
buttons: {
cancel: {
label: I18n.getMessage("Cancel")
className: "btn-default"
callback: -> # thin arrow
reject(Error("Cancelled"))
}
confirm: {
label: I18n.getMessage("OK")
className: "btn-primary"
callback: -> # thin arrow
resolve(this.find("input.bootbox-input").prop("checked"))
}
}
})
).then((always) =>
return Preferences.set({confirm_net: false}) if always
) # return new Promise().then()
).then(=>
root = new GitHubFetcher(ROOT_OWNER, ROOT_REPO, ROOT_BRANCH)
obj = null
boards = new Map()
return Promise.resolve(
).then(=>
return root.getAsJSON(ROOT_PATH)
).then((data) =>
obj = data
return (obj.boards or []).reduce(
(promise, b) =>
v = b[1]
boards.set("#{b[0]}", v)
if v.content?
v.lastFetched = now
return promise
return promise unless v.path?
now2 = Date.now()
return promise.then(=>
fetch = new GitHubFetcher(
v.owner or ROOT_OWNER
v.repo or ROOT_REPO
v.branch or ROOT_BRANCH
)
return fetch.getAsJSON(v.path)
).then((data) =>
v.content = data
v.content.lastFetched = now2
return
)
Promise.resolve()
)
).timeout(timeout).then(=>
@_lastModified = obj.lastModified
@_lastFetched = now
@_boards = boards
return @_store()
).then(=>
return true # Last PromiseValue
) # return Promise.resolve().then()...
) # return Promise.resolve().then()...
#--------------------------------------------------------------------------------
# Private methods
#
###*
@private
@method constructor
Constructor of CatalogCache class
@param {Object} [obj={}]
JSON object
###
constructor: (obj = {}) ->
@_lastModified = parseInt(obj.lastModified or 0)
@_lastFetched = parseInt(obj.lastFetched or 0)
@_boards = Map.fromJSON(obj.boards)
return
###*
@private
@method
Store data to cache
@return {Promise}
Promise object
###
_store: ->
obj = {
lastModified: @_lastModified
lastFetched: @_lastFetched
boards: @_boards.toJSON()
}
return Preferences.set({"#{CACHE_KEY}": obj})
# Post dependencies
Preferences = require("app/preferences")
GitHubFetcher = require("util/githubfetcher")
I18n = require("util/i18n")
| true | "use strict"
# Pre dependencies
require("util/primitive")
require("util/map2json")
###*
@class CatalogCache
Catalog cache manager
###
module.exports = class CatalogCache
null
#--------------------------------------------------------------------------------
# Public properties
#
###*
@property {number} lastModified
Timestamp of last modified date (in milliseconds from epoch, UTC)
@readonly
###
@property("lastModified", get: -> @_lastModified)
###*
@property {number} lastFetched
Timestamp of last fetched date (in milliseconds from epoch, UTC)
@readonly
###
@property("lastFetched", get: -> @_lastFetched)
###*
@property {string[]} boards
Array of board IDs
@readonly
###
@property("boards", get: ->
result = []
@_boards.forEach((value, key) =>
result.push(key)
)
return result
)
#--------------------------------------------------------------------------------
# Private variables / constants
#
CACHE_KEY = "PI:KEY:<KEY>END_PI"
EXPIRE_MS = 12 * 60 * 60 * 1000 # 12 hours
ROOT_OWNER = "kimushu"
ROOT_REPO = "rubic-catalog"
ROOT_BRANCH = "master"
ROOT_PATH = "catalog.json"
#--------------------------------------------------------------------------------
# Public methods
#
###*
@static
@method
Generate instance by loading data from cache
@return {Promise}
Promise object
@return {CatalogCache} return.PromiseValue
Generated instance
###
@load: ->
return Promise.resolve(
).then(=>
return Preferences.get(CACHE_KEY)
).then((values) =>
return new this(values[CACHE_KEY])
)
###*
@method
Get board cache data
@param {string} boardId
Board ID
@return {Object}
JSON object
###
getData: (boardId) ->
return @_boards.get("#{boardId}")
###*
@method
Update cache contents
@param {boolean} [force=false]
Force update (Ignore timestamp)
@param {number} [timeout=Infinity]
Timeout in milliseconds
@return {Promise}
Promise object
@return {boolean} updated
Result (true:yes, false:skipped)
###
update: (force = false, timeout = Infinity) ->
now = Date.now()
return Promise.resolve(false) if !force and now < ((@_lastFetched or 0) + EXPIRE_MS)
return Promise.resolve(
).then(=>
return Preferences.get({confirm_net: true})
).then((values) =>
return unless values.confirm_net
return new Promise((resolve, reject) =>
global.bootbox.dialog({
title: I18n.getMessage("Refresh_catalog")
message: I18n.translateText("""
{Confirm_before_catalog_update}
<div class="checkbox">
<label><input type="checkbox" class="bootbox-input">{Do_not_ask_again}</label>
</div>
""")
inputType: "checkbox"
closeButton: false
buttons: {
cancel: {
label: I18n.getMessage("Cancel")
className: "btn-default"
callback: -> # thin arrow
reject(Error("Cancelled"))
}
confirm: {
label: I18n.getMessage("OK")
className: "btn-primary"
callback: -> # thin arrow
resolve(this.find("input.bootbox-input").prop("checked"))
}
}
})
).then((always) =>
return Preferences.set({confirm_net: false}) if always
) # return new Promise().then()
).then(=>
root = new GitHubFetcher(ROOT_OWNER, ROOT_REPO, ROOT_BRANCH)
obj = null
boards = new Map()
return Promise.resolve(
).then(=>
return root.getAsJSON(ROOT_PATH)
).then((data) =>
obj = data
return (obj.boards or []).reduce(
(promise, b) =>
v = b[1]
boards.set("#{b[0]}", v)
if v.content?
v.lastFetched = now
return promise
return promise unless v.path?
now2 = Date.now()
return promise.then(=>
fetch = new GitHubFetcher(
v.owner or ROOT_OWNER
v.repo or ROOT_REPO
v.branch or ROOT_BRANCH
)
return fetch.getAsJSON(v.path)
).then((data) =>
v.content = data
v.content.lastFetched = now2
return
)
Promise.resolve()
)
).timeout(timeout).then(=>
@_lastModified = obj.lastModified
@_lastFetched = now
@_boards = boards
return @_store()
).then(=>
return true # Last PromiseValue
) # return Promise.resolve().then()...
) # return Promise.resolve().then()...
#--------------------------------------------------------------------------------
# Private methods
#
###*
@private
@method constructor
Constructor of CatalogCache class
@param {Object} [obj={}]
JSON object
###
constructor: (obj = {}) ->
@_lastModified = parseInt(obj.lastModified or 0)
@_lastFetched = parseInt(obj.lastFetched or 0)
@_boards = Map.fromJSON(obj.boards)
return
###*
@private
@method
Store data to cache
@return {Promise}
Promise object
###
_store: ->
obj = {
lastModified: @_lastModified
lastFetched: @_lastFetched
boards: @_boards.toJSON()
}
return Preferences.set({"#{CACHE_KEY}": obj})
# Post dependencies
Preferences = require("app/preferences")
GitHubFetcher = require("util/githubfetcher")
I18n = require("util/i18n")
|
[
{
"context": "te.timeline.helpers\n posts: [\n {\n user: 'John Doe'\n action: 'posted a status'\n content: '",
"end": 64,
"score": 0.9985657930374146,
"start": 56,
"tag": "NAME",
"value": "John Doe"
},
{
"context": "\n content: 'First!'\n }\n {\n us... | app/client/views/timeline/timeline.coffee | CollabSpace/GitSocial | 1 | Template.timeline.helpers
posts: [
{
user: 'John Doe'
action: 'posted a status'
content: 'First!'
}
{
user: 'Jack White'
action: 'posted a status'
content: 'Hello World!'
}
]
| 97642 | Template.timeline.helpers
posts: [
{
user: '<NAME>'
action: 'posted a status'
content: 'First!'
}
{
user: '<NAME>'
action: 'posted a status'
content: 'Hello World!'
}
]
| true | Template.timeline.helpers
posts: [
{
user: 'PI:NAME:<NAME>END_PI'
action: 'posted a status'
content: 'First!'
}
{
user: 'PI:NAME:<NAME>END_PI'
action: 'posted a status'
content: 'Hello World!'
}
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.