entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "he color formats conversion\n#\n# Copyright (C) 2011 Nikolay Nemshilov\n#\n\n\n#\n# converts a hex string into an rgb array\n#",
"end": 108,
"score": 0.9998915791511536,
"start": 91,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | stl/fx/src/colors.coffee | lovely-io/lovely.io-stl | 2 | #
# This file contains utils to handle the color formats conversion
#
# Copyright (C) 2011 Nikolay Nemshilov
#
#
# converts a hex string into an rgb array
#
# @param {String} a hex color
# @param {Boolean} flag if need an array
# @return {String} rgb(R,G,B) or Array [R,G,B]
#
to_rgb = (color, in_array)->
match = /#([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})/i.exec(to_hex(color)||'');
if match
match = (parseInt(bit,16) for bit in match.slice(1))
match = if in_array then match else 'rgb('+match+')'
return match
#
# converts a #XXX or rgb(X, X, X) sring into standard #XXXXXX color string
#
# @param {String} color in other formats
# @return {String} hex color
#
to_hex = (color)->
match = /^#(\w)(\w)(\w)$/.exec(color)
if match
match = "#"+ match[1]+match[1]+match[2]+match[2]+match[3]+match[3]
else if match = /^rgb\((\d+),\s*(\d+),\s*(\d+)\)$/.exec(color)
value = match.slice(1)
match = "#"
for bit in value
bit = (bit-0).toString(16)
match += if bit.length is 1 then '0'+bit else bit
else
match = COLORS[color] || color
return match
# a bunch of standard colors map for old browsers
COLORS =
maroon: '#800000'
red: '#ff0000'
orange: '#ffA500'
yellow: '#ffff00'
olive: '#808000'
purple: '#800080'
fuchsia: '#ff00ff'
white: '#ffffff'
lime: '#00ff00'
green: '#008000'
navy: '#000080'
blue: '#0000ff'
aqua: '#00ffff'
teal: '#008080'
black: '#000000'
silver: '#c0c0c0'
gray: '#808080'
brown: '#a52a2a'
| 142993 | #
# This file contains utils to handle the color formats conversion
#
# Copyright (C) 2011 <NAME>
#
#
# converts a hex string into an rgb array
#
# @param {String} a hex color
# @param {Boolean} flag if need an array
# @return {String} rgb(R,G,B) or Array [R,G,B]
#
to_rgb = (color, in_array)->
match = /#([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})/i.exec(to_hex(color)||'');
if match
match = (parseInt(bit,16) for bit in match.slice(1))
match = if in_array then match else 'rgb('+match+')'
return match
#
# converts a #XXX or rgb(X, X, X) sring into standard #XXXXXX color string
#
# @param {String} color in other formats
# @return {String} hex color
#
to_hex = (color)->
match = /^#(\w)(\w)(\w)$/.exec(color)
if match
match = "#"+ match[1]+match[1]+match[2]+match[2]+match[3]+match[3]
else if match = /^rgb\((\d+),\s*(\d+),\s*(\d+)\)$/.exec(color)
value = match.slice(1)
match = "#"
for bit in value
bit = (bit-0).toString(16)
match += if bit.length is 1 then '0'+bit else bit
else
match = COLORS[color] || color
return match
# a bunch of standard colors map for old browsers
COLORS =
maroon: '#800000'
red: '#ff0000'
orange: '#ffA500'
yellow: '#ffff00'
olive: '#808000'
purple: '#800080'
fuchsia: '#ff00ff'
white: '#ffffff'
lime: '#00ff00'
green: '#008000'
navy: '#000080'
blue: '#0000ff'
aqua: '#00ffff'
teal: '#008080'
black: '#000000'
silver: '#c0c0c0'
gray: '#808080'
brown: '#a52a2a'
| true | #
# This file contains utils to handle the color formats conversion
#
# Copyright (C) 2011 PI:NAME:<NAME>END_PI
#
#
# converts a hex string into an rgb array
#
# @param {String} a hex color
# @param {Boolean} flag if need an array
# @return {String} rgb(R,G,B) or Array [R,G,B]
#
to_rgb = (color, in_array)->
match = /#([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})/i.exec(to_hex(color)||'');
if match
match = (parseInt(bit,16) for bit in match.slice(1))
match = if in_array then match else 'rgb('+match+')'
return match
#
# converts a #XXX or rgb(X, X, X) sring into standard #XXXXXX color string
#
# @param {String} color in other formats
# @return {String} hex color
#
to_hex = (color)->
match = /^#(\w)(\w)(\w)$/.exec(color)
if match
match = "#"+ match[1]+match[1]+match[2]+match[2]+match[3]+match[3]
else if match = /^rgb\((\d+),\s*(\d+),\s*(\d+)\)$/.exec(color)
value = match.slice(1)
match = "#"
for bit in value
bit = (bit-0).toString(16)
match += if bit.length is 1 then '0'+bit else bit
else
match = COLORS[color] || color
return match
# a bunch of standard colors map for old browsers
COLORS =
maroon: '#800000'
red: '#ff0000'
orange: '#ffA500'
yellow: '#ffff00'
olive: '#808000'
purple: '#800080'
fuchsia: '#ff00ff'
white: '#ffffff'
lime: '#00ff00'
green: '#008000'
navy: '#000080'
blue: '#0000ff'
aqua: '#00ffff'
teal: '#008080'
black: '#000000'
silver: '#c0c0c0'
gray: '#808080'
brown: '#a52a2a'
|
[
{
"context": "pEqual store.info('nada'), {\n in: -1, key:'nada', value:undefined, overridden:false\n }\n\n ",
"end": 1870,
"score": 0.8592516779899597,
"start": 1866,
"tag": "KEY",
"value": "nada"
},
{
"context": "qual store.info('nada', 5), {\n in: 5, key:'nada... | test/lib/test.coffee | elidoran/node-value-store | 1 | assert = require 'assert'
corepath = require 'path'
buildStore = require '../../lib'
helperFile = (name) -> corepath.resolve __dirname, '..', 'helpers', name
describe 'test value store', ->
describe 'with bad arguments', ->
store = buildStore()
it 'to source() should return undefined', ->
assert.equal store.source(-1), undefined
assert.equal store.source(), undefined
assert.equal store.source(0), undefined
assert.equal store.source(99), undefined
it 'to get() should return undefined', ->
assert.equal store.get(), undefined
assert.equal store.get('nada'), undefined
assert.equal store.get(0), undefined
assert.equal store.get('nada', 5), undefined
assert.equal store.get(0,2), undefined
assert.equal store.get(0.1), undefined
assert.equal store.get(false), undefined
assert.equal store.get(true), undefined
it 'to has() should return false', ->
assert.equal store.has(), false
assert.equal store.has('nada'), false
assert.equal store.has(0), false
assert.equal store.has(0.1), false
assert.equal store.has(false), false
assert.equal store.has(true), false
assert.equal store.has('nada', 5), false
assert.equal store.has(0,2), false
it 'to in() should return -1', ->
assert.equal store.in(), -1
assert.equal store.in('nada'), -1
assert.equal store.in(0), -1
assert.equal store.in(0.1), -1
assert.equal store.in(false), -1
assert.equal store.in(true), -1
assert.equal store.in('nada', 5), -1
assert.equal store.in(0,2), -1
it 'to info() should return undefined type results', ->
assert.deepEqual store.info(), {
in: -1, key:undefined, value:undefined, overridden:false
}
assert.deepEqual store.info('nada'), {
in: -1, key:'nada', value:undefined, overridden:false
}
assert.deepEqual store.info(0), {
in: -1, key:0, value:undefined, overridden:false
}
assert.deepEqual store.info(0.1), {
in: -1, key:0.1, value:undefined, overridden:false
}
assert.deepEqual store.info(false), {
in: -1, key:false, value:undefined, overridden:false
}
assert.deepEqual store.info(true), {
in: -1, key:true, value:undefined, overridden:false
}
assert.deepEqual store.info('nada', 5), {
in: 5, key:'nada', value:undefined, overridden:false
}
assert.deepEqual store.info(0,2), {
in: 2, key:0, value:undefined, overridden:false
}
it 'to all() should return empty array', ->
empty = []
assert.deepEqual store.all(), empty
assert.deepEqual store.all('nada'), empty
assert.deepEqual store.all(0), empty
assert.deepEqual store.all(0.1), empty
assert.deepEqual store.all(false), empty
assert.deepEqual store.all(true), empty
it 'to add() should return error', ->
assert.deepEqual store.add(), error:'Invalid index: 0'
assert.deepEqual store.add('key'), error:'Invalid index: 0'
assert.deepEqual store.add('key', 'value'), error:'Invalid index: 0'
assert.deepEqual store.add('key', 'value', -1), error:'Invalid index: -1'
assert.deepEqual store.add('key', 'value', 99), error:'Invalid index: 99'
it 'to remove() should return error', ->
assert.deepEqual store.remove(), error:'Invalid index: 0'
assert.deepEqual store.remove('key', -1), error:'Invalid index: -1'
assert.deepEqual store.add('key', 'value'), error:'Invalid index: 0'
it 'to set() should return error', ->
assert.deepEqual store.set(), error:'Invalid index: 0'
assert.deepEqual store.set('key'), error:'Invalid index: 0'
assert.deepEqual store.set('key', 'value'), error:'Invalid index: 0'
assert.deepEqual store.set('key', 'value', -1), error:'Invalid index: -1'
assert.deepEqual store.set('key', 'value', 99), error:'Invalid index: 99'
it 'to append() should return error', ->
assert.deepEqual store.append(), {
error:'Must provide a string or object', value:undefined
}
assert.deepEqual store.append(0), {
error:'Must provide a string or object', value:0
}
assert.deepEqual store.append(0.1), {
error:'Must provide a string or object', value:0.1
}
assert.deepEqual store.append(true), {
error:'Must provide a string or object', value:true
}
assert.deepEqual store.append(false), {
error:'Must provide a string or object', value:false
}
assert.deepEqual store.append('bad'), {
error:'String must be a json or ini file'
}
assert.deepEqual store.append('./nonexistent.json'), {
exists: false,
error:'File doesn\'t exist: ' + corepath.resolve './nonexistent.json'
}
it 'to prepend() should return error', ->
assert.deepEqual store.prepend(), {
error:'Must provide a string or object', value:undefined
}
assert.deepEqual store.prepend(0), {
error:'Must provide a string or object', value:0
}
assert.deepEqual store.prepend(0.1), {
error:'Must provide a string or object', value:0.1
}
assert.deepEqual store.prepend(true), {
error:'Must provide a string or object', value:true
}
assert.deepEqual store.prepend(false), {
error:'Must provide a string or object', value:false
}
assert.deepEqual store.prepend('bad'), {
error:'String must be a json or ini file'
}
assert.deepEqual store.prepend('./nonexistent.json'), {
exists: false,
error:'File doesn\'t exist: ' + corepath.resolve './nonexistent.json'
}
it 'to shift() should return empty array', ->
empty = removed:[]
assert.deepEqual store.shift(-1), empty
assert.deepEqual store.shift(0), empty
assert.deepEqual store.shift(100), empty
it 'to pop() should return error', ->
empty = removed:[]
assert.deepEqual store.pop(-1), empty
assert.deepEqual store.pop(0), empty
assert.deepEqual store.pop(100), empty
describe 'built with no initial objects', ->
store = buildStore()
it 'should have an empty array', ->
assert store.array
assert.equal store.array.length, 0
it 'should return zero for count', ->
assert.equal store.count(), 0
it 'should return nada for get()', ->
assert.equal store.get('nada'), undefined
it 'should return false for has()', ->
assert.equal store.has('nada'), false
it 'should return -1 for in()', ->
assert.equal store.in('nada'), -1
it 'should return undefined type results for info()', ->
assert.deepEqual store.info('nada'), {
in:-1
key: 'nada'
value: undefined
overridden: false
}
it 'should return [] for all()', ->
assert.deepEqual store.all('nada'), []
it 'should return error for add() because there\'s no object', ->
assert.deepEqual store.add('key', 'value'), error:'Invalid index: 0'
it 'should return error for remove() because there\'s no object', ->
assert.deepEqual store.remove('key'), error:'Invalid index: 0'
it 'should return error for set() because there\'s no object', ->
assert.deepEqual store.set('key', 'value'), error:'Invalid index: 0'
it 'should return true for append(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
assert store.append({appended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 1, 'array should have one source'
assert.equal store.array[0].appended, true
assert.equal store.array[0].__source, 'append'
# reset
store.array.pop()
it 'should return true for append(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
file = helperFile 'empty.json'
assert store.append(file)
assert.equal store.array.length, 1, 'array should have new object'
assert.deepEqual store.array[0].__source, {
file: file
format: 'json'
fn: 'append'
}
# reset
store.array.pop()
it 'should return true for prepend(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
assert store.prepend({prepended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 1, 'array should have one source'
assert.equal store.array[0].prepended, true
assert.equal store.array[0].__source, 'prepend'
# reset
store.array.pop()
it 'should return true for prepend(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
file = helperFile 'empty.json'
assert store.prepend(file)
assert.equal store.array.length, 1, 'array should have new object'
assert.deepEqual store.array[0].__source, {
file: file
format: 'json'
fn: 'prepend'
}
# reset
store.array.pop()
it 'should return removed:[] for shift()', ->
assert.deepEqual store.shift(), removed:[]
it 'should return removed:[] for pop()', ->
assert.deepEqual store.pop(), removed:[]
describe 'built with initial empty object', ->
store = buildStore [{}]
it 'should return one for count', ->
assert.equal store.count(), 1
it 'should return nada for get()', ->
assert.equal store.get('nada'), undefined
it 'should return false for has()', ->
assert.equal store.has('nada'), false
it 'should return -1 for in()', ->
assert.equal store.in('nada'), -1
it 'should return undefined type results for info()', ->
assert.deepEqual store.info('nada'), {
in:-1
key: 'nada'
value: undefined
overridden: false
}
it 'should return [] for all()', ->
assert.deepEqual store.all('nada'), []
it 'should return true for add() of new value', ->
assert.deepEqual store.add(), error:'No key specified'
assert.deepEqual store.add('key'), error:'No value specified'
assert.deepEqual store.add('new', 'value'), addedTo:undefined
assert.equal store.array[0].new, 'value'
it 'should return true for add() to existing value', ->
assert.deepEqual store.add('new', 'value2'), addedTo:'value'
assert.deepEqual store.array[0].new, [ 'value', 'value2' ]
# reset
delete store.array[0].new
it 'should return true for remove()', ->
store.array[0].out = 'remove'
assert.deepEqual store.remove('out'), removed:'remove'
assert.equal store.array[0].out, undefined, 'should have removed it'
it 'should return true for set() initial', ->
assert.equal store.array[0].over, undefined, 'should not be an "over" value'
assert.deepEqual store.set('over', 'value'), replaced:undefined
assert.equal store.array[0].over, 'value'
# reset
delete store.array[0].over
it 'should return true for set() overwrite', ->
store.array[0].over = 'value'
assert.equal store.array[0].over, 'value', 'should be a value to overwrite'
assert.deepEqual store.set('over', 'value2'), replaced:'value'
assert.equal store.array[0].over, 'value2'
# reset
delete store.array[0].over
it 'should return true for append(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
assert store.append({appended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 2, 'array should have two'
assert.equal store.array[0].__source, 'constructor'
assert.equal store.array[1].appended, true
assert.equal store.array[1].__source, 'append'
# reset
store.array.pop()
it 'should return true for append(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
file = helperFile 'empty.json'
assert store.append(file)
assert.equal store.array.length, 2, 'array should have two'
assert.deepEqual store.array[0].__source, 'constructor'
assert.deepEqual store.array[1].__source, {
file: file
format: 'json'
fn: 'append'
}
# reset
store.array.pop()
it 'should return true for prepend(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
assert store.prepend({prepended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 2, 'array should have two'
assert.deepEqual store.array[0].prepended, true
assert.deepEqual store.array[0].__source, 'prepend'
assert.deepEqual store.array[1].__source, 'constructor'
# reset
store.array.shift()
it 'should return true for prepend(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
file = helperFile 'empty.json'
assert store.prepend(file)
assert.equal store.array.length, 2, 'array should have two'
assert.deepEqual store.array[1].__source, 'constructor'
assert.deepEqual store.array[0].__source, {
file: file
format: 'json'
fn: 'prepend'
}
# reset
store.array.shift()
it 'should return object in `removed` for shift()', ->
hold = store.array[0]
assert.deepEqual store.shift(), removed:[hold]
# reset
store.array.push hold
it 'should return object in `removed` for pop()', ->
hold = store.array[0]
assert.deepEqual store.pop(), removed:[hold]
# reset
store.array.push hold
describe 'built with initial empty object', ->
store = buildStore [{}, {}, {}]
it 'should return one for count', ->
assert.equal store.count(), 3
it 'should return objects in `removed` for shift()', ->
array = [ {}, {}, {} ]
shifted = store.shift 3
assert.deepEqual shifted, removed:array
# reset
store.array.unshift.apply store.array, shifted.removed
it 'should return objects in `removed` for pop()', ->
array = [ {}, {}, {} ]
popped = store.pop 3
assert.deepEqual popped, removed:array
# reset
store.array.push.apply store.array, popped.removed
| 120120 | assert = require 'assert'
corepath = require 'path'
buildStore = require '../../lib'
helperFile = (name) -> corepath.resolve __dirname, '..', 'helpers', name
describe 'test value store', ->
describe 'with bad arguments', ->
store = buildStore()
it 'to source() should return undefined', ->
assert.equal store.source(-1), undefined
assert.equal store.source(), undefined
assert.equal store.source(0), undefined
assert.equal store.source(99), undefined
it 'to get() should return undefined', ->
assert.equal store.get(), undefined
assert.equal store.get('nada'), undefined
assert.equal store.get(0), undefined
assert.equal store.get('nada', 5), undefined
assert.equal store.get(0,2), undefined
assert.equal store.get(0.1), undefined
assert.equal store.get(false), undefined
assert.equal store.get(true), undefined
it 'to has() should return false', ->
assert.equal store.has(), false
assert.equal store.has('nada'), false
assert.equal store.has(0), false
assert.equal store.has(0.1), false
assert.equal store.has(false), false
assert.equal store.has(true), false
assert.equal store.has('nada', 5), false
assert.equal store.has(0,2), false
it 'to in() should return -1', ->
assert.equal store.in(), -1
assert.equal store.in('nada'), -1
assert.equal store.in(0), -1
assert.equal store.in(0.1), -1
assert.equal store.in(false), -1
assert.equal store.in(true), -1
assert.equal store.in('nada', 5), -1
assert.equal store.in(0,2), -1
it 'to info() should return undefined type results', ->
assert.deepEqual store.info(), {
in: -1, key:undefined, value:undefined, overridden:false
}
assert.deepEqual store.info('nada'), {
in: -1, key:'<KEY>', value:undefined, overridden:false
}
assert.deepEqual store.info(0), {
in: -1, key:0, value:undefined, overridden:false
}
assert.deepEqual store.info(0.1), {
in: -1, key:0.1, value:undefined, overridden:false
}
assert.deepEqual store.info(false), {
in: -1, key:false, value:undefined, overridden:false
}
assert.deepEqual store.info(true), {
in: -1, key:true, value:undefined, overridden:false
}
assert.deepEqual store.info('nada', 5), {
in: 5, key:'<KEY>', value:undefined, overridden:false
}
assert.deepEqual store.info(0,2), {
in: 2, key:0, value:undefined, overridden:false
}
it 'to all() should return empty array', ->
empty = []
assert.deepEqual store.all(), empty
assert.deepEqual store.all('nada'), empty
assert.deepEqual store.all(0), empty
assert.deepEqual store.all(0.1), empty
assert.deepEqual store.all(false), empty
assert.deepEqual store.all(true), empty
it 'to add() should return error', ->
assert.deepEqual store.add(), error:'Invalid index: 0'
assert.deepEqual store.add('key'), error:'Invalid index: 0'
assert.deepEqual store.add('key', 'value'), error:'Invalid index: 0'
assert.deepEqual store.add('key', 'value', -1), error:'Invalid index: -1'
assert.deepEqual store.add('key', 'value', 99), error:'Invalid index: 99'
it 'to remove() should return error', ->
assert.deepEqual store.remove(), error:'Invalid index: 0'
assert.deepEqual store.remove('key', -1), error:'Invalid index: -1'
assert.deepEqual store.add('key', 'value'), error:'Invalid index: 0'
it 'to set() should return error', ->
assert.deepEqual store.set(), error:'Invalid index: 0'
assert.deepEqual store.set('key'), error:'Invalid index: <KEY>'
assert.deepEqual store.set('key', 'value'), error:'Invalid index: 0'
assert.deepEqual store.set('key', 'value', -1), error:'Invalid index: -1'
assert.deepEqual store.set('key', 'value', 99), error:'Invalid index: 99'
it 'to append() should return error', ->
assert.deepEqual store.append(), {
error:'Must provide a string or object', value:undefined
}
assert.deepEqual store.append(0), {
error:'Must provide a string or object', value:0
}
assert.deepEqual store.append(0.1), {
error:'Must provide a string or object', value:0.1
}
assert.deepEqual store.append(true), {
error:'Must provide a string or object', value:true
}
assert.deepEqual store.append(false), {
error:'Must provide a string or object', value:false
}
assert.deepEqual store.append('bad'), {
error:'String must be a json or ini file'
}
assert.deepEqual store.append('./nonexistent.json'), {
exists: false,
error:'File doesn\'t exist: ' + corepath.resolve './nonexistent.json'
}
it 'to prepend() should return error', ->
assert.deepEqual store.prepend(), {
error:'Must provide a string or object', value:undefined
}
assert.deepEqual store.prepend(0), {
error:'Must provide a string or object', value:0
}
assert.deepEqual store.prepend(0.1), {
error:'Must provide a string or object', value:0.1
}
assert.deepEqual store.prepend(true), {
error:'Must provide a string or object', value:true
}
assert.deepEqual store.prepend(false), {
error:'Must provide a string or object', value:false
}
assert.deepEqual store.prepend('bad'), {
error:'String must be a json or ini file'
}
assert.deepEqual store.prepend('./nonexistent.json'), {
exists: false,
error:'File doesn\'t exist: ' + corepath.resolve './nonexistent.json'
}
it 'to shift() should return empty array', ->
empty = removed:[]
assert.deepEqual store.shift(-1), empty
assert.deepEqual store.shift(0), empty
assert.deepEqual store.shift(100), empty
it 'to pop() should return error', ->
empty = removed:[]
assert.deepEqual store.pop(-1), empty
assert.deepEqual store.pop(0), empty
assert.deepEqual store.pop(100), empty
describe 'built with no initial objects', ->
store = buildStore()
it 'should have an empty array', ->
assert store.array
assert.equal store.array.length, 0
it 'should return zero for count', ->
assert.equal store.count(), 0
it 'should return nada for get()', ->
assert.equal store.get('nada'), undefined
it 'should return false for has()', ->
assert.equal store.has('nada'), false
it 'should return -1 for in()', ->
assert.equal store.in('nada'), -1
it 'should return undefined type results for info()', ->
assert.deepEqual store.info('nada'), {
in:-1
key: '<KEY>'
value: undefined
overridden: false
}
it 'should return [] for all()', ->
assert.deepEqual store.all('nada'), []
it 'should return error for add() because there\'s no object', ->
assert.deepEqual store.add('key', 'value'), error:'Invalid index: 0'
it 'should return error for remove() because there\'s no object', ->
assert.deepEqual store.remove('key'), error:'Invalid index: 0'
it 'should return error for set() because there\'s no object', ->
assert.deepEqual store.set('key', 'value'), error:'Invalid index: 0'
it 'should return true for append(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
assert store.append({appended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 1, 'array should have one source'
assert.equal store.array[0].appended, true
assert.equal store.array[0].__source, 'append'
# reset
store.array.pop()
it 'should return true for append(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
file = helperFile 'empty.json'
assert store.append(file)
assert.equal store.array.length, 1, 'array should have new object'
assert.deepEqual store.array[0].__source, {
file: file
format: 'json'
fn: 'append'
}
# reset
store.array.pop()
it 'should return true for prepend(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
assert store.prepend({prepended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 1, 'array should have one source'
assert.equal store.array[0].prepended, true
assert.equal store.array[0].__source, 'prepend'
# reset
store.array.pop()
it 'should return true for prepend(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
file = helperFile 'empty.json'
assert store.prepend(file)
assert.equal store.array.length, 1, 'array should have new object'
assert.deepEqual store.array[0].__source, {
file: file
format: 'json'
fn: 'prepend'
}
# reset
store.array.pop()
it 'should return removed:[] for shift()', ->
assert.deepEqual store.shift(), removed:[]
it 'should return removed:[] for pop()', ->
assert.deepEqual store.pop(), removed:[]
describe 'built with initial empty object', ->
store = buildStore [{}]
it 'should return one for count', ->
assert.equal store.count(), 1
it 'should return nada for get()', ->
assert.equal store.get('nada'), undefined
it 'should return false for has()', ->
assert.equal store.has('nada'), false
it 'should return -1 for in()', ->
assert.equal store.in('nada'), -1
it 'should return undefined type results for info()', ->
assert.deepEqual store.info('nada'), {
in:-1
key: '<KEY>'
value: undefined
overridden: false
}
it 'should return [] for all()', ->
assert.deepEqual store.all('nada'), []
it 'should return true for add() of new value', ->
assert.deepEqual store.add(), error:'No key specified'
assert.deepEqual store.add('key'), error:'No value specified'
assert.deepEqual store.add('new', 'value'), addedTo:undefined
assert.equal store.array[0].new, 'value'
it 'should return true for add() to existing value', ->
assert.deepEqual store.add('new', 'value2'), addedTo:'value'
assert.deepEqual store.array[0].new, [ 'value', 'value2' ]
# reset
delete store.array[0].new
it 'should return true for remove()', ->
store.array[0].out = 'remove'
assert.deepEqual store.remove('out'), removed:'remove'
assert.equal store.array[0].out, undefined, 'should have removed it'
it 'should return true for set() initial', ->
assert.equal store.array[0].over, undefined, 'should not be an "over" value'
assert.deepEqual store.set('over', 'value'), replaced:undefined
assert.equal store.array[0].over, 'value'
# reset
delete store.array[0].over
it 'should return true for set() overwrite', ->
store.array[0].over = 'value'
assert.equal store.array[0].over, 'value', 'should be a value to overwrite'
assert.deepEqual store.set('over', 'value2'), replaced:'value'
assert.equal store.array[0].over, 'value2'
# reset
delete store.array[0].over
it 'should return true for append(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
assert store.append({appended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 2, 'array should have two'
assert.equal store.array[0].__source, 'constructor'
assert.equal store.array[1].appended, true
assert.equal store.array[1].__source, 'append'
# reset
store.array.pop()
it 'should return true for append(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
file = helperFile 'empty.json'
assert store.append(file)
assert.equal store.array.length, 2, 'array should have two'
assert.deepEqual store.array[0].__source, 'constructor'
assert.deepEqual store.array[1].__source, {
file: file
format: 'json'
fn: 'append'
}
# reset
store.array.pop()
it 'should return true for prepend(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
assert store.prepend({prepended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 2, 'array should have two'
assert.deepEqual store.array[0].prepended, true
assert.deepEqual store.array[0].__source, 'prepend'
assert.deepEqual store.array[1].__source, 'constructor'
# reset
store.array.shift()
it 'should return true for prepend(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
file = helperFile 'empty.json'
assert store.prepend(file)
assert.equal store.array.length, 2, 'array should have two'
assert.deepEqual store.array[1].__source, 'constructor'
assert.deepEqual store.array[0].__source, {
file: file
format: 'json'
fn: 'prepend'
}
# reset
store.array.shift()
it 'should return object in `removed` for shift()', ->
hold = store.array[0]
assert.deepEqual store.shift(), removed:[hold]
# reset
store.array.push hold
it 'should return object in `removed` for pop()', ->
hold = store.array[0]
assert.deepEqual store.pop(), removed:[hold]
# reset
store.array.push hold
describe 'built with initial empty object', ->
store = buildStore [{}, {}, {}]
it 'should return one for count', ->
assert.equal store.count(), 3
it 'should return objects in `removed` for shift()', ->
array = [ {}, {}, {} ]
shifted = store.shift 3
assert.deepEqual shifted, removed:array
# reset
store.array.unshift.apply store.array, shifted.removed
it 'should return objects in `removed` for pop()', ->
array = [ {}, {}, {} ]
popped = store.pop 3
assert.deepEqual popped, removed:array
# reset
store.array.push.apply store.array, popped.removed
| true | assert = require 'assert'
corepath = require 'path'
buildStore = require '../../lib'
helperFile = (name) -> corepath.resolve __dirname, '..', 'helpers', name
describe 'test value store', ->
describe 'with bad arguments', ->
store = buildStore()
it 'to source() should return undefined', ->
assert.equal store.source(-1), undefined
assert.equal store.source(), undefined
assert.equal store.source(0), undefined
assert.equal store.source(99), undefined
it 'to get() should return undefined', ->
assert.equal store.get(), undefined
assert.equal store.get('nada'), undefined
assert.equal store.get(0), undefined
assert.equal store.get('nada', 5), undefined
assert.equal store.get(0,2), undefined
assert.equal store.get(0.1), undefined
assert.equal store.get(false), undefined
assert.equal store.get(true), undefined
it 'to has() should return false', ->
assert.equal store.has(), false
assert.equal store.has('nada'), false
assert.equal store.has(0), false
assert.equal store.has(0.1), false
assert.equal store.has(false), false
assert.equal store.has(true), false
assert.equal store.has('nada', 5), false
assert.equal store.has(0,2), false
it 'to in() should return -1', ->
assert.equal store.in(), -1
assert.equal store.in('nada'), -1
assert.equal store.in(0), -1
assert.equal store.in(0.1), -1
assert.equal store.in(false), -1
assert.equal store.in(true), -1
assert.equal store.in('nada', 5), -1
assert.equal store.in(0,2), -1
it 'to info() should return undefined type results', ->
assert.deepEqual store.info(), {
in: -1, key:undefined, value:undefined, overridden:false
}
assert.deepEqual store.info('nada'), {
in: -1, key:'PI:KEY:<KEY>END_PI', value:undefined, overridden:false
}
assert.deepEqual store.info(0), {
in: -1, key:0, value:undefined, overridden:false
}
assert.deepEqual store.info(0.1), {
in: -1, key:0.1, value:undefined, overridden:false
}
assert.deepEqual store.info(false), {
in: -1, key:false, value:undefined, overridden:false
}
assert.deepEqual store.info(true), {
in: -1, key:true, value:undefined, overridden:false
}
assert.deepEqual store.info('nada', 5), {
in: 5, key:'PI:KEY:<KEY>END_PI', value:undefined, overridden:false
}
assert.deepEqual store.info(0,2), {
in: 2, key:0, value:undefined, overridden:false
}
it 'to all() should return empty array', ->
empty = []
assert.deepEqual store.all(), empty
assert.deepEqual store.all('nada'), empty
assert.deepEqual store.all(0), empty
assert.deepEqual store.all(0.1), empty
assert.deepEqual store.all(false), empty
assert.deepEqual store.all(true), empty
it 'to add() should return error', ->
assert.deepEqual store.add(), error:'Invalid index: 0'
assert.deepEqual store.add('key'), error:'Invalid index: 0'
assert.deepEqual store.add('key', 'value'), error:'Invalid index: 0'
assert.deepEqual store.add('key', 'value', -1), error:'Invalid index: -1'
assert.deepEqual store.add('key', 'value', 99), error:'Invalid index: 99'
it 'to remove() should return error', ->
assert.deepEqual store.remove(), error:'Invalid index: 0'
assert.deepEqual store.remove('key', -1), error:'Invalid index: -1'
assert.deepEqual store.add('key', 'value'), error:'Invalid index: 0'
it 'to set() should return error', ->
assert.deepEqual store.set(), error:'Invalid index: 0'
assert.deepEqual store.set('key'), error:'Invalid index: PI:KEY:<KEY>END_PI'
assert.deepEqual store.set('key', 'value'), error:'Invalid index: 0'
assert.deepEqual store.set('key', 'value', -1), error:'Invalid index: -1'
assert.deepEqual store.set('key', 'value', 99), error:'Invalid index: 99'
it 'to append() should return error', ->
assert.deepEqual store.append(), {
error:'Must provide a string or object', value:undefined
}
assert.deepEqual store.append(0), {
error:'Must provide a string or object', value:0
}
assert.deepEqual store.append(0.1), {
error:'Must provide a string or object', value:0.1
}
assert.deepEqual store.append(true), {
error:'Must provide a string or object', value:true
}
assert.deepEqual store.append(false), {
error:'Must provide a string or object', value:false
}
assert.deepEqual store.append('bad'), {
error:'String must be a json or ini file'
}
assert.deepEqual store.append('./nonexistent.json'), {
exists: false,
error:'File doesn\'t exist: ' + corepath.resolve './nonexistent.json'
}
it 'to prepend() should return error', ->
assert.deepEqual store.prepend(), {
error:'Must provide a string or object', value:undefined
}
assert.deepEqual store.prepend(0), {
error:'Must provide a string or object', value:0
}
assert.deepEqual store.prepend(0.1), {
error:'Must provide a string or object', value:0.1
}
assert.deepEqual store.prepend(true), {
error:'Must provide a string or object', value:true
}
assert.deepEqual store.prepend(false), {
error:'Must provide a string or object', value:false
}
assert.deepEqual store.prepend('bad'), {
error:'String must be a json or ini file'
}
assert.deepEqual store.prepend('./nonexistent.json'), {
exists: false,
error:'File doesn\'t exist: ' + corepath.resolve './nonexistent.json'
}
it 'to shift() should return empty array', ->
empty = removed:[]
assert.deepEqual store.shift(-1), empty
assert.deepEqual store.shift(0), empty
assert.deepEqual store.shift(100), empty
it 'to pop() should return error', ->
empty = removed:[]
assert.deepEqual store.pop(-1), empty
assert.deepEqual store.pop(0), empty
assert.deepEqual store.pop(100), empty
describe 'built with no initial objects', ->
store = buildStore()
it 'should have an empty array', ->
assert store.array
assert.equal store.array.length, 0
it 'should return zero for count', ->
assert.equal store.count(), 0
it 'should return nada for get()', ->
assert.equal store.get('nada'), undefined
it 'should return false for has()', ->
assert.equal store.has('nada'), false
it 'should return -1 for in()', ->
assert.equal store.in('nada'), -1
it 'should return undefined type results for info()', ->
assert.deepEqual store.info('nada'), {
in:-1
key: 'PI:KEY:<KEY>END_PI'
value: undefined
overridden: false
}
it 'should return [] for all()', ->
assert.deepEqual store.all('nada'), []
it 'should return error for add() because there\'s no object', ->
assert.deepEqual store.add('key', 'value'), error:'Invalid index: 0'
it 'should return error for remove() because there\'s no object', ->
assert.deepEqual store.remove('key'), error:'Invalid index: 0'
it 'should return error for set() because there\'s no object', ->
assert.deepEqual store.set('key', 'value'), error:'Invalid index: 0'
it 'should return true for append(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
assert store.append({appended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 1, 'array should have one source'
assert.equal store.array[0].appended, true
assert.equal store.array[0].__source, 'append'
# reset
store.array.pop()
it 'should return true for append(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
file = helperFile 'empty.json'
assert store.append(file)
assert.equal store.array.length, 1, 'array should have new object'
assert.deepEqual store.array[0].__source, {
file: file
format: 'json'
fn: 'append'
}
# reset
store.array.pop()
it 'should return true for prepend(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
assert store.prepend({prepended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 1, 'array should have one source'
assert.equal store.array[0].prepended, true
assert.equal store.array[0].__source, 'prepend'
# reset
store.array.pop()
it 'should return true for prepend(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 0, 'array should be empty'
file = helperFile 'empty.json'
assert store.prepend(file)
assert.equal store.array.length, 1, 'array should have new object'
assert.deepEqual store.array[0].__source, {
file: file
format: 'json'
fn: 'prepend'
}
# reset
store.array.pop()
it 'should return removed:[] for shift()', ->
assert.deepEqual store.shift(), removed:[]
it 'should return removed:[] for pop()', ->
assert.deepEqual store.pop(), removed:[]
describe 'built with initial empty object', ->
store = buildStore [{}]
it 'should return one for count', ->
assert.equal store.count(), 1
it 'should return nada for get()', ->
assert.equal store.get('nada'), undefined
it 'should return false for has()', ->
assert.equal store.has('nada'), false
it 'should return -1 for in()', ->
assert.equal store.in('nada'), -1
it 'should return undefined type results for info()', ->
assert.deepEqual store.info('nada'), {
in:-1
key: 'PI:KEY:<KEY>END_PI'
value: undefined
overridden: false
}
it 'should return [] for all()', ->
assert.deepEqual store.all('nada'), []
it 'should return true for add() of new value', ->
assert.deepEqual store.add(), error:'No key specified'
assert.deepEqual store.add('key'), error:'No value specified'
assert.deepEqual store.add('new', 'value'), addedTo:undefined
assert.equal store.array[0].new, 'value'
it 'should return true for add() to existing value', ->
assert.deepEqual store.add('new', 'value2'), addedTo:'value'
assert.deepEqual store.array[0].new, [ 'value', 'value2' ]
# reset
delete store.array[0].new
it 'should return true for remove()', ->
store.array[0].out = 'remove'
assert.deepEqual store.remove('out'), removed:'remove'
assert.equal store.array[0].out, undefined, 'should have removed it'
it 'should return true for set() initial', ->
assert.equal store.array[0].over, undefined, 'should not be an "over" value'
assert.deepEqual store.set('over', 'value'), replaced:undefined
assert.equal store.array[0].over, 'value'
# reset
delete store.array[0].over
it 'should return true for set() overwrite', ->
store.array[0].over = 'value'
assert.equal store.array[0].over, 'value', 'should be a value to overwrite'
assert.deepEqual store.set('over', 'value2'), replaced:'value'
assert.equal store.array[0].over, 'value2'
# reset
delete store.array[0].over
it 'should return true for append(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
assert store.append({appended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 2, 'array should have two'
assert.equal store.array[0].__source, 'constructor'
assert.equal store.array[1].appended, true
assert.equal store.array[1].__source, 'append'
# reset
store.array.pop()
it 'should return true for append(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
file = helperFile 'empty.json'
assert store.append(file)
assert.equal store.array.length, 2, 'array should have two'
assert.deepEqual store.array[0].__source, 'constructor'
assert.deepEqual store.array[1].__source, {
file: file
format: 'json'
fn: 'append'
}
# reset
store.array.pop()
it 'should return true for prepend(object)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
assert store.prepend({prepended:true})
assert store.array, 'array should still exist'
assert.equal store.array.length, 2, 'array should have two'
assert.deepEqual store.array[0].prepended, true
assert.deepEqual store.array[0].__source, 'prepend'
assert.deepEqual store.array[1].__source, 'constructor'
# reset
store.array.shift()
it 'should return true for prepend(string)', ->
assert store.array, 'array should exist'
assert.equal store.array.length, 1, 'array should have one'
file = helperFile 'empty.json'
assert store.prepend(file)
assert.equal store.array.length, 2, 'array should have two'
assert.deepEqual store.array[1].__source, 'constructor'
assert.deepEqual store.array[0].__source, {
file: file
format: 'json'
fn: 'prepend'
}
# reset
store.array.shift()
it 'should return object in `removed` for shift()', ->
hold = store.array[0]
assert.deepEqual store.shift(), removed:[hold]
# reset
store.array.push hold
it 'should return object in `removed` for pop()', ->
hold = store.array[0]
assert.deepEqual store.pop(), removed:[hold]
# reset
store.array.push hold
describe 'built with initial empty object', ->
store = buildStore [{}, {}, {}]
it 'should return one for count', ->
assert.equal store.count(), 3
it 'should return objects in `removed` for shift()', ->
array = [ {}, {}, {} ]
shifted = store.shift 3
assert.deepEqual shifted, removed:array
# reset
store.array.unshift.apply store.array, shifted.removed
it 'should return objects in `removed` for pop()', ->
array = [ {}, {}, {} ]
popped = store.pop 3
assert.deepEqual popped, removed:array
# reset
store.array.push.apply store.array, popped.removed
|
[
{
"context": "#\n# gulpfile.coffee\n#\n# Copyright (c) 2016 Junpei Kawamoto\n#\n# This software is released under the MIT Licen",
"end": 58,
"score": 0.9998284578323364,
"start": 43,
"tag": "NAME",
"value": "Junpei Kawamoto"
}
] | gulpfile.coffee | jkawamoto/community-centre-search-fukuoka | 1 | #
# gulpfile.coffee
#
# Copyright (c) 2016 Junpei Kawamoto
#
# This software is released under the MIT License.
#
# http://opensource.org/licenses/mit-license.php
#
gulp = require "gulp"
coffee = require "gulp-coffee"
chmod = require "gulp-chmod"
inject = require "gulp-inject-string"
yaml = require "gulp-yaml"
del = require "del"
gulp.task "clean", ->
del ["./bin/**/*", "./lib/**/*"]
gulp.task "bin", ["clean"], ->
gulp.src "./src/cli.coffee"
.pipe coffee()
.pipe chmod 0o755
.pipe inject.prepend "#!/usr/bin/env node\n"
.pipe gulp.dest "./bin/"
gulp.task "lib", ["clean"], ->
gulp.src ["./src/*.coffee", "!./src/cli.coffee"]
.pipe coffee()
.pipe gulp.dest "./lib/"
gulp.task "build", ["bin", "lib"]
gulp.task "default", ["build"]
| 52136 | #
# gulpfile.coffee
#
# Copyright (c) 2016 <NAME>
#
# This software is released under the MIT License.
#
# http://opensource.org/licenses/mit-license.php
#
gulp = require "gulp"
coffee = require "gulp-coffee"
chmod = require "gulp-chmod"
inject = require "gulp-inject-string"
yaml = require "gulp-yaml"
del = require "del"
gulp.task "clean", ->
del ["./bin/**/*", "./lib/**/*"]
gulp.task "bin", ["clean"], ->
gulp.src "./src/cli.coffee"
.pipe coffee()
.pipe chmod 0o755
.pipe inject.prepend "#!/usr/bin/env node\n"
.pipe gulp.dest "./bin/"
gulp.task "lib", ["clean"], ->
gulp.src ["./src/*.coffee", "!./src/cli.coffee"]
.pipe coffee()
.pipe gulp.dest "./lib/"
gulp.task "build", ["bin", "lib"]
gulp.task "default", ["build"]
| true | #
# gulpfile.coffee
#
# Copyright (c) 2016 PI:NAME:<NAME>END_PI
#
# This software is released under the MIT License.
#
# http://opensource.org/licenses/mit-license.php
#
gulp = require "gulp"
coffee = require "gulp-coffee"
chmod = require "gulp-chmod"
inject = require "gulp-inject-string"
yaml = require "gulp-yaml"
del = require "del"
gulp.task "clean", ->
del ["./bin/**/*", "./lib/**/*"]
gulp.task "bin", ["clean"], ->
gulp.src "./src/cli.coffee"
.pipe coffee()
.pipe chmod 0o755
.pipe inject.prepend "#!/usr/bin/env node\n"
.pipe gulp.dest "./bin/"
gulp.task "lib", ["clean"], ->
gulp.src ["./src/*.coffee", "!./src/cli.coffee"]
.pipe coffee()
.pipe gulp.dest "./lib/"
gulp.task "build", ["bin", "lib"]
gulp.task "default", ["build"]
|
[
{
"context": "n} instance represents a Kaffee plugin.\n\n @author Fabian M. <mail.fabianm@gmail.com>\n###\nclass Plugin\n\t###\n\t",
"end": 248,
"score": 0.9998392462730408,
"start": 240,
"tag": "NAME",
"value": "Fabian M"
},
{
"context": "represents a Kaffee plugin.\n\n @author Fa... | kaffee/src/main/kaffee/plugin/plugin.coffee | fabianm/kaffee | 1 | Path = require 'path'
Goal = require './goal'
Request = require '../execution/request'
Result = require '../execution/result'
EventManager = require '../event/manager'
###
A {@link Plugin} instance represents a Kaffee plugin.
@author Fabian M. <mail.fabianm@gmail.com>
###
class Plugin
###
Constructs a new {@link Plugin} instance.
@since 0.2.1
@param name The name of this {@link Plugin}.
@param project The {@link Project} of this {@link Plugin}.
@param configuration The configuration of this {@link Plugin}.
###
constructor: (@name, @project, @configuration = {}) ->
@goals = []
@event = new EventManager "plugin-#{ @name }", project.getEventManager(), this
###
Loads this plugin.
@since 0.3.0
###
load: ->
@event.fire "enter", this
@logger = @getLogger()
try
# Modify path.
module.paths = process.mainModule.paths.concat module.paths, [Path.join @project.getConfiguration().getWorkspace().getPath(), "node_modules"]
obj = require @getModule()
throw "Module #{ @getModule() } isn't a valid module." if typeof obj isnt 'function'
obj.call this, @configuration
catch e
@event.getLogger().error e
return
@logger = undefined
@event.fire "leave", this
true
###
Returns the name of the module of this {@link Plugin}.
@since 0.3.3
@return The name of the module of this {@link Plugin}.
###
getModule: -> @configuration.module or @name
###
Returns the aliases of this {@link Plugin}.
@since 0.3.3
@return The aliases of this {@link Plugin}.
###
getAliases: -> @configuration.alias or []
###
Returns the name of this {@link Plugin}.
@since 0.2.1
@return The name of this {@link Plugin}.
###
getName: -> @name
###
Returns the {@link Project} of this {@link Plugin}.
@since 0.2.1
@return The {@link Project} of this {@link Plugin}.
###
getProject: -> @project
###
Returns the configuration of this {@link Plugin}.
@since 0.2.1
@return The configuration of this {@link Plugin}.
###
getConfiguration: -> @configuration
###
Returns the {@link Goal}s of this {@link Plugin}.
@since 0.2.1
@return The {@link Goal}s of this {@link Plugin}.
###
getGoals: -> @goals
###
Returns the {@link EventManager} of this {@link Plugin}.
@since 0.3.0
@return The {@link EventManager} of this {@link Plugin}.
###
getEventManager: -> @event
###
Returns the logging object of this {@link Plugin}.
@since 0.3.1
@return The logging object of this {@link Plugin}.
###
getLogger: -> @getEventManager().getLogger()
###
Returns a {@link Goal} of this {@link Plugin}.
@since 0.3.0
@param name The name of the goal to get.
@return The {@link Goal}.
###
getGoal: (name) -> return goal for goal in @goals when goal.getName() is name
###
Determines if this {@link Plugin} has a {@link Goal}.
@since 0.2.1
@param name The name of the {@link Goal}.
@return <code>true</code> if this {@link Plugin} has this {@link Goal}, <code>false</code> otherwise.
###
hasGoal: (name) -> !!@getGoal name
###
Determines if this {@link Plugin} has defined an archtype.
@since 0.3.3
@return <code>true</code> if this {@link Plugin} has defined an archtype, <code>false</code> otherwise.
###
hasArchtype: -> !!@archtype
###
Returns the archtype of this {@link Plugin}.
@since 0.3.3
@return The archtype of this {@link Plugin} if it has one.
###
getArchtype: -> @archtype
###
Defines an archtype.
@since 0.3.3
@param archtype The archtype to define.
###
archtype: (archtype) -> @archtype = archtype if typeof archtype is 'object'
###
Registers a goal.
@since 0.3.0
@param name The name of the goal to register.
@param call The function of this goal.
###
register: (name, call) -> @goals.push new Goal(this, name, call)
module.exports = Plugin
| 6058 | Path = require 'path'
Goal = require './goal'
Request = require '../execution/request'
Result = require '../execution/result'
EventManager = require '../event/manager'
###
A {@link Plugin} instance represents a Kaffee plugin.
@author <NAME>. <<EMAIL>>
###
class Plugin
###
Constructs a new {@link Plugin} instance.
@since 0.2.1
@param name The name of this {@link Plugin}.
@param project The {@link Project} of this {@link Plugin}.
@param configuration The configuration of this {@link Plugin}.
###
constructor: (@name, @project, @configuration = {}) ->
@goals = []
@event = new EventManager "plugin-#{ @name }", project.getEventManager(), this
###
Loads this plugin.
@since 0.3.0
###
load: ->
@event.fire "enter", this
@logger = @getLogger()
try
# Modify path.
module.paths = process.mainModule.paths.concat module.paths, [Path.join @project.getConfiguration().getWorkspace().getPath(), "node_modules"]
obj = require @getModule()
throw "Module #{ @getModule() } isn't a valid module." if typeof obj isnt 'function'
obj.call this, @configuration
catch e
@event.getLogger().error e
return
@logger = undefined
@event.fire "leave", this
true
###
Returns the name of the module of this {@link Plugin}.
@since 0.3.3
@return The name of the module of this {@link Plugin}.
###
getModule: -> @configuration.module or @name
###
Returns the aliases of this {@link Plugin}.
@since 0.3.3
@return The aliases of this {@link Plugin}.
###
getAliases: -> @configuration.alias or []
###
Returns the name of this {@link Plugin}.
@since 0.2.1
@return The name of this {@link Plugin}.
###
getName: -> @name
###
Returns the {@link Project} of this {@link Plugin}.
@since 0.2.1
@return The {@link Project} of this {@link Plugin}.
###
getProject: -> @project
###
Returns the configuration of this {@link Plugin}.
@since 0.2.1
@return The configuration of this {@link Plugin}.
###
getConfiguration: -> @configuration
###
Returns the {@link Goal}s of this {@link Plugin}.
@since 0.2.1
@return The {@link Goal}s of this {@link Plugin}.
###
getGoals: -> @goals
###
Returns the {@link EventManager} of this {@link Plugin}.
@since 0.3.0
@return The {@link EventManager} of this {@link Plugin}.
###
getEventManager: -> @event
###
Returns the logging object of this {@link Plugin}.
@since 0.3.1
@return The logging object of this {@link Plugin}.
###
getLogger: -> @getEventManager().getLogger()
###
Returns a {@link Goal} of this {@link Plugin}.
@since 0.3.0
@param name The name of the goal to get.
@return The {@link Goal}.
###
getGoal: (name) -> return goal for goal in @goals when goal.getName() is name
###
Determines if this {@link Plugin} has a {@link Goal}.
@since 0.2.1
@param name The name of the {@link Goal}.
@return <code>true</code> if this {@link Plugin} has this {@link Goal}, <code>false</code> otherwise.
###
hasGoal: (name) -> !!@getGoal name
###
Determines if this {@link Plugin} has defined an archtype.
@since 0.3.3
@return <code>true</code> if this {@link Plugin} has defined an archtype, <code>false</code> otherwise.
###
hasArchtype: -> !!@archtype
###
Returns the archtype of this {@link Plugin}.
@since 0.3.3
@return The archtype of this {@link Plugin} if it has one.
###
getArchtype: -> @archtype
###
Defines an archtype.
@since 0.3.3
@param archtype The archtype to define.
###
archtype: (archtype) -> @archtype = archtype if typeof archtype is 'object'
###
Registers a goal.
@since 0.3.0
@param name The name of the goal to register.
@param call The function of this goal.
###
register: (name, call) -> @goals.push new Goal(this, name, call)
module.exports = Plugin
| true | Path = require 'path'
Goal = require './goal'
Request = require '../execution/request'
Result = require '../execution/result'
EventManager = require '../event/manager'
###
A {@link Plugin} instance represents a Kaffee plugin.
@author PI:NAME:<NAME>END_PI. <PI:EMAIL:<EMAIL>END_PI>
###
class Plugin
###
Constructs a new {@link Plugin} instance.
@since 0.2.1
@param name The name of this {@link Plugin}.
@param project The {@link Project} of this {@link Plugin}.
@param configuration The configuration of this {@link Plugin}.
###
constructor: (@name, @project, @configuration = {}) ->
@goals = []
@event = new EventManager "plugin-#{ @name }", project.getEventManager(), this
###
Loads this plugin.
@since 0.3.0
###
load: ->
@event.fire "enter", this
@logger = @getLogger()
try
# Modify path.
module.paths = process.mainModule.paths.concat module.paths, [Path.join @project.getConfiguration().getWorkspace().getPath(), "node_modules"]
obj = require @getModule()
throw "Module #{ @getModule() } isn't a valid module." if typeof obj isnt 'function'
obj.call this, @configuration
catch e
@event.getLogger().error e
return
@logger = undefined
@event.fire "leave", this
true
###
Returns the name of the module of this {@link Plugin}.
@since 0.3.3
@return The name of the module of this {@link Plugin}.
###
getModule: -> @configuration.module or @name
###
Returns the aliases of this {@link Plugin}.
@since 0.3.3
@return The aliases of this {@link Plugin}.
###
getAliases: -> @configuration.alias or []
###
Returns the name of this {@link Plugin}.
@since 0.2.1
@return The name of this {@link Plugin}.
###
getName: -> @name
###
Returns the {@link Project} of this {@link Plugin}.
@since 0.2.1
@return The {@link Project} of this {@link Plugin}.
###
getProject: -> @project
###
Returns the configuration of this {@link Plugin}.
@since 0.2.1
@return The configuration of this {@link Plugin}.
###
getConfiguration: -> @configuration
###
Returns the {@link Goal}s of this {@link Plugin}.
@since 0.2.1
@return The {@link Goal}s of this {@link Plugin}.
###
getGoals: -> @goals
###
Returns the {@link EventManager} of this {@link Plugin}.
@since 0.3.0
@return The {@link EventManager} of this {@link Plugin}.
###
getEventManager: -> @event
###
Returns the logging object of this {@link Plugin}.
@since 0.3.1
@return The logging object of this {@link Plugin}.
###
getLogger: -> @getEventManager().getLogger()
###
Returns a {@link Goal} of this {@link Plugin}.
@since 0.3.0
@param name The name of the goal to get.
@return The {@link Goal}.
###
getGoal: (name) -> return goal for goal in @goals when goal.getName() is name
###
Determines if this {@link Plugin} has a {@link Goal}.
@since 0.2.1
@param name The name of the {@link Goal}.
@return <code>true</code> if this {@link Plugin} has this {@link Goal}, <code>false</code> otherwise.
###
hasGoal: (name) -> !!@getGoal name
###
Determines if this {@link Plugin} has defined an archtype.
@since 0.3.3
@return <code>true</code> if this {@link Plugin} has defined an archtype, <code>false</code> otherwise.
###
hasArchtype: -> !!@archtype
###
Returns the archtype of this {@link Plugin}.
@since 0.3.3
@return The archtype of this {@link Plugin} if it has one.
###
getArchtype: -> @archtype
###
Defines an archtype.
@since 0.3.3
@param archtype The archtype to define.
###
archtype: (archtype) -> @archtype = archtype if typeof archtype is 'object'
###
Registers a goal.
@since 0.3.0
@param name The name of the goal to register.
@param call The function of this goal.
###
register: (name, call) -> @goals.push new Goal(this, name, call)
module.exports = Plugin
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9983279705047607,
"start": 12,
"tag": "NAME",
"value": "Joyent"
},
{
"context": "perty(prop) will break.\n# See: https://github.com/joyent/node/issues/1707\nhasOwnProperty = (... | lib/repl.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# A repl library that you can include in your own code to get a runtime
# * interface to your program.
# *
# * var repl = require("repl");
# * // start repl on stdin
# * repl.start("prompt> ");
# *
# * // listen for unix socket connections and start repl on them
# * net.createServer(function(socket) {
# * repl.start("node via Unix socket> ", socket);
# * }).listen("/tmp/node-repl-sock");
# *
# * // listen for TCP socket connections and start repl on them
# * net.createServer(function(socket) {
# * repl.start("node via TCP socket> ", socket);
# * }).listen(5001);
# *
# * // expose foo to repl context
# * repl.start("node > ").context.foo = "stdin is fun";
#
# If obj.hasOwnProperty has been overridden, then calling
# obj.hasOwnProperty(prop) will break.
# See: https://github.com/joyent/node/issues/1707
hasOwnProperty = (obj, prop) ->
Object::hasOwnProperty.call obj, prop
# hack for require.resolve("./relative") to work properly.
# hack for repl require to work properly with node_modules folders
# Can overridden with custom print functions, such as `probe` or `eyes.js`.
# This is the default "writer" value if none is passed in the REPL options.
REPLServer = (prompt, stream, eval_, useGlobal, ignoreUndefined) ->
# an options object was given
# just for backwards compat, see github.com/joyent/node/pull/7127
defaultEval = (code, context, file, cb) ->
err = undefined
result = undefined
# first, create the Script object to check the syntax
try
script = vm.createScript(code,
filename: file
displayErrors: false
)
catch e
debug "parse error %j", code, e
if isRecoverableError(e)
err = new Recoverable(e)
else
err = e
unless err
try
if self.useGlobal
result = script.runInThisContext(displayErrors: false)
else
result = script.runInContext(context,
displayErrors: false
)
catch e
err = e
if err and process.domain
debug "not recoverable, send to domain"
process.domain.emit "error", err
process.domain.exit()
return
cb err, result
return
# legacy API, passing a 'stream'/'socket' option
# use stdin and stdout as the default streams if none were given
# We're given custom object with 2 streams, or the `process` object
# We're given a duplex readable/writable Stream, like a `net.Socket`
complete = (text, callback) ->
self.complete text, callback
return
return new REPLServer(prompt, stream, eval_, useGlobal, ignoreUndefined) unless this instanceof REPLServer
options = undefined
input = undefined
output = undefined
dom = undefined
if util.isObject(prompt)
options = prompt
stream = options.stream or options.socket
input = options.input
output = options.output
eval_ = options.eval
useGlobal = options.useGlobal
ignoreUndefined = options.ignoreUndefined
prompt = options.prompt
dom = options.domain
else unless util.isString(prompt)
throw new Error("An options Object, or a prompt String are required")
else
options = {}
self = this
self._domain = dom or domain.create()
self.useGlobal = !!useGlobal
self.ignoreUndefined = !!ignoreUndefined
self.rli = this
eval_ = eval_ or defaultEval
self.eval = self._domain.bind(eval_)
self._domain.on "error", (e) ->
debug "domain error"
self.outputStream.write (e.stack or e) + "\n"
self.bufferedCommand = ""
self.lines.level = []
self.displayPrompt()
return
if not input and not output
stream = process unless stream
if stream.stdin and stream.stdout
input = stream.stdin
output = stream.stdout
else
input = stream
output = stream
self.inputStream = input
self.outputStream = output
self.resetContext()
self.bufferedCommand = ""
self.lines.level = []
rl.Interface.apply this, [
self.inputStream
self.outputStream
complete
options.terminal
]
self.setPrompt (if not util.isUndefined(prompt) then prompt else "> ")
@commands = {}
defineDefaultCommands this
# figure out which "writer" function to use
self.writer = options.writer or exports.writer
options.useColors = self.terminal if util.isUndefined(options.useColors)
self.useColors = !!options.useColors
if self.useColors and self.writer is util.inspect
# Turn on ANSI coloring.
self.writer = (obj, showHidden, depth) ->
util.inspect obj, showHidden, depth, true
self.setPrompt self._prompt
self.on "close", ->
self.emit "exit"
return
sawSIGINT = false
self.on "SIGINT", ->
empty = self.line.length is 0
self.clearLine()
if not (self.bufferedCommand and self.bufferedCommand.length > 0) and empty
if sawSIGINT
self.close()
sawSIGINT = false
return
self.output.write "(^C again to quit)\n"
sawSIGINT = true
else
sawSIGINT = false
self.bufferedCommand = ""
self.lines.level = []
self.displayPrompt()
return
self.on "line", (cmd) ->
# Check to see if a REPL keyword was used. If it returns true,
# display next prompt and return.
# It's confusing for `{ a : 1 }` to be interpreted as a block
# statement rather than an object literal. So, we first try
# to wrap it in parentheses, so that it will be interpreted as
# an expression.
# otherwise we just append a \n so that it will be either
# terminated, or continued onto the next expression if it's an
# unexpected end of input.
finish = (e, ret) ->
debug "finish", e, ret
self.memory cmd
if e and not self.bufferedCommand and cmd.trim().match(/^npm /)
self.outputStream.write "npm should be run outside of the " + "node repl, in your normal shell.\n" + "(Press Control-D to exit.)\n"
self.bufferedCommand = ""
self.displayPrompt()
return
# If error was SyntaxError and not JSON.parse error
if e
if e instanceof Recoverable
# Start buffering data like that:
# {
# ... x: 1
# ... }
self.bufferedCommand += cmd + "\n"
self.displayPrompt()
return
else
self._domain.emit "error", e
# Clear buffer if no SyntaxErrors
self.bufferedCommand = ""
# If we got any output - print it (if no error)
if not e and (not self.ignoreUndefined or not util.isUndefined(ret))
self.context._ = ret
self.outputStream.write self.writer(ret) + "\n"
# Display prompt again
self.displayPrompt()
return
debug "line %j", cmd
sawSIGINT = false
skipCatchall = false
cmd = trimWhitespace(cmd)
if cmd and cmd.charAt(0) is "." and isNaN(parseFloat(cmd))
matches = cmd.match(/^\.([^\s]+)\s*(.*)$/)
keyword = matches and matches[1]
rest = matches and matches[2]
if self.parseREPLKeyword(keyword, rest) is true
return
else
self.outputStream.write "Invalid REPL keyword\n"
skipCatchall = true
unless skipCatchall
evalCmd = self.bufferedCommand + cmd
if /^\s*\{/.test(evalCmd) and /\}\s*$/.test(evalCmd)
evalCmd = "(" + evalCmd + ")\n"
else
evalCmd = evalCmd + "\n"
debug "eval %j", evalCmd
self.eval evalCmd, self.context, "repl", finish
else
finish null
return
self.on "SIGCONT", ->
self.displayPrompt true
return
self.displayPrompt()
return
# prompt is a string to print on each line for the prompt,
# source is a stream to use for I/O, defaulting to stdin/stdout.
# make built-in modules available directly
# (loaded lazily)
# allow the creation of other globals with this name
# Allow REPL extensions to extend the new context
# Do not overwrite `_initialPrompt` here
# When invoked as an API method, overwrite _initialPrompt
# A stream to push an array into a REPL
# used in REPLServer.complete
# A minimal stream backed by an array of lines.  complete() uses it to
# replay previously-entered input into a nested REPL so that local
# variables can be probed for tab completion.
ArrayStream = ->
  Stream.call this
  # Emit each entry of `data` as one "data" event, newline-terminated.
  @run = (data) ->
    stream = this
    for line in data
      stream.emit "data", line + "\n"
    return
  return
# Provide a list of completions for the given leading text. This is
# given to the readline interface for handling tab completion.
#
# Example:
# complete('var foo = util.')
# -> [['util.print', 'util.debug', 'util.log', 'util.inspect', 'util.pump'],
# 'util.' ]
#
# Warning: This eval's code like "foo.bar.baz", so it will run property
# getter code.
# There may be local variables to evaluate, try a nested REPL
# Get a new array of inputed lines
# Kill off all function declarations to push all local variables into
# global scope
# make a new "input" stream
# make a nested REPL
# eval the flattened code
# all this is only profitable if the nested REPL
# does not have a bufferedCommand
# list of completion lists, one for each inheritance "level"
# REPL commands (e.g. ".break").
# require('...<Tab>')
# Exclude versioned names that 'npm' installs.
# Handle variable member lookup.
# We support simple chained expressions like the following (no function
# calls, etc.). That is for simplicity and also because we *eval* that
# leading expression so for safety (see WARNING above) don't want to
# eval function calls.
#
# foo.bar<|> # completions for 'foo' with filter 'bar'
# spam.eggs.<|> # completions for 'spam.eggs' with filter ''
# foo<|> # all scope vars with filter 'foo'
# foo.<|> # completions for 'foo' with filter ''
# Resolve expr and get its completions.
# If context is instance of vm.ScriptContext
# Get global vars synchronously
# Add grouped globals
# if (e) console.log(e);
# works for non-objects
# Circular refs possible? Let's guard against that.
#console.log("completion error walking prototype chain:" + e);
# Will be called when all completionGroups are in place
# Useful for async autocompletion
# Filter, sort (within each group), uniq and merge the completion groups.
# unique completions across all groups
# Completion group 0 is the "closest"
# (least far up the inheritance chain)
# so we put its completions last: to be closest in the REPL.
# separator btwn groups
###*
Used to parse and execute the Node REPL commands.
@param {keyword} keyword The command entered to check.
@return {Boolean} If true it means don't continue parsing the command.
###
# save the line so I can do magic later
# TODO should I tab the level?
# I don't want to not change the format too much...
# I need to know "depth."
# Because I can not tell the difference between a } that
# closes an object literal and a } that closes a function
# going down is { and ( e.g. function() {
# going up is } and )
# going... down.
# push the line#, depth count, and if the line is a function.
# Since JS only has functional scope I only need to remove
# "function() {" lines, clearly this will not work for
# "function()
# {" but nothing should break, only tab completion for local
# scope will not work for this function.
# going... up.
#more to go, recurse
#remove and push back
# it is possible to determine a syntax error at this point.
# if the REPL still has a bufferedCommand and
# self.lines.level.length === 0
# TODO? keep a log of level so that any syntax breaking lines can
# be cleared on .break and in the case of a syntax error?
# TODO? if a log was kept, then I could clear the bufferedComand and
# eval these lines and throw the syntax error
# Push completion groups for the standard ECMAScript global names.
#
# completionGroups - array of string arrays collected by complete()
# filter           - current completion prefix; keyword completions are
#                    only offered when non-empty, because on an empty
#                    line they would just get in the way
addStandardGlobals = (completionGroups, filter) ->
  # Global object properties
  # (http://www.ecma-international.org/publications/standards/Ecma-262.htm)
  globalNames = [
    "NaN", "Infinity", "undefined", "eval", "parseInt", "parseFloat",
    "isNaN", "isFinite", "decodeURI", "decodeURIComponent", "encodeURI",
    "encodeURIComponent", "Object", "Function", "Array", "String",
    "Boolean", "Number", "Date", "RegExp", "Error", "EvalError",
    "RangeError", "ReferenceError", "SyntaxError", "TypeError",
    "URIError", "Math", "JSON"
  ]
  completionGroups.push globalNames
  # Common keywords, excluded for completion on the empty string.
  if filter
    keywordNames = [
      "break", "case", "catch", "const", "continue", "debugger",
      "default", "delete", "do", "else", "export", "false", "finally",
      "for", "function", "if", "import", "in", "instanceof", "let",
      "new", "null", "return", "switch", "this", "throw", "true",
      "try", "typeof", "undefined", "var", "void", "while", "with",
      "yield"
    ]
    completionGroups.push keywordNames
  return
# Register the built-in dot-commands (.break, .clear, .exit, .help,
# .save, .load) on the given REPLServer instance.
defineDefaultCommands = (repl) ->
  # TODO remove me after 0.3.x
  repl.defineCommand "break",
    help: "Sometimes you get stuck, this gets you out"
    action: ->
      # Drop any partially-entered multi-line command.
      @bufferedCommand = ""
      @displayPrompt()
      return
  clearMessage = undefined
  if repl.useGlobal
    # With a shared global context there is nothing local to clear.
    clearMessage = "Alias for .break"
  else
    clearMessage = "Break, and also clear the local context"
  repl.defineCommand "clear",
    help: clearMessage
    action: ->
      @bufferedCommand = ""
      unless @useGlobal
        @outputStream.write "Clearing context...\n"
        @resetContext()
      @displayPrompt()
      return
  repl.defineCommand "exit",
    help: "Exit the repl"
    action: ->
      @close()
      return
  repl.defineCommand "help",
    help: "Show repl options"
    action: ->
      self = this
      # List every registered command with its help text, sorted by name.
      Object.keys(@commands).sort().forEach (name) ->
        cmd = self.commands[name]
        self.outputStream.write name + "\t" + (cmd.help or "") + "\n"
        return
      @displayPrompt()
      return
  repl.defineCommand "save",
    help: "Save all evaluated commands in this REPL session to a file"
    action: (file) ->
      try
        # @lines is the session history recorded by memory()
        fs.writeFileSync file, @lines.join("\n") + "\n"
        @outputStream.write "Session saved to:" + file + "\n"
      catch e
        @outputStream.write "Failed to save:" + file + "\n"
      @displayPrompt()
      return
  repl.defineCommand "load",
    help: "Load JS from a file into the REPL session"
    action: (file) ->
      try
        stats = fs.statSync(file)
        if stats and stats.isFile()
          self = this
          data = fs.readFileSync(file, "utf8")
          lines = data.split("\n")
          @displayPrompt()
          # Replay each non-empty line as if the user had typed it.
          lines.forEach (line) ->
            self.write line + "\n" if line
            return
      catch e
        @outputStream.write "Failed to load:" + file + "\n"
      @displayPrompt()
      return
  return
# Strip leading and trailing whitespace from a REPL input line.
#
# cmd - the raw line entered by the user.
# Returns the trimmed command, or "" when nothing matched.
trimWhitespace = (cmd) ->
  # The capture is non-greedy so the trailing \s* can actually consume
  # trailing whitespace.  The previous greedy (.+) swallowed trailing
  # spaces into the capture, so trailing whitespace was never trimmed.
  trimmer = /^\s*(.+?)\s*$/m
  matches = trimmer.exec(cmd)
  return matches[1] if matches and matches.length is 2
  ""
# Escape every character that is significant inside a RegExp so `s`
# can be embedded in a pattern and match itself literally.
regexpEscape = (s) ->
  escaped = s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&")
  return escaped
###*
Converts commands that use var and function <name>() to use the
local exports.context when evaled. This provides a local context
on the REPL.
@param {String} cmd The cmd to convert.
@return {String} The converted command.
###
# Replaces: var foo = "bar"; with: self.context.foo = bar;
# Replaces: function foo() {}; with: foo = function foo() {};
# If the error is that we've unexpectedly ended the input,
# then let the user try to recover by adding more input.
# A syntax error is "recoverable" when it looks like the user has simply
# not finished typing: an unexpected end of input, or the `token :` that
# appears when an object literal is split across lines.  The REPL then
# buffers more input instead of reporting the error.
isRecoverableError = (e) ->
  return false unless e
  return false unless e.name is "SyntaxError"
  /^(Unexpected end of input|Unexpected token :)/.test e.message
# Wrapper marking a SyntaxError as recoverable, so the "line" handler
# keeps buffering multi-line input instead of reporting the error.
Recoverable = (err) ->
  this.err = err
  return
"use strict"
util = require("util")
inherits = require("util").inherits
Stream = require("stream")
vm = require("vm")
path = require("path")
fs = require("fs")
rl = require("readline")
Console = require("console").Console
domain = require("domain")
debug = util.debuglog("repl")
module.filename = path.resolve("repl")
module.paths = require("module")._nodeModulePaths(module.filename)
exports.writer = util.inspect
exports._builtinLibs = [
"assert"
"buffer"
"child_process"
"cluster"
"crypto"
"dgram"
"dns"
"domain"
"events"
"fs"
"http"
"https"
"net"
"os"
"path"
"punycode"
"querystring"
"readline"
"stream"
"string_decoder"
"tls"
"tty"
"url"
"util"
"v8"
"vm"
"zlib"
"smalloc"
]
# REPLServer extends the readline Interface so it can drive the terminal.
inherits REPLServer, rl.Interface
exports.REPLServer = REPLServer
# Create and return a new REPLServer.  The first REPL ever started is
# cached on `exports.repl` as the default instance.
exports.start = (prompt, source, eval_, useGlobal, ignoreUndefined) ->
  instance = new REPLServer(prompt, source, eval_, useGlobal, ignoreUndefined)
  exports.repl = instance unless exports.repl
  instance
# Build the context REPL code is evaluated in.  With useGlobal set the
# process global object is used directly; otherwise a fresh vm context
# is created and seeded with copies of the current globals.
REPLServer::createContext = ->
  context = undefined
  if @useGlobal
    context = global
  else
    context = vm.createContext()
    # copy the existing globals into the sandboxed context
    for i of global
      context[i] = global[i]
    context.console = new Console(@outputStream)
    # allow the creation of other globals with this name
    context.global = context
    context.global.global = context
  context.module = module
  context.require = require
  # history bookkeeping used by memory() / .save / complete()
  @lines = []
  @lines.level = []
  # make built-in modules available directly (loaded lazily)
  exports._builtinLibs.forEach (name) ->
    Object.defineProperty context, name,
      get: ->
        lib = require(name)
        context._ = context[name] = lib
        lib
      set: (val) ->
        # a user assignment replaces the lazy accessor entirely
        delete context[name]
        context[name] = val
        return
      configurable: true
    return
  context
# Replace the evaluation context with a fresh one.  The "reset" event
# allows REPL extensions to extend the new context.
REPLServer::resetContext = ->
  @context = @createContext()
  @emit "reset", @context
  return
# Write the prompt, switching to a "..." continuation prompt (with one
# ".." per open nesting level recorded by memory()) while a multi-line
# command is buffered.
REPLServer::displayPrompt = (preserveCursor) ->
  prompt = @_initialPrompt
  if @bufferedCommand.length
    prompt = "..."
    levelInd = new Array(@lines.level.length).join("..")
    prompt += levelInd + " "
  # Do not overwrite `_initialPrompt` here; call the readline base class
  # directly so the continuation prompt is transient.
  REPLServer.super_::setPrompt.call this, prompt
  @prompt preserveCursor
  return
# When invoked as an API method, overwrite _initialPrompt so that
# displayPrompt() restores this prompt after continuation lines.
REPLServer::setPrompt = setPrompt = (prompt) ->
  @_initialPrompt = prompt
  REPLServer.super_::setPrompt.call this, prompt
  return
util.inherits ArrayStream, Stream
ArrayStream::readable = true
ArrayStream::writable = true
# No-op stream plumbing: ArrayStream only ever emits, it never buffers.
ArrayStream::resume = ->
ArrayStream::write = ->
# Matches require('...') up to the (partial) module path being completed.
requireRE = /\brequire\s*\(['"](([\w\.\/-]+\/)?([\w\.\/-]*))/
# Matches a simple dotted expression (foo.bar.baz) at the end of a line.
simpleExpressionRE = /(([a-zA-Z_$](?:\w|\$)*)\.)*([a-zA-Z_$](?:\w|\$)*)\.?$/
# Provide a list of completions for the given leading text. This is
# given to the readline interface for handling tab completion.
#
# Example:
#   complete('var foo = util.')
#     -> [['util.print', 'util.debug', 'util.log', 'util.inspect'],
#         'util.' ]
#
# Warning: This eval's code like "foo.bar.baz", so it will run property
# getter code.
REPLServer::complete = (line, callback) ->
  # Called once all completionGroups are in place (possibly async).
  # Filters, sorts (within each group), uniq's and merges the groups,
  # then invokes callback with [completions, completeOn].
  completionGroupsLoaded = (err) ->
    throw err if err
    if completionGroups.length and filter
      # keep only entries that start with the current filter prefix
      newCompletionGroups = []
      i = 0
      while i < completionGroups.length
        group = completionGroups[i].filter((elem) ->
          elem.indexOf(filter) is 0
        )
        newCompletionGroups.push group if group.length
        i++
      completionGroups = newCompletionGroups
    if completionGroups.length
      # `uniq` tracks completions already emitted across all groups.
      uniq = {}
      completions = []
      # Completion group 0 is the "closest" (least far up the
      # inheritance chain) so its completions are put last, to be
      # closest in the REPL listing.
      i = completionGroups.length - 1
      while i >= 0
        group = completionGroups[i]
        group.sort()
        j = 0
        while j < group.length
          c = group[j]
          unless hasOwnProperty(uniq, c)
            completions.push c
            uniq[c] = true
          j++
        # empty string acts as a separator between groups
        completions.push ""
        i--
      # strip trailing separators
      completions.pop() while completions.length and completions[completions.length - 1] is ""
    callback null, [
      completions or []
      completeOn
    ]
    return
  # There may be local variables to evaluate: replay the session into a
  # nested REPL with function bodies blanked out, so locals are pushed
  # into its global scope, then complete against that instead.
  if not util.isUndefined(@bufferedCommand) and @bufferedCommand.length
    tmp = @lines.slice()
    @lines.level.forEach (kill) ->
      tmp[kill.line] = "" if kill.isFunction
      return
    flat = new ArrayStream()
    magic = new REPLServer("", flat)
    magic.context = magic.createContext()
    flat.run tmp
    # only profitable if the nested REPL has no bufferedCommand
    return magic.complete(line, callback) unless magic.bufferedCommand
  completions = undefined
  # list of completion lists, one for each inheritance "level"
  completionGroups = []
  completeOn = undefined
  match = undefined
  filter = undefined
  i = undefined
  group = undefined
  c = undefined
  match = null
  # REPL commands (e.g. ".break").
  match = line.match(/^\s*(\.\w*)$/)
  if match
    completionGroups.push Object.keys(@commands)
    completeOn = match[1]
    filter = match[1] if match[1].length > 1
    completionGroupsLoaded()
  # require('...<Tab>')
  else if match = line.match(requireRE)
    exts = Object.keys(require.extensions)
    indexRe = new RegExp("^index(" + exts.map(regexpEscape).join("|") + ")$")
    completeOn = match[1]
    subdir = match[2] or ""
    filter = match[1]
    dir = undefined
    files = undefined
    f = undefined
    name = undefined
    base = undefined
    ext = undefined
    abs = undefined
    subfiles = undefined
    s = undefined
    group = []
    paths = module.paths.concat(require("module").globalPaths)
    i = 0
    while i < paths.length
      dir = path.resolve(paths[i], subdir)
      try
        files = fs.readdirSync(dir)
      catch e
        # NOTE(review): `continue` here skips the i++ below; in the
        # original JS this was a for-loop so the increment still ran.
        # Looks like an unreadable path could loop forever — verify.
        continue
      f = 0
      while f < files.length
        name = files[f]
        ext = path.extname(name)
        base = name.slice(0, -ext.length)
        # Exclude versioned names that 'npm' installs.
        # NOTE(review): same `continue`-in-while concern as above (f++).
        continue if base.match(/-\d+\.\d+(\.\d+)?/) or name is ".npm"
        if exts.indexOf(ext) isnt -1
          group.push subdir + base if not subdir or base isnt "index"
        else
          abs = path.resolve(dir, name)
          try
            if fs.statSync(abs).isDirectory()
              group.push subdir + name + "/"
              subfiles = fs.readdirSync(abs)
              s = 0
              while s < subfiles.length
                # directory with an index file is requirable by name
                group.push subdir + name if indexRe.test(subfiles[s])
                s++
        f++
      i++
    completionGroups.push group if group.length
    completionGroups.push exports._builtinLibs unless subdir
    completionGroupsLoaded()
  # Handle variable member lookup.
  # Only simple chained expressions are supported (no function calls),
  # both for simplicity and because the leading expression is eval'd
  # (see WARNING above).
  #
  #   foo.bar<|>     # completions for 'foo' with filter 'bar'
  #   spam.eggs.<|>  # completions for 'spam.eggs' with filter ''
  #   foo<|>         # all scope vars with filter 'foo'
  #   foo.<|>        # completions for 'foo' with filter ''
  else if line.length is 0 or line[line.length - 1].match(/\w|\.|\$/)
    match = simpleExpressionRE.exec(line)
    if line.length is 0 or match
      expr = undefined
      completeOn = ((if match then match[0] else ""))
      if line.length is 0
        filter = ""
        expr = ""
      else if line[line.length - 1] is "."
        filter = ""
        expr = match[0].slice(0, match[0].length - 1)
      else
        bits = match[0].split(".")
        filter = bits.pop()
        expr = bits.join(".")
      memberGroups = []
      unless expr
        # no leading expression: complete against the scope itself
        if @useGlobal or @context.constructor and @context.constructor.name is "Context"
          # Get global vars synchronously, walking the prototype chain.
          contextProto = @context
          completionGroups.push Object.getOwnPropertyNames(contextProto) while contextProto = Object.getPrototypeOf(contextProto)
          completionGroups.push Object.getOwnPropertyNames(@context)
          addStandardGlobals completionGroups, filter
          completionGroupsLoaded()
        else
          # custom eval function: ask it for its scope via ".scope"
          @eval ".scope", @context, "repl", (err, globals) ->
            if err or not globals
              addStandardGlobals completionGroups, filter
            else if util.isArray(globals[0])
              # Add grouped globals
              globals.forEach (group) ->
                completionGroups.push group
                return
            else
              completionGroups.push globals
              addStandardGlobals completionGroups, filter
            completionGroupsLoaded()
            return
      else
        # Resolve expr and get its completions.
        @eval expr, @context, "repl", (e, obj) ->
          # if (e) console.log(e);
          if obj?
            memberGroups.push Object.getOwnPropertyNames(obj) if util.isObject(obj) or util.isFunction(obj)
            # works for non-objects
            try
              # Circular refs possible? Guard with a sentinel.
              sentinel = 5
              p = undefined
              if util.isObject(obj) or util.isFunction(obj)
                p = Object.getPrototypeOf(obj)
              else
                p = (if obj.constructor then obj.constructor:: else null)
              until util.isNull(p)
                memberGroups.push Object.getOwnPropertyNames(p)
                p = Object.getPrototypeOf(p)
                sentinel--
                break if sentinel <= 0
          if memberGroups.length
            # qualify each member with the leading expression
            i = 0
            while i < memberGroups.length
              completionGroups.push memberGroups[i].map((member) ->
                expr + "." + member
              )
              i++
            filter = expr + "." + filter if filter
          completionGroupsLoaded()
          return
    else
      completionGroupsLoaded()
  else
    completionGroupsLoaded()
  return
###*
Used to parse and execute the Node REPL commands.
@param {keyword} keyword The command entered to check.
@return {Boolean} If true it means don't continue parsing the command.
###
REPLServer::parseREPLKeyword = (keyword, rest) ->
  handler = @commands[keyword]
  return false unless handler
  handler.action.call this, rest
  true
# Register a dot-command.  `cmd` is either an action function or an
# object of the form {help, action}; the action must be a function.
REPLServer::defineCommand = (keyword, cmd) ->
  if util.isFunction(cmd)
    cmd = action: cmd
  else unless util.isFunction(cmd.action)
    throw new Error("bad argument, action must be a function")
  @commands[keyword] = cmd
  return
# Record an input line and track brace/paren nesting depth, so that
# displayPrompt() can indent continuation prompts and complete() can
# blank out function bodies when probing local scope.
REPLServer::memory = memory = (cmd) ->
  self = this
  self.lines = self.lines or []
  self.lines.level = self.lines.level or []
  # save the line so I can do magic later
  # TODO should I tab the level?
  if cmd
    self.lines.push new Array(self.lines.level.length).join(" ") + cmd
  else
    self.lines.push ""
  # I need to know "depth." Note: we cannot tell the difference between
  # a } that closes an object literal and a } that closes a function.
  if cmd
    # going down is { and ( e.g. function() {
    dw = cmd.match(/{|\(/g)
    # going up is } and )
    up = cmd.match(/}|\)/g)
    up = (if up then up.length else 0)
    dw = (if dw then dw.length else 0)
    depth = dw - up
    if depth
      (workIt = ->
        if depth > 0
          # going... down.
          # push the line#, depth count, and if the line is a function.
          # Since JS only has functional scope, only "function() {"
          # lines need removing later; "function()\n{" will not be
          # detected, which only degrades local tab completion.
          self.lines.level.push
            line: self.lines.length - 1
            depth: depth
            isFunction: /\s*function\s*/.test(cmd)
        else if depth < 0
          # going... up.
          curr = self.lines.level.pop()
          if curr
            tmp = curr.depth + depth
            if tmp < 0
              # more to go, recurse
              depth += curr.depth
              workIt()
            else if tmp > 0
              # remove and push back
              curr.depth += depth
              self.lines.level.push curr
        return
      )()
  else
    # A blank line resets the nesting bookkeeping.  At this point a
    # syntax error could be detected (bufferedCommand set while
    # self.lines.level.length is 0), but that is left as a TODO.
    self.lines.level = []
  return
###*
Converts commands that use var and function <name>() to use the
local exports.context when evaled. This provides a local context
on the REPL.
@param {String} cmd The cmd to convert.
@return {String} The converted command.
###
REPLServer::convertToContext = (cmd) ->
  self = this
  matches = undefined
  scopeVar = /^\s*var\s*([_\w\$]+)(.*)$/m
  scopeFunc = /^\s*function\s*([_\w\$]+)/
  # Replaces: var foo = "bar"; with: self.context.foo = bar;
  matches = scopeVar.exec(cmd)
  return "self.context." + matches[1] + matches[2] if matches and matches.length is 3
  # Replaces: function foo() {}; with: foo = function foo() {};
  matches = scopeFunc.exec(self.bufferedCommand)
  return matches[1] + " = " + self.bufferedCommand if matches and matches.length is 2
  cmd
# Recoverable errors behave like SyntaxError instances to callers.
inherits Recoverable, SyntaxError
| 213580 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# A repl library that you can include in your own code to get a runtime
# * interface to your program.
# *
# * var repl = require("repl");
# * // start repl on stdin
# * repl.start("prompt> ");
# *
# * // listen for unix socket connections and start repl on them
# * net.createServer(function(socket) {
# * repl.start("node via Unix socket> ", socket);
# * }).listen("/tmp/node-repl-sock");
# *
# * // listen for TCP socket connections and start repl on them
# * net.createServer(function(socket) {
# * repl.start("node via TCP socket> ", socket);
# * }).listen(5001);
# *
# * // expose foo to repl context
# * repl.start("node > ").context.foo = "stdin is fun";
#
# If obj.hasOwnProperty has been overridden, then calling
# obj.hasOwnProperty(prop) will break.
# See: https://github.com/joyent/node/issues/1707
# Safe hasOwnProperty: if obj.hasOwnProperty has been overridden, calling
# obj.hasOwnProperty(prop) directly would break.
# See: https://github.com/joyent/node/issues/1707
hasOwnProperty = (obj, prop) ->
  Object.prototype.hasOwnProperty.call obj, prop
# hack for require.resolve("./relative") to work properly.
# hack for repl require to work properly with node_modules folders
# Can overridden with custom print functions, such as `probe` or `eyes.js`.
# This is the default "writer" value if none is passed in the REPL options.
# REPLServer constructor.
#
# prompt          - a prompt String, or an options Object (stream/socket,
#                   input, output, eval, useGlobal, ignoreUndefined,
#                   prompt, domain, terminal, writer, useColors)
# stream          - I/O stream(s); defaults to process stdin/stdout
# eval_           - optional custom eval function
# useGlobal       - evaluate against the real global object
# ignoreUndefined - suppress printing of `undefined` results
REPLServer = (prompt, stream, eval_, useGlobal, ignoreUndefined) ->
  # Default evaluator: parse first (to classify syntax errors as
  # recoverable or not), then run in the chosen context.
  defaultEval = (code, context, file, cb) ->
    err = undefined
    result = undefined
    # first, create the Script object to check the syntax
    try
      script = vm.createScript(code,
        filename: file
        displayErrors: false
      )
    catch e
      debug "parse error %j", code, e
      if isRecoverableError(e)
        # user may just be part-way through typing; keep buffering
        err = new Recoverable(e)
      else
        err = e
    unless err
      try
        if self.useGlobal
          result = script.runInThisContext(displayErrors: false)
        else
          result = script.runInContext(context,
            displayErrors: false
          )
      catch e
        err = e
        if err and process.domain
          debug "not recoverable, send to domain"
          process.domain.emit "error", err
          process.domain.exit()
          return
    cb err, result
    return
  # tab-completion callback handed to the readline Interface
  complete = (text, callback) ->
    self.complete text, callback
    return
  # allow calling without `new`
  return new REPLServer(prompt, stream, eval_, useGlobal, ignoreUndefined) unless this instanceof REPLServer
  options = undefined
  input = undefined
  output = undefined
  dom = undefined
  if util.isObject(prompt)
    # an options object was given
    options = prompt
    # legacy API, passing a 'stream'/'socket' option
    stream = options.stream or options.socket
    input = options.input
    output = options.output
    eval_ = options.eval
    useGlobal = options.useGlobal
    ignoreUndefined = options.ignoreUndefined
    prompt = options.prompt
    dom = options.domain
  else unless util.isString(prompt)
    throw new Error("An options Object, or a prompt String are required")
  else
    # just for backwards compat, see github.com/joyent/node/pull/7127
    options = {}
  self = this
  self._domain = dom or domain.create()
  self.useGlobal = !!useGlobal
  self.ignoreUndefined = !!ignoreUndefined
  self.rli = this
  eval_ = eval_ or defaultEval
  # run the evaluator inside our domain so thrown errors are trapped
  self.eval = self._domain.bind(eval_)
  self._domain.on "error", (e) ->
    debug "domain error"
    self.outputStream.write (e.stack or e) + "\n"
    self.bufferedCommand = ""
    self.lines.level = []
    self.displayPrompt()
    return
  if not input and not output
    # use stdin and stdout as the default streams if none were given
    stream = process unless stream
    if stream.stdin and stream.stdout
      # a custom object with 2 streams, or the `process` object
      input = stream.stdin
      output = stream.stdout
    else
      # a duplex readable/writable Stream, like a `net.Socket`
      input = stream
      output = stream
  self.inputStream = input
  self.outputStream = output
  self.resetContext()
  self.bufferedCommand = ""
  self.lines.level = []
  rl.Interface.apply this, [
    self.inputStream
    self.outputStream
    complete
    options.terminal
  ]
  self.setPrompt (if not util.isUndefined(prompt) then prompt else "> ")
  @commands = {}
  defineDefaultCommands this
  # figure out which "writer" function to use
  self.writer = options.writer or exports.writer
  options.useColors = self.terminal if util.isUndefined(options.useColors)
  self.useColors = !!options.useColors
  if self.useColors and self.writer is util.inspect
    # Turn on ANSI coloring.
    self.writer = (obj, showHidden, depth) ->
      util.inspect obj, showHidden, depth, true
  self.setPrompt self._prompt
  self.on "close", ->
    self.emit "exit"
    return
  sawSIGINT = false
  self.on "SIGINT", ->
    empty = self.line.length is 0
    self.clearLine()
    if not (self.bufferedCommand and self.bufferedCommand.length > 0) and empty
      # second ^C on an empty line closes the REPL
      if sawSIGINT
        self.close()
        sawSIGINT = false
        return
      self.output.write "(^C again to quit)\n"
      sawSIGINT = true
    else
      sawSIGINT = false
    self.bufferedCommand = ""
    self.lines.level = []
    self.displayPrompt()
    return
  self.on "line", (cmd) ->
    # Completion of an evaluation: record the line, report errors (or
    # keep buffering for recoverable ones), print the result.
    finish = (e, ret) ->
      debug "finish", e, ret
      self.memory cmd
      if e and not self.bufferedCommand and cmd.trim().match(/^npm /)
        self.outputStream.write "npm should be run outside of the " + "node repl, in your normal shell.\n" + "(Press Control-D to exit.)\n"
        self.bufferedCommand = ""
        self.displayPrompt()
        return
      # If error was SyntaxError and not JSON.parse error
      if e
        if e instanceof Recoverable
          # Start buffering data like that:
          # {
          # ...   x: 1
          # ... }
          self.bufferedCommand += cmd + "\n"
          self.displayPrompt()
          return
        else
          self._domain.emit "error", e
      # Clear buffer if no SyntaxErrors
      self.bufferedCommand = ""
      # If we got any output - print it (if no error)
      if not e and (not self.ignoreUndefined or not util.isUndefined(ret))
        self.context._ = ret
        self.outputStream.write self.writer(ret) + "\n"
      # Display prompt again
      self.displayPrompt()
      return
    debug "line %j", cmd
    sawSIGINT = false
    skipCatchall = false
    cmd = trimWhitespace(cmd)
    # Check to see if a REPL keyword was used. If it returns true,
    # display next prompt and return.
    if cmd and cmd.charAt(0) is "." and isNaN(parseFloat(cmd))
      matches = cmd.match(/^\.([^\s]+)\s*(.*)$/)
      keyword = matches and matches[1]
      rest = matches and matches[2]
      if self.parseREPLKeyword(keyword, rest) is true
        return
      else
        self.outputStream.write "Invalid REPL keyword\n"
        skipCatchall = true
    unless skipCatchall
      evalCmd = self.bufferedCommand + cmd
      if /^\s*\{/.test(evalCmd) and /\}\s*$/.test(evalCmd)
        # It's confusing for `{ a : 1 }` to be interpreted as a block
        # statement rather than an object literal, so wrap it in
        # parentheses to force expression interpretation.
        evalCmd = "(" + evalCmd + ")\n"
      else
        # otherwise just append \n so it is either terminated, or
        # continued onto the next expression on unexpected end of input
        evalCmd = evalCmd + "\n"
      debug "eval %j", evalCmd
      self.eval evalCmd, self.context, "repl", finish
    else
      finish null
    return
  self.on "SIGCONT", ->
    self.displayPrompt true
    return
  self.displayPrompt()
  return
# prompt is a string to print on each line for the prompt,
# source is a stream to use for I/O, defaulting to stdin/stdout.
# make built-in modules available directly
# (loaded lazily)
# allow the creation of other globals with this name
# Allow REPL extensions to extend the new context
# Do not overwrite `_initialPrompt` here
# When invoked as an API method, overwrite _initialPrompt
# A stream to push an array into a REPL
# used in REPLServer.complete
# A minimal stream backed by an array of lines.  complete() uses it to
# replay previously-entered input into a nested REPL so that local
# variables can be probed for tab completion.
ArrayStream = ->
  Stream.call this
  # Emit each entry of `data` as one "data" event, newline-terminated.
  @run = (data) ->
    stream = this
    for line in data
      stream.emit "data", line + "\n"
    return
  return
# Provide a list of completions for the given leading text. This is
# given to the readline interface for handling tab completion.
#
# Example:
# complete('var foo = util.')
# -> [['util.print', 'util.debug', 'util.log', 'util.inspect', 'util.pump'],
# 'util.' ]
#
# Warning: This eval's code like "foo.bar.baz", so it will run property
# getter code.
# There may be local variables to evaluate, try a nested REPL
# Get a new array of inputed lines
# Kill off all function declarations to push all local variables into
# global scope
# make a new "input" stream
# make a nested REPL
# eval the flattened code
# all this is only profitable if the nested REPL
# does not have a bufferedCommand
# list of completion lists, one for each inheritance "level"
# REPL commands (e.g. ".break").
# require('...<Tab>')
# Exclude versioned names that 'npm' installs.
# Handle variable member lookup.
# We support simple chained expressions like the following (no function
# calls, etc.). That is for simplicity and also because we *eval* that
# leading expression so for safety (see WARNING above) don't want to
# eval function calls.
#
# foo.bar<|> # completions for 'foo' with filter 'bar'
# spam.eggs.<|> # completions for 'spam.eggs' with filter ''
# foo<|> # all scope vars with filter 'foo'
# foo.<|> # completions for 'foo' with filter ''
# Resolve expr and get its completions.
# If context is instance of vm.ScriptContext
# Get global vars synchronously
# Add grouped globals
# if (e) console.log(e);
# works for non-objects
# Circular refs possible? Let's guard against that.
#console.log("completion error walking prototype chain:" + e);
# Will be called when all completionGroups are in place
# Useful for async autocompletion
# Filter, sort (within each group), uniq and merge the completion groups.
# unique completions across all groups
# Completion group 0 is the "closest"
# (least far up the inheritance chain)
# so we put its completions last: to be closest in the REPL.
# separator btwn groups
###*
Used to parse and execute the Node REPL commands.
@param {keyword} keyword The command entered to check.
@return {Boolean} If true it means don't continue parsing the command.
###
# save the line so I can do magic later
# TODO should I tab the level?
# I don't want to not change the format too much...
# I need to know "depth."
# Because I can not tell the difference between a } that
# closes an object literal and a } that closes a function
# going down is { and ( e.g. function() {
# going up is } and )
# going... down.
# push the line#, depth count, and if the line is a function.
# Since JS only has functional scope I only need to remove
# "function() {" lines, clearly this will not work for
# "function()
# {" but nothing should break, only tab completion for local
# scope will not work for this function.
# going... up.
#more to go, recurse
#remove and push back
# it is possible to determine a syntax error at this point.
# if the REPL still has a bufferedCommand and
# self.lines.level.length === 0
# TODO? keep a log of level so that any syntax breaking lines can
# be cleared on .break and in the case of a syntax error?
# TODO? if a log was kept, then I could clear the bufferedComand and
# eval these lines and throw the syntax error
# Push completion groups for the standard ECMAScript global names.
#
# completionGroups - array of string arrays collected by complete()
# filter           - current completion prefix; keyword completions are
#                    only offered when non-empty, because on an empty
#                    line they would just get in the way
addStandardGlobals = (completionGroups, filter) ->
  # Global object properties
  # (http://www.ecma-international.org/publications/standards/Ecma-262.htm)
  globalNames = [
    "NaN", "Infinity", "undefined", "eval", "parseInt", "parseFloat",
    "isNaN", "isFinite", "decodeURI", "decodeURIComponent", "encodeURI",
    "encodeURIComponent", "Object", "Function", "Array", "String",
    "Boolean", "Number", "Date", "RegExp", "Error", "EvalError",
    "RangeError", "ReferenceError", "SyntaxError", "TypeError",
    "URIError", "Math", "JSON"
  ]
  completionGroups.push globalNames
  # Common keywords, excluded for completion on the empty string.
  if filter
    keywordNames = [
      "break", "case", "catch", "const", "continue", "debugger",
      "default", "delete", "do", "else", "export", "false", "finally",
      "for", "function", "if", "import", "in", "instanceof", "let",
      "new", "null", "return", "switch", "this", "throw", "true",
      "try", "typeof", "undefined", "var", "void", "while", "with",
      "yield"
    ]
    completionGroups.push keywordNames
  return
# Register the built-in dot-commands (.break, .clear, .exit, .help,
# .save, .load) on the given REPLServer instance.
defineDefaultCommands = (repl) ->
  # TODO remove me after 0.3.x
  repl.defineCommand "break",
    help: "Sometimes you get stuck, this gets you out"
    action: ->
      # Drop any partially-entered multi-line command.
      @bufferedCommand = ""
      @displayPrompt()
      return
  clearMessage = undefined
  if repl.useGlobal
    # With a shared global context there is nothing local to clear.
    clearMessage = "Alias for .break"
  else
    clearMessage = "Break, and also clear the local context"
  repl.defineCommand "clear",
    help: clearMessage
    action: ->
      @bufferedCommand = ""
      unless @useGlobal
        @outputStream.write "Clearing context...\n"
        @resetContext()
      @displayPrompt()
      return
  repl.defineCommand "exit",
    help: "Exit the repl"
    action: ->
      @close()
      return
  repl.defineCommand "help",
    help: "Show repl options"
    action: ->
      self = this
      # List every registered command with its help text, sorted by name.
      Object.keys(@commands).sort().forEach (name) ->
        cmd = self.commands[name]
        self.outputStream.write name + "\t" + (cmd.help or "") + "\n"
        return
      @displayPrompt()
      return
  repl.defineCommand "save",
    help: "Save all evaluated commands in this REPL session to a file"
    action: (file) ->
      try
        # @lines is the session history recorded by memory()
        fs.writeFileSync file, @lines.join("\n") + "\n"
        @outputStream.write "Session saved to:" + file + "\n"
      catch e
        @outputStream.write "Failed to save:" + file + "\n"
      @displayPrompt()
      return
  repl.defineCommand "load",
    help: "Load JS from a file into the REPL session"
    action: (file) ->
      try
        stats = fs.statSync(file)
        if stats and stats.isFile()
          self = this
          data = fs.readFileSync(file, "utf8")
          lines = data.split("\n")
          @displayPrompt()
          # Replay each non-empty line as if the user had typed it.
          lines.forEach (line) ->
            self.write line + "\n" if line
            return
      catch e
        @outputStream.write "Failed to load:" + file + "\n"
      @displayPrompt()
      return
  return
# Strip leading and trailing whitespace from a REPL input line.
#
# cmd - the raw line entered by the user.
# Returns the trimmed command, or "" when nothing matched.
trimWhitespace = (cmd) ->
  # The capture is non-greedy so the trailing \s* can actually consume
  # trailing whitespace.  The previous greedy (.+) swallowed trailing
  # spaces into the capture, so trailing whitespace was never trimmed.
  trimmer = /^\s*(.+?)\s*$/m
  matches = trimmer.exec(cmd)
  return matches[1] if matches and matches.length is 2
  ""
# Escape every character that is significant inside a RegExp so `s`
# can be embedded in a pattern and match itself literally.
regexpEscape = (s) ->
  escaped = s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&")
  return escaped
###*
Converts commands that use var and function <name>() to use the
local exports.context when evaled. This provides a local context
on the REPL.
@param {String} cmd The cmd to convert.
@return {String} The converted command.
###
# Replaces: var foo = "bar"; with: self.context.foo = bar;
# Replaces: function foo() {}; with: foo = function foo() {};
# If the error is that we've unexpectedly ended the input,
# then let the user try to recover by adding more input.
# A syntax error is "recoverable" when it looks like the user has simply
# not finished typing (unexpected end of input, or the `token :` from an
# object literal split across lines); the REPL then buffers more input
# instead of reporting the error.
isRecoverableError = (e) ->
  return false unless e
  return false unless e.name is "SyntaxError"
  /^(Unexpected end of input|Unexpected token :)/.test e.message
# Wrapper marking a SyntaxError as recoverable, so the "line" handler
# keeps buffering multi-line input instead of reporting the error.
Recoverable = (err) ->
  this.err = err
  return
"use strict"
util = require("util")
inherits = require("util").inherits
Stream = require("stream")
vm = require("vm")
path = require("path")
fs = require("fs")
rl = require("readline")
Console = require("console").Console
domain = require("domain")
debug = util.debuglog("repl")
module.filename = path.resolve("repl")
module.paths = require("module")._nodeModulePaths(module.filename)
exports.writer = util.inspect
exports._builtinLibs = [
"assert"
"buffer"
"child_process"
"cluster"
"crypto"
"dgram"
"dns"
"domain"
"events"
"fs"
"http"
"https"
"net"
"os"
"path"
"punycode"
"querystring"
"readline"
"stream"
"string_decoder"
"tls"
"tty"
"url"
"util"
"v8"
"vm"
"zlib"
"smalloc"
]
# REPLServer extends the readline Interface so it can drive the terminal.
inherits REPLServer, rl.Interface
exports.REPLServer = REPLServer
# Create and return a new REPLServer.  The first REPL ever started is
# cached on `exports.repl` as the default instance.
exports.start = (prompt, source, eval_, useGlobal, ignoreUndefined) ->
  instance = new REPLServer(prompt, source, eval_, useGlobal, ignoreUndefined)
  exports.repl = instance unless exports.repl
  instance
# Build the context REPL code is evaluated in.  With useGlobal set the
# process global object is used directly; otherwise a fresh vm context
# is created and seeded with copies of the current globals.
REPLServer::createContext = ->
  context = undefined
  if @useGlobal
    context = global
  else
    context = vm.createContext()
    # copy the existing globals into the sandboxed context
    for i of global
      context[i] = global[i]
    context.console = new Console(@outputStream)
    # allow the creation of other globals with this name
    context.global = context
    context.global.global = context
  context.module = module
  context.require = require
  # history bookkeeping used by memory() / .save / complete()
  @lines = []
  @lines.level = []
  # make built-in modules available directly (loaded lazily)
  exports._builtinLibs.forEach (name) ->
    Object.defineProperty context, name,
      get: ->
        lib = require(name)
        context._ = context[name] = lib
        lib
      set: (val) ->
        # a user assignment replaces the lazy accessor entirely
        delete context[name]
        context[name] = val
        return
      configurable: true
    return
  context
# Replace the evaluation context with a fresh one.  The "reset" event
# allows REPL extensions to extend the new context.
REPLServer::resetContext = ->
  @context = @createContext()
  @emit "reset", @context
  return
# Write the prompt, switching to a "..." continuation prompt (with one
# ".." per open nesting level recorded by memory()) while a multi-line
# command is buffered.
REPLServer::displayPrompt = (preserveCursor) ->
  prompt = @_initialPrompt
  if @bufferedCommand.length
    prompt = "..."
    levelInd = new Array(@lines.level.length).join("..")
    prompt += levelInd + " "
  # Do not overwrite `_initialPrompt` here; call the readline base class
  # directly so the continuation prompt is transient.
  REPLServer.super_::setPrompt.call this, prompt
  @prompt preserveCursor
  return
# When invoked as an API method, overwrite _initialPrompt so that
# displayPrompt() restores this prompt after continuation lines.
REPLServer::setPrompt = setPrompt = (prompt) ->
  @_initialPrompt = prompt
  REPLServer.super_::setPrompt.call this, prompt
  return
# ArrayStream poses as a readable/writable no-op stream; complete() uses
# it to replay buffered lines into a nested REPL (constructor defined
# further down this file).
util.inherits ArrayStream, Stream
ArrayStream::readable = true
ArrayStream::writable = true
ArrayStream::resume = ->
ArrayStream::write = ->
# matches `require('<partial>` for module-name tab completion
requireRE = /\brequire\s*\(['"](([\w\.\/-]+\/)?([\w\.\/-]*))/
# matches a trailing dotted identifier chain like `foo.bar.` or `foo.ba`
simpleExpressionRE = /(([a-zA-Z_$](?:\w|\$)*)\.)*([a-zA-Z_$](?:\w|\$)*)\.?$/
# Tab completion for `line`; invokes `callback(err, [completions, completeOn])`.
# Handles three shapes of input: REPL commands (".help"), `require('...`
# paths, and dotted expression chains.
# WARNING: completing `foo.bar.` evaluates `foo.bar`, running property getters.
REPLServer::complete = (line, callback) ->
  # Called once all completion groups are collected: filter by the current
  # word, sort within groups, de-duplicate across groups, then hand off.
  completionGroupsLoaded = (err) ->
    throw err if err
    if completionGroups.length and filter
      newCompletionGroups = []
      i = 0
      while i < completionGroups.length
        group = completionGroups[i].filter((elem) ->
          elem.indexOf(filter) is 0
        )
        newCompletionGroups.push group if group.length
        i++
      completionGroups = newCompletionGroups
    if completionGroups.length
      uniq = {}
      completions = []
      # walk groups from farthest up the inheritance chain to closest, so
      # the closest group's entries end up last (nearest in the REPL)
      i = completionGroups.length - 1
      while i >= 0
        group = completionGroups[i]
        group.sort()
        j = 0
        while j < group.length
          c = group[j]
          unless hasOwnProperty(uniq, c)
            completions.push c
            uniq[c] = true
          j++
        completions.push ""
        i--
      completions.pop() while completions.length and completions[completions.length - 1] is ""
    callback null, [
      completions or []
      completeOn
    ]
    return
  if not util.isUndefined(@bufferedCommand) and @bufferedCommand.length
    # mid multi-line command: replay the buffered lines (with function
    # bodies blanked out) through a nested REPL and complete there
    tmp = @lines.slice()
    @lines.level.forEach (kill) ->
      tmp[kill.line] = "" if kill.isFunction
      return
    flat = new ArrayStream()
    magic = new REPLServer("", flat)
    magic.context = magic.createContext()
    flat.run tmp
    return magic.complete(line, callback) unless magic.bufferedCommand
  completions = undefined
  completionGroups = []
  completeOn = undefined
  match = undefined
  filter = undefined
  i = undefined
  group = undefined
  c = undefined
  match = null
  match = line.match(/^\s*(\.\w*)$/)
  if match
    # ".<partial>" — complete against the registered REPL commands
    completionGroups.push Object.keys(@commands)
    completeOn = match[1]
    filter = match[1] if match[1].length > 1
    completionGroupsLoaded()
  else if match = line.match(requireRE)
    # require('<partial>') — scan the module search paths on disk
    exts = Object.keys(require.extensions)
    indexRe = new RegExp("^index(" + exts.map(regexpEscape).join("|") + ")$")
    completeOn = match[1]
    subdir = match[2] or ""
    filter = match[1]
    dir = undefined
    files = undefined
    f = undefined
    name = undefined
    base = undefined
    ext = undefined
    abs = undefined
    subfiles = undefined
    s = undefined
    group = []
    paths = module.paths.concat(require("module").globalPaths)
    i = 0
    while i < paths.length
      dir = path.resolve(paths[i], subdir)
      try
        files = fs.readdirSync(dir)
      catch e
        continue
      f = 0
      while f < files.length
        name = files[f]
        ext = path.extname(name)
        base = name.slice(0, -ext.length)
        # skip versioned installs (e.g. foo-1.2.3) and the legacy .npm dir
        continue if base.match(/-\d+\.\d+(\.\d+)?/) or name is ".npm"
        if exts.indexOf(ext) isnt -1
          group.push subdir + base if not subdir or base isnt "index"
        else
          abs = path.resolve(dir, name)
          try
            if fs.statSync(abs).isDirectory()
              group.push subdir + name + "/"
              subfiles = fs.readdirSync(abs)
              s = 0
              while s < subfiles.length
                group.push subdir + name if indexRe.test(subfiles[s])
                s++
        f++
      i++
    completionGroups.push group if group.length
    completionGroups.push exports._builtinLibs unless subdir
    completionGroupsLoaded()
  else if line.length is 0 or line[line.length - 1].match(/\w|\.|\$/)
    # dotted expression chain — evaluate the leading part, list its members
    match = simpleExpressionRE.exec(line)
    if line.length is 0 or match
      expr = undefined
      completeOn = ((if match then match[0] else ""))
      if line.length is 0
        filter = ""
        expr = ""
      else if line[line.length - 1] is "."
        filter = ""
        expr = match[0].slice(0, match[0].length - 1)
      else
        bits = match[0].split(".")
        filter = bits.pop()
        expr = bits.join(".")
      memberGroups = []
      unless expr
        # no leading expression: complete against scope/global names
        if @useGlobal or @context.constructor and @context.constructor.name is "Context"
          contextProto = @context
          completionGroups.push Object.getOwnPropertyNames(contextProto) while contextProto = Object.getPrototypeOf(contextProto)
          completionGroups.push Object.getOwnPropertyNames(@context)
          addStandardGlobals completionGroups, filter
          completionGroupsLoaded()
        else
          @eval ".scope", @context, "repl", (err, globals) ->
            if err or not globals
              addStandardGlobals completionGroups, filter
            else if util.isArray(globals[0])
              globals.forEach (group) ->
                completionGroups.push group
                return
            else
              completionGroups.push globals
              addStandardGlobals completionGroups, filter
            completionGroupsLoaded()
            return
      else
        # NOTE: this eval runs user property getters (see warning above)
        @eval expr, @context, "repl", (e, obj) ->
          if obj?
            memberGroups.push Object.getOwnPropertyNames(obj) if util.isObject(obj) or util.isFunction(obj)
            try
              # sentinel guards against circular prototype chains
              sentinel = 5
              p = undefined
              if util.isObject(obj) or util.isFunction(obj)
                p = Object.getPrototypeOf(obj)
              else
                p = (if obj.constructor then obj.constructor:: else null)
              until util.isNull(p)
                memberGroups.push Object.getOwnPropertyNames(p)
                p = Object.getPrototypeOf(p)
                sentinel--
                break if sentinel <= 0
          if memberGroups.length
            i = 0
            while i < memberGroups.length
              completionGroups.push memberGroups[i].map((member) ->
                expr + "." + member
              )
              i++
            filter = expr + "." + filter if filter
          completionGroupsLoaded()
          return
    else
      completionGroupsLoaded()
  else
    completionGroupsLoaded()
  return
REPLServer::parseREPLKeyword = (keyword, rest) ->
  # Look up and run a registered REPL command (e.g. ".help").
  # Returns true when the keyword was recognized and handled.
  handler = @commands[keyword]
  return false unless handler
  handler.action.call this, rest
  true
REPLServer::defineCommand = (keyword, cmd) ->
  # Register a REPL command. Accepts either a bare action function or an
  # object with an `action` property (and optional `help` text).
  if util.isFunction(cmd)
    cmd = action: cmd
  else if not util.isFunction(cmd.action)
    throw new Error("bad argument, action must be a function")
  @commands[keyword] = cmd
  return
# Record `cmd` in the line history and track brace/paren nesting depth so
# displayPrompt() can size the "..." continuation prompt and complete()
# can blank out function-body lines (see the notes further down the file).
REPLServer::memory = memory = (cmd) ->
  self = this
  self.lines = self.lines or []
  self.lines.level = self.lines.level or []
  if cmd
    self.lines.push new Array(self.lines.level.length).join(" ") + cmd
  else
    self.lines.push ""
  if cmd
    # count openers ({ and () versus closers (} and )) on this line
    dw = cmd.match(/{|\(/g)
    up = cmd.match(/}|\)/g)
    up = (if up then up.length else 0)
    dw = (if dw then dw.length else 0)
    depth = dw - up
    if depth
      (workIt = ->
        if depth > 0
          # net opening: remember where this level started and whether it
          # looks like a function (those lines get blanked for completion)
          self.lines.level.push
            line: self.lines.length - 1
            depth: depth
            isFunction: /\s*function\s*/.test(cmd)
        else if depth < 0
          # net closing: unwind recorded levels, recursing while this line
          # closes more depth than the innermost level holds
          curr = self.lines.level.pop()
          if curr
            tmp = curr.depth + depth
            if tmp < 0
              depth += curr.depth
              workIt()
            else if tmp > 0
              curr.depth += depth
              self.lines.level.push curr
        return
      )()
  else
    self.lines.level = []
  return
REPLServer::convertToContext = (cmd) ->
  # Rewrite top-level `var x = ...` and `function name ...` declarations so
  # they land on the REPL's context object instead of the eval scope.
  self = this
  scopeVar = /^\s*var\s*([_\w\$]+)(.*)$/m
  scopeFunc = /^\s*function\s*([_\w\$]+)/
  varMatch = scopeVar.exec(cmd)
  if varMatch and varMatch.length is 3
    return "self.context." + varMatch[1] + varMatch[2]
  funcMatch = scopeFunc.exec(self.bufferedCommand)
  if funcMatch and funcMatch.length is 2
    return funcMatch[1] + " = " + self.bufferedCommand
  cmd
inherits Recoverable, SyntaxError
# Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# A repl library that you can include in your own code to get a runtime
# * interface to your program.
# *
# * var repl = require("repl");
# * // start repl on stdin
# * repl.start("prompt> ");
# *
# * // listen for unix socket connections and start repl on them
# * net.createServer(function(socket) {
# * repl.start("node via Unix socket> ", socket);
# * }).listen("/tmp/node-repl-sock");
# *
# * // listen for TCP socket connections and start repl on them
# * net.createServer(function(socket) {
# * repl.start("node via TCP socket> ", socket);
# * }).listen(5001);
# *
# * // expose foo to repl context
# * repl.start("node > ").context.foo = "stdin is fun";
#
# If obj.hasOwnProperty has been overridden, then calling
# obj.hasOwnProperty(prop) will break.
# See: https://github.com/joyent/node/issues/1707
hasOwnProperty = (obj, prop) ->
  # Call through Object.prototype so an overridden obj.hasOwnProperty
  # cannot break the lookup (see joyent/node#1707).
  Object.prototype.hasOwnProperty.call obj, prop
# hack for require.resolve("./relative") to work properly.
# hack for repl require to work properly with node_modules folders
# Can overridden with custom print functions, such as `probe` or `eyes.js`.
# This is the default "writer" value if none is passed in the REPL options.
# REPLServer(prompt|options, stream, eval_, useGlobal, ignoreUndefined)
# Accepts either an options object or positional arguments (legacy API).
# Wires up the readline interface, the evaluation domain, SIGINT handling
# and the "line" handler that evaluates each entered command.
REPLServer = (prompt, stream, eval_, useGlobal, ignoreUndefined) ->
  # an options object was given
  # just for backwards compat, see github.com/joyent/node/pull/7127
  # Default evaluator: syntax-check with vm.createScript, then run the
  # script either in the global context or this REPL's sandbox context.
  defaultEval = (code, context, file, cb) ->
    err = undefined
    result = undefined
    # first, create the Script object to check the syntax
    try
      script = vm.createScript(code,
        filename: file
        displayErrors: false
      )
    catch e
      debug "parse error %j", code, e
      if isRecoverableError(e)
        err = new Recoverable(e)
      else
        err = e
    unless err
      try
        if self.useGlobal
          result = script.runInThisContext(displayErrors: false)
        else
          result = script.runInContext(context,
            displayErrors: false
          )
      catch e
        err = e
    if err and process.domain
      debug "not recoverable, send to domain"
      process.domain.emit "error", err
      process.domain.exit()
      return
    cb err, result
    return
  # legacy API, passing a 'stream'/'socket' option
  # use stdin and stdout as the default streams if none were given
  # We're given custom object with 2 streams, or the `process` object
  # We're given a duplex readable/writable Stream, like a `net.Socket`
  # Tab-completion hook handed to readline; delegates to this.complete.
  complete = (text, callback) ->
    self.complete text, callback
    return
  return new REPLServer(prompt, stream, eval_, useGlobal, ignoreUndefined) unless this instanceof REPLServer
  options = undefined
  input = undefined
  output = undefined
  dom = undefined
  if util.isObject(prompt)
    options = prompt
    stream = options.stream or options.socket
    input = options.input
    output = options.output
    eval_ = options.eval
    useGlobal = options.useGlobal
    ignoreUndefined = options.ignoreUndefined
    prompt = options.prompt
    dom = options.domain
  else unless util.isString(prompt)
    throw new Error("An options Object, or a prompt String are required")
  else
    options = {}
  self = this
  self._domain = dom or domain.create()
  self.useGlobal = !!useGlobal
  self.ignoreUndefined = !!ignoreUndefined
  self.rli = this
  eval_ = eval_ or defaultEval
  self.eval = self._domain.bind(eval_)
  # Errors raised during evaluation land here: print, reset, re-prompt.
  self._domain.on "error", (e) ->
    debug "domain error"
    self.outputStream.write (e.stack or e) + "\n"
    self.bufferedCommand = ""
    self.lines.level = []
    self.displayPrompt()
    return
  if not input and not output
    stream = process unless stream
    if stream.stdin and stream.stdout
      input = stream.stdin
      output = stream.stdout
    else
      input = stream
      output = stream
  self.inputStream = input
  self.outputStream = output
  self.resetContext()
  self.bufferedCommand = ""
  self.lines.level = []
  rl.Interface.apply this, [
    self.inputStream
    self.outputStream
    complete
    options.terminal
  ]
  self.setPrompt (if not util.isUndefined(prompt) then prompt else "> ")
  @commands = {}
  defineDefaultCommands this
  # figure out which "writer" function to use
  self.writer = options.writer or exports.writer
  options.useColors = self.terminal if util.isUndefined(options.useColors)
  self.useColors = !!options.useColors
  if self.useColors and self.writer is util.inspect
    # Turn on ANSI coloring.
    self.writer = (obj, showHidden, depth) ->
      util.inspect obj, showHidden, depth, true
  self.setPrompt self._prompt
  self.on "close", ->
    self.emit "exit"
    return
  # First ^C clears the line / warns; second consecutive ^C exits.
  sawSIGINT = false
  self.on "SIGINT", ->
    empty = self.line.length is 0
    self.clearLine()
    if not (self.bufferedCommand and self.bufferedCommand.length > 0) and empty
      if sawSIGINT
        self.close()
        sawSIGINT = false
        return
      self.output.write "(^C again to quit)\n"
      sawSIGINT = true
    else
      sawSIGINT = false
      self.bufferedCommand = ""
      self.lines.level = []
    self.displayPrompt()
    return
  self.on "line", (cmd) ->
    # Check to see if a REPL keyword was used. If it returns true,
    # display next prompt and return.
    # It's confusing for `{ a : 1 }` to be interpreted as a block
    # statement rather than an object literal. So, we first try
    # to wrap it in parentheses, so that it will be interpreted as
    # an expression.
    # otherwise we just append a \n so that it will be either
    # terminated, or continued onto the next expression if it's an
    # unexpected end of input.
    finish = (e, ret) ->
      debug "finish", e, ret
      self.memory cmd
      if e and not self.bufferedCommand and cmd.trim().match(/^npm /)
        self.outputStream.write "npm should be run outside of the " + "node repl, in your normal shell.\n" + "(Press Control-D to exit.)\n"
        self.bufferedCommand = ""
        self.displayPrompt()
        return
      # If error was SyntaxError and not JSON.parse error
      if e
        if e instanceof Recoverable
          # Start buffering data like that:
          # {
          # ... x: 1
          # ... }
          self.bufferedCommand += cmd + "\n"
          self.displayPrompt()
          return
        else
          self._domain.emit "error", e
      # Clear buffer if no SyntaxErrors
      self.bufferedCommand = ""
      # If we got any output - print it (if no error)
      if not e and (not self.ignoreUndefined or not util.isUndefined(ret))
        self.context._ = ret
        self.outputStream.write self.writer(ret) + "\n"
      # Display prompt again
      self.displayPrompt()
      return
    debug "line %j", cmd
    sawSIGINT = false
    skipCatchall = false
    cmd = trimWhitespace(cmd)
    if cmd and cmd.charAt(0) is "." and isNaN(parseFloat(cmd))
      matches = cmd.match(/^\.([^\s]+)\s*(.*)$/)
      keyword = matches and matches[1]
      rest = matches and matches[2]
      if self.parseREPLKeyword(keyword, rest) is true
        return
      else
        self.outputStream.write "Invalid REPL keyword\n"
        skipCatchall = true
    unless skipCatchall
      evalCmd = self.bufferedCommand + cmd
      if /^\s*\{/.test(evalCmd) and /\}\s*$/.test(evalCmd)
        evalCmd = "(" + evalCmd + ")\n"
      else
        evalCmd = evalCmd + "\n"
      debug "eval %j", evalCmd
      self.eval evalCmd, self.context, "repl", finish
    else
      finish null
    return
  self.on "SIGCONT", ->
    self.displayPrompt true
    return
  self.displayPrompt()
  return
# prompt is a string to print on each line for the prompt,
# source is a stream to use for I/O, defaulting to stdin/stdout.
# make built-in modules available directly
# (loaded lazily)
# allow the creation of other globals with this name
# Allow REPL extensions to extend the new context
# Do not overwrite `_initialPrompt` here
# When invoked as an API method, overwrite _initialPrompt
# A stream to push an array into a REPL
# used in REPLServer.complete
ArrayStream = ->
  Stream.call this
  # Replay an array of input lines as "data" events; REPLServer.complete
  # uses this to feed buffered lines into a nested REPL.
  @run = (data) ->
    data.forEach (line) =>
      @emit "data", line + "\n"
      return
    return
  return
# Provide a list of completions for the given leading text. This is
# given to the readline interface for handling tab completion.
#
# Example:
# complete('var foo = util.')
# -> [['util.print', 'util.debug', 'util.log', 'util.inspect', 'util.pump'],
# 'util.' ]
#
# Warning: This eval's code like "foo.bar.baz", so it will run property
# getter code.
# There may be local variables to evaluate, try a nested REPL
# Get a new array of inputed lines
# Kill off all function declarations to push all local variables into
# global scope
# make a new "input" stream
# make a nested REPL
# eval the flattened code
# all this is only profitable if the nested REPL
# does not have a bufferedCommand
# list of completion lists, one for each inheritance "level"
# REPL commands (e.g. ".break").
# require('...<Tab>')
# Exclude versioned names that 'npm' installs.
# Handle variable member lookup.
# We support simple chained expressions like the following (no function
# calls, etc.). That is for simplicity and also because we *eval* that
# leading expression so for safety (see WARNING above) don't want to
# eval function calls.
#
# foo.bar<|> # completions for 'foo' with filter 'bar'
# spam.eggs.<|> # completions for 'spam.eggs' with filter ''
# foo<|> # all scope vars with filter 'foo'
# foo.<|> # completions for 'foo' with filter ''
# Resolve expr and get its completions.
# If context is instance of vm.ScriptContext
# Get global vars synchronously
# Add grouped globals
# if (e) console.log(e);
# works for non-objects
# Circular refs possible? Let's guard against that.
#console.log("completion error walking prototype chain:" + e);
# Will be called when all completionGroups are in place
# Useful for async autocompletion
# Filter, sort (within each group), uniq and merge the completion groups.
# unique completions across all groups
# Completion group 0 is the "closest"
# (least far up the inheritance chain)
# so we put its completions last: to be closest in the REPL.
# separator btwn groups
###*
Used to parse and execute the Node REPL commands.
@param {keyword} keyword The command entered to check.
@return {Boolean} If true it means don't continue parsing the command.
###
# save the line so I can do magic later
# TODO should I tab the level?
# I don't want to not change the format too much...
# I need to know "depth."
# Because I can not tell the difference between a } that
# closes an object literal and a } that closes a function
# going down is { and ( e.g. function() {
# going up is } and )
# going... down.
# push the line#, depth count, and if the line is a function.
# Since JS only has functional scope I only need to remove
# "function() {" lines, clearly this will not work for
# "function()
# {" but nothing should break, only tab completion for local
# scope will not work for this function.
# going... up.
#more to go, recurse
#remove and push back
# it is possible to determine a syntax error at this point.
# if the REPL still has a bufferedCommand and
# self.lines.level.length === 0
# TODO? keep a log of level so that any syntax breaking lines can
# be cleared on .break and in the case of a syntax error?
# TODO? if a log was kept, then I could clear the bufferedComand and
# eval these lines and throw the syntax error
# Push the ECMAScript standard global names onto `completionGroups`; when a
# filter word is present, also offer the language keywords.
addStandardGlobals = (completionGroups, filter) ->
  # Global object properties
  # (http://www.ecma-international.org/publications/standards/Ecma-262.htm)
  completionGroups.push [
    "NaN"
    "Infinity"
    "undefined"
    "eval"
    "parseInt"
    "parseFloat"
    "isNaN"
    "isFinite"
    "decodeURI"
    "decodeURIComponent"
    "encodeURI"
    "encodeURIComponent"
    "Object"
    "Function"
    "Array"
    "String"
    "Boolean"
    "Number"
    "Date"
    "RegExp"
    "Error"
    "EvalError"
    "RangeError"
    "ReferenceError"
    "SyntaxError"
    "TypeError"
    "URIError"
    "Math"
    "JSON"
  ]
  # Common keywords. Exclude for completion on the empty string, b/c
  # they just get in the way.
  if filter
    completionGroups.push [
      "break"
      "case"
      "catch"
      "const"
      "continue"
      "debugger"
      "default"
      "delete"
      "do"
      "else"
      "export"
      "false"
      "finally"
      "for"
      "function"
      "if"
      "import"
      "in"
      "instanceof"
      "let"
      "new"
      "null"
      "return"
      "switch"
      "this"
      "throw"
      "true"
      "try"
      "typeof"
      "undefined"
      "var"
      "void"
      "while"
      "with"
      "yield"
    ]
  return
# Register the built-in dot-commands (.break, .clear, .exit, .help, .save,
# .load) on the given REPL instance.
defineDefaultCommands = (repl) ->
  # TODO remove me after 0.3.x
  repl.defineCommand "break",
    help: "Sometimes you get stuck, this gets you out"
    action: ->
      @bufferedCommand = ""
      @displayPrompt()
      return
  clearMessage = undefined
  if repl.useGlobal
    clearMessage = "Alias for .break"
  else
    clearMessage = "Break, and also clear the local context"
  repl.defineCommand "clear",
    help: clearMessage
    action: ->
      @bufferedCommand = ""
      unless @useGlobal
        @outputStream.write "Clearing context...\n"
        @resetContext()
      @displayPrompt()
      return
  repl.defineCommand "exit",
    help: "Exit the repl"
    action: ->
      @close()
      return
  repl.defineCommand "help",
    help: "Show repl options"
    action: ->
      self = this
      Object.keys(@commands).sort().forEach (name) ->
        cmd = self.commands[name]
        self.outputStream.write name + "\t" + (cmd.help or "") + "\n"
        return
      @displayPrompt()
      return
  repl.defineCommand "save",
    help: "Save all evaluated commands in this REPL session to a file"
    action: (file) ->
      try
        fs.writeFileSync file, @lines.join("\n") + "\n"
        @outputStream.write "Session saved to:" + file + "\n"
      catch e
        @outputStream.write "Failed to save:" + file + "\n"
      @displayPrompt()
      return
  repl.defineCommand "load",
    help: "Load JS from a file into the REPL session"
    action: (file) ->
      try
        stats = fs.statSync(file)
        if stats and stats.isFile()
          self = this
          data = fs.readFileSync(file, "utf8")
          lines = data.split("\n")
          @displayPrompt()
          lines.forEach (line) ->
            self.write line + "\n" if line
            return
      catch e
        @outputStream.write "Failed to load:" + file + "\n"
      @displayPrompt()
      return
  return
trimWhitespace = (cmd) ->
  # Strip leading and trailing whitespace from a command line.
  # The capture uses a lazy `.+?` so the trailing `\s*` can actually
  # consume trailing blanks; with the previous greedy `.+` the `\s*`
  # always matched empty and trailing whitespace was never removed.
  trimmer = /^\s*(.+?)\s*$/m
  matches = trimmer.exec(cmd)
  return matches[1] if matches and matches.length is 2
  ""
regexpEscape = (s) ->
  # Backslash-escape every character that is special inside a RegExp.
  metaChars = /[-[\]{}()*+?.,\\^$|#\s]/g
  s.replace metaChars, "\\$&"
###*
Converts commands that use var and function <name>() to use the
local exports.context when evaled. This provides a local context
on the REPL.
@param {String} cmd The cmd to convert.
@return {String} The converted command.
###
# Replaces: var foo = "bar"; with: self.context.foo = bar;
# Replaces: function foo() {}; with: foo = function foo() {};
# If the error is that we've unexpectedly ended the input,
# then let the user try to recover by adding more input.
isRecoverableError = (e) ->
  # An error is "recoverable" when it is a SyntaxError whose message says
  # the input simply ended too early — more lines may complete it.
  recoverable = /^(Unexpected end of input|Unexpected token :)/
  e and e.name is "SyntaxError" and recoverable.test(e.message)
# Recoverable wraps an error whose input may be completed by further
# lines; the REPL keeps buffering instead of reporting it.
Recoverable = (err) ->
  @err = err
  return
"use strict"
util = require("util")
inherits = require("util").inherits
Stream = require("stream")
vm = require("vm")
path = require("path")
fs = require("fs")
rl = require("readline")
Console = require("console").Console
domain = require("domain")
debug = util.debuglog("repl")
module.filename = path.resolve("repl")
module.paths = require("module")._nodeModulePaths(module.filename)
exports.writer = util.inspect
exports._builtinLibs = [
"assert"
"buffer"
"child_process"
"cluster"
"crypto"
"dgram"
"dns"
"domain"
"events"
"fs"
"http"
"https"
"net"
"os"
"path"
"punycode"
"querystring"
"readline"
"stream"
"string_decoder"
"tls"
"tty"
"url"
"util"
"v8"
"vm"
"zlib"
"smalloc"
]
inherits REPLServer, rl.Interface
exports.REPLServer = REPLServer
exports.start = (prompt, source, eval_, useGlobal, ignoreUndefined) ->
repl = new REPLServer(prompt, source, eval_, useGlobal, ignoreUndefined)
exports.repl = repl unless exports.repl
repl
REPLServer::createContext = ->
context = undefined
if @useGlobal
context = global
else
context = vm.createContext()
for i of global
context[i] = global[i]
context.console = new Console(@outputStream)
context.global = context
context.global.global = context
context.module = module
context.require = require
@lines = []
@lines.level = []
exports._builtinLibs.forEach (name) ->
Object.defineProperty context, name,
get: ->
lib = require(name)
context._ = context[name] = lib
lib
set: (val) ->
delete context[name]
context[name] = val
return
configurable: true
return
context
REPLServer::resetContext = ->
@context = @createContext()
@emit "reset", @context
return
REPLServer::displayPrompt = (preserveCursor) ->
prompt = @_initialPrompt
if @bufferedCommand.length
prompt = "..."
levelInd = new Array(@lines.level.length).join("..")
prompt += levelInd + " "
REPLServer.super_::setPrompt.call this, prompt
@prompt preserveCursor
return
REPLServer::setPrompt = setPrompt = (prompt) ->
@_initialPrompt = prompt
REPLServer.super_::setPrompt.call this, prompt
return
util.inherits ArrayStream, Stream
ArrayStream::readable = true
ArrayStream::writable = true
ArrayStream::resume = ->
ArrayStream::write = ->
requireRE = /\brequire\s*\(['"](([\w\.\/-]+\/)?([\w\.\/-]*))/
simpleExpressionRE = /(([a-zA-Z_$](?:\w|\$)*)\.)*([a-zA-Z_$](?:\w|\$)*)\.?$/
REPLServer::complete = (line, callback) ->
completionGroupsLoaded = (err) ->
throw err if err
if completionGroups.length and filter
newCompletionGroups = []
i = 0
while i < completionGroups.length
group = completionGroups[i].filter((elem) ->
elem.indexOf(filter) is 0
)
newCompletionGroups.push group if group.length
i++
completionGroups = newCompletionGroups
if completionGroups.length
uniq = {}
completions = []
i = completionGroups.length - 1
while i >= 0
group = completionGroups[i]
group.sort()
j = 0
while j < group.length
c = group[j]
unless hasOwnProperty(uniq, c)
completions.push c
uniq[c] = true
j++
completions.push ""
i--
completions.pop() while completions.length and completions[completions.length - 1] is ""
callback null, [
completions or []
completeOn
]
return
if not util.isUndefined(@bufferedCommand) and @bufferedCommand.length
tmp = @lines.slice()
@lines.level.forEach (kill) ->
tmp[kill.line] = "" if kill.isFunction
return
flat = new ArrayStream()
magic = new REPLServer("", flat)
magic.context = magic.createContext()
flat.run tmp
return magic.complete(line, callback) unless magic.bufferedCommand
completions = undefined
completionGroups = []
completeOn = undefined
match = undefined
filter = undefined
i = undefined
group = undefined
c = undefined
match = null
match = line.match(/^\s*(\.\w*)$/)
if match
completionGroups.push Object.keys(@commands)
completeOn = match[1]
filter = match[1] if match[1].length > 1
completionGroupsLoaded()
else if match = line.match(requireRE)
exts = Object.keys(require.extensions)
indexRe = new RegExp("^index(" + exts.map(regexpEscape).join("|") + ")$")
completeOn = match[1]
subdir = match[2] or ""
filter = match[1]
dir = undefined
files = undefined
f = undefined
name = undefined
base = undefined
ext = undefined
abs = undefined
subfiles = undefined
s = undefined
group = []
paths = module.paths.concat(require("module").globalPaths)
i = 0
while i < paths.length
dir = path.resolve(paths[i], subdir)
try
files = fs.readdirSync(dir)
catch e
continue
f = 0
while f < files.length
name = files[f]
ext = path.extname(name)
base = name.slice(0, -ext.length)
continue if base.match(/-\d+\.\d+(\.\d+)?/) or name is ".npm"
if exts.indexOf(ext) isnt -1
group.push subdir + base if not subdir or base isnt "index"
else
abs = path.resolve(dir, name)
try
if fs.statSync(abs).isDirectory()
group.push subdir + name + "/"
subfiles = fs.readdirSync(abs)
s = 0
while s < subfiles.length
group.push subdir + name if indexRe.test(subfiles[s])
s++
f++
i++
completionGroups.push group if group.length
completionGroups.push exports._builtinLibs unless subdir
completionGroupsLoaded()
else if line.length is 0 or line[line.length - 1].match(/\w|\.|\$/)
match = simpleExpressionRE.exec(line)
if line.length is 0 or match
expr = undefined
completeOn = ((if match then match[0] else ""))
if line.length is 0
filter = ""
expr = ""
else if line[line.length - 1] is "."
filter = ""
expr = match[0].slice(0, match[0].length - 1)
else
bits = match[0].split(".")
filter = bits.pop()
expr = bits.join(".")
memberGroups = []
unless expr
if @useGlobal or @context.constructor and @context.constructor.name is "Context"
contextProto = @context
completionGroups.push Object.getOwnPropertyNames(contextProto) while contextProto = Object.getPrototypeOf(contextProto)
completionGroups.push Object.getOwnPropertyNames(@context)
addStandardGlobals completionGroups, filter
completionGroupsLoaded()
else
@eval ".scope", @context, "repl", (err, globals) ->
if err or not globals
addStandardGlobals completionGroups, filter
else if util.isArray(globals[0])
globals.forEach (group) ->
completionGroups.push group
return
else
completionGroups.push globals
addStandardGlobals completionGroups, filter
completionGroupsLoaded()
return
else
@eval expr, @context, "repl", (e, obj) ->
if obj?
memberGroups.push Object.getOwnPropertyNames(obj) if util.isObject(obj) or util.isFunction(obj)
try
sentinel = 5
p = undefined
if util.isObject(obj) or util.isFunction(obj)
p = Object.getPrototypeOf(obj)
else
p = (if obj.constructor then obj.constructor:: else null)
until util.isNull(p)
memberGroups.push Object.getOwnPropertyNames(p)
p = Object.getPrototypeOf(p)
sentinel--
break if sentinel <= 0
if memberGroups.length
i = 0
while i < memberGroups.length
completionGroups.push memberGroups[i].map((member) ->
expr + "." + member
)
i++
filter = expr + "." + filter if filter
completionGroupsLoaded()
return
else
completionGroupsLoaded()
else
completionGroupsLoaded()
return
REPLServer::parseREPLKeyword = (keyword, rest) ->
cmd = @commands[keyword]
if cmd
cmd.action.call this, rest
return true
false
# Register a REPL dot-command under `keyword`.
#
# `cmd` may be either a bare action function or an object with an
# `action` property; a bare function is normalized into the object
# form. Throws when an object is supplied whose `action` is not a
# function.
REPLServer::defineCommand = (keyword, cmd) ->
  if util.isFunction cmd
    # Normalize the shorthand form into { action: fn }.
    cmd = action: cmd
  else unless util.isFunction cmd.action
    throw new Error("bad argument, action must be a function")
  @commands[keyword] = cmd
  return
# Record each entered line in the REPL's history and track `{`/`(`
# nesting depth so multi-line constructs are stored with matching
# indentation. A falsy `cmd` resets the nesting state.
#
# cmd - the line of input just entered, or falsy to reset.
REPLServer::memory = memory = (cmd) ->
  self = this
  self.lines = self.lines or []
  self.lines.level = self.lines.level or []
  if cmd
    # Indent the stored line one space per currently-open nesting level.
    self.lines.push new Array(self.lines.level.length).join(" ") + cmd
  else
    self.lines.push ""
  if cmd
    # Count opening vs. closing brackets/parens on this line; `depth`
    # is the line's net nesting change.
    dw = cmd.match(/{|\(/g)
    up = cmd.match(/}|\)/g)
    up = (if up then up.length else 0)
    dw = (if dw then dw.length else 0)
    depth = dw - up
    if depth
      # workIt reconciles the net depth against the stack of open
      # levels; it recurses when a closing line consumes more depth
      # than the topmost level holds.
      (workIt = ->
        if depth > 0
          # Net opener: push a new level noting which history line
          # started it and whether it looks like a function body.
          self.lines.level.push
            line: self.lines.length - 1
            depth: depth
            isFunction: /\s*function\s*/.test(cmd)
        else if depth < 0
          # Net closer: pop the most recent open level and absorb
          # this line's closings into it.
          curr = self.lines.level.pop()
          if curr
            tmp = curr.depth + depth
            if tmp < 0
              # Closed past the top level; carry the remainder into
              # the next level down.
              depth += curr.depth
              workIt()
            else if tmp > 0
              # Top level only partially closed; push it back with
              # its reduced depth.
              curr.depth += depth
              self.lines.level.push curr
          return
      )()
  else
    # Falsy cmd: clear all nesting state.
    self.lines.level = []
  return
# Rewrite top-level declarations so they persist on the REPL context.
# A leading `var x = ...` becomes `self.context.x = ...`, and a named
# `function f` declaration in the buffered command becomes an
# assignment `f = <buffered command>`. Anything else is returned
# unchanged.
#
# cmd - the candidate source string.
# Returns the possibly-rewritten source string.
REPLServer::convertToContext = (cmd) ->
  self = this
  declMatch = /^\s*var\s*([_\w\$]+)(.*)$/m.exec(cmd)
  if declMatch and declMatch.length is 3
    # Redirect the var declaration onto the shared context object.
    return "self.context." + declMatch[1] + declMatch[2]
  funcMatch = /^\s*function\s*([_\w\$]+)/.exec(self.bufferedCommand)
  if funcMatch and funcMatch.length is 2
    # Turn the function declaration into an assignment so the name
    # lands on the evaluation scope.
    return funcMatch[1] + " = " + self.bufferedCommand
  cmd
inherits Recoverable, SyntaxError
|
[
{
"context": "\n# Routing.\n# GET and POST APIs here.\n#\n# @author: Daniele Gazzelloni <daniele@danielegazzelloni.com>\n#################",
"end": 72,
"score": 0.9998813271522522,
"start": 54,
"tag": "NAME",
"value": "Daniele Gazzelloni"
},
{
"context": " POST APIs here.\n#\n# @auth... | backend/src/routing.coffee | danielegazzelloni/barbershop-challenge | 0 |
##
# Routing.
# GET and POST APIs here.
#
# @author: Daniele Gazzelloni <daniele@danielegazzelloni.com>
######################################################################
## BASE SETUP
## ==============================================
express = require 'express'
bodyParser = require 'body-parser'
config = require './config'
logger = require './logger'
customers = require './customers'
# Restrict Access-Control directives as your needs
allowCrossDomain = (req, res, next) ->
res.header "Access-Control-Allow-Origin", "*"
res.header "Access-Control-Allow-Credentials", "true"
res.header 'Access-Control-Allow-Methods', 'GET,POST'
res.header "Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept"
next();
# Method for serving routes
serveRoutes = (app) ->
app.use allowCrossDomain
app.use bodyParser.json({limit: '25mb'})
app.use bodyParser.urlencoded({ extended: false, limit: '5mb' })
## ROUTES
## ==============================================
app.get config.customersAPI, (req, res) ->
logger.log ">", "#{config.customersAPI} (GET) called."
customers.processVerb req, "GET", (status, result) ->
res.status(status.error)
res.json(result)
app.post config.customersAPI, (req, res) ->
logger.log ">", "#{config.customersAPI} (POST) called."
customers.processVerb req, "POST", (status, result) ->
res.status(status.error)
res.json(result)
# Module exports
exports.serveRoutes = serveRoutes
| 41336 |
##
# Routing.
# GET and POST APIs here.
#
# @author: <NAME> <<EMAIL>>
######################################################################
## BASE SETUP
## ==============================================
express = require 'express'
bodyParser = require 'body-parser'
config = require './config'
logger = require './logger'
customers = require './customers'
# Restrict Access-Control directives as your needs
allowCrossDomain = (req, res, next) ->
res.header "Access-Control-Allow-Origin", "*"
res.header "Access-Control-Allow-Credentials", "true"
res.header 'Access-Control-Allow-Methods', 'GET,POST'
res.header "Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept"
next();
# Method for serving routes
serveRoutes = (app) ->
app.use allowCrossDomain
app.use bodyParser.json({limit: '25mb'})
app.use bodyParser.urlencoded({ extended: false, limit: '5mb' })
## ROUTES
## ==============================================
app.get config.customersAPI, (req, res) ->
logger.log ">", "#{config.customersAPI} (GET) called."
customers.processVerb req, "GET", (status, result) ->
res.status(status.error)
res.json(result)
app.post config.customersAPI, (req, res) ->
logger.log ">", "#{config.customersAPI} (POST) called."
customers.processVerb req, "POST", (status, result) ->
res.status(status.error)
res.json(result)
# Module exports
exports.serveRoutes = serveRoutes
| true |
##
# Routing.
# GET and POST APIs here.
#
# @author: PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
######################################################################
## BASE SETUP
## ==============================================
express = require 'express'
bodyParser = require 'body-parser'
config = require './config'
logger = require './logger'
customers = require './customers'
# Restrict Access-Control directives as your needs
allowCrossDomain = (req, res, next) ->
res.header "Access-Control-Allow-Origin", "*"
res.header "Access-Control-Allow-Credentials", "true"
res.header 'Access-Control-Allow-Methods', 'GET,POST'
res.header "Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept"
next();
# Method for serving routes
serveRoutes = (app) ->
app.use allowCrossDomain
app.use bodyParser.json({limit: '25mb'})
app.use bodyParser.urlencoded({ extended: false, limit: '5mb' })
## ROUTES
## ==============================================
app.get config.customersAPI, (req, res) ->
logger.log ">", "#{config.customersAPI} (GET) called."
customers.processVerb req, "GET", (status, result) ->
res.status(status.error)
res.json(result)
app.post config.customersAPI, (req, res) ->
logger.log ">", "#{config.customersAPI} (POST) called."
customers.processVerb req, "POST", (status, result) ->
res.status(status.error)
res.json(result)
# Module exports
exports.serveRoutes = serveRoutes
|
[
{
"context": "ost: \"localhost\"\n name: \"Karma\"\n username: \"karma_app_db\"\n password: \"password\"\n testdb:\n host: \"lo",
"end": 82,
"score": 0.9995464086532593,
"start": 70,
"tag": "USERNAME",
"value": "karma_app_db"
},
{
"context": "arma\"\n username: \"karm... | config.example.coffee | eve-apps/karma | 1 | config =
db:
host: "localhost"
name: "Karma"
username: "karma_app_db"
password: "password"
testdb:
host: "localhost"
name: "test"
username: "karma_app_db"
password: "password"
config.db.connString = "postgres://#{config.db.username}:#{config.db.password}@#{config.db.host}/#{config.db.name}"
config.testdb.connString = "postgres://#{config.testdb.username}:#{config.testdb.password}@#{config.testdb.host}/#{config.testdb.name}"
module.exports = config
| 160965 | config =
db:
host: "localhost"
name: "Karma"
username: "karma_app_db"
password: "<PASSWORD>"
testdb:
host: "localhost"
name: "test"
username: "karma_app_db"
password: "<PASSWORD>"
config.db.connString = "postgres://#{config.db.username}:#{config.db.password}@#{config.db.host}/#{config.db.name}"
config.testdb.connString = "postgres://#{config.testdb.username}:#{config.testdb.password}@#{config.testdb.host}/#{config.testdb.name}"
module.exports = config
| true | config =
db:
host: "localhost"
name: "Karma"
username: "karma_app_db"
password: "PI:PASSWORD:<PASSWORD>END_PI"
testdb:
host: "localhost"
name: "test"
username: "karma_app_db"
password: "PI:PASSWORD:<PASSWORD>END_PI"
config.db.connString = "postgres://#{config.db.username}:#{config.db.password}@#{config.db.host}/#{config.db.name}"
config.testdb.connString = "postgres://#{config.testdb.username}:#{config.testdb.password}@#{config.testdb.host}/#{config.testdb.name}"
module.exports = config
|
[
{
"context": "erver = server\n data =\n email: 'test@cozycloud.cc'\n password: 'password'\n tim",
"end": 526,
"score": 0.9999254941940308,
"start": 509,
"tag": "EMAIL",
"value": "test@cozycloud.cc"
},
{
"context": "email: 'test@cozycloud.cc'\n ... | test/helpers.coffee | jacquarg/cozy-calendar | 0 | Client = require('request-json').JsonClient
client = new Client "http://localhost:8888/"
module.exports = helpers = {}
if process.env.USE_JS
helpers.prefix = '../build/'
else
helpers.prefix = '../'
Event = require "#{helpers.prefix}server/models/event"
User = require "#{helpers.prefix}server/models/user"
helpers.before = (done) ->
@timeout 10000
start = require "#{helpers.prefix}server"
start 8888, (err, app, server) =>
@server = server
data =
email: 'test@cozycloud.cc'
password: 'password'
timezone: 'Europe/Paris'
User.create data, (err) ->
return done err if err
# wait a little for User.timezone to be updated through Realtime
setTimeout done, 1000
helpers.after = (done) ->
@server.close()
helpers.cleanDb ->
User.destroyAll done
# Remove all the alarms
helpers.cleanDb = (callback) ->
Event.destroyAll callback
# Get all the alarams
helpers.getAllEvents = (callback) ->
Event.all callback
# Create an event from values
helpers.createEvent = (start, end, place, description, callback) ->
(callback) ->
evt =
start: start
end: end
place: place
description: description
tags: ['my calendar']
Event.create evt, callback
# Create an alarm from object
helpers.createEventFromObject = (data, callback) ->
Event.create data, callback
helpers.getEventByID = (id, callback) ->
Event.find id, callback
helpers.doesEventExist = (id, callback) ->
Event.exists id, callback
| 124738 | Client = require('request-json').JsonClient
client = new Client "http://localhost:8888/"
module.exports = helpers = {}
if process.env.USE_JS
helpers.prefix = '../build/'
else
helpers.prefix = '../'
Event = require "#{helpers.prefix}server/models/event"
User = require "#{helpers.prefix}server/models/user"
helpers.before = (done) ->
@timeout 10000
start = require "#{helpers.prefix}server"
start 8888, (err, app, server) =>
@server = server
data =
email: '<EMAIL>'
password: '<PASSWORD>'
timezone: 'Europe/Paris'
User.create data, (err) ->
return done err if err
# wait a little for User.timezone to be updated through Realtime
setTimeout done, 1000
helpers.after = (done) ->
@server.close()
helpers.cleanDb ->
User.destroyAll done
# Remove all the alarms
helpers.cleanDb = (callback) ->
Event.destroyAll callback
# Get all the alarams
helpers.getAllEvents = (callback) ->
Event.all callback
# Create an event from values
helpers.createEvent = (start, end, place, description, callback) ->
(callback) ->
evt =
start: start
end: end
place: place
description: description
tags: ['my calendar']
Event.create evt, callback
# Create an alarm from object
helpers.createEventFromObject = (data, callback) ->
Event.create data, callback
helpers.getEventByID = (id, callback) ->
Event.find id, callback
helpers.doesEventExist = (id, callback) ->
Event.exists id, callback
| true | Client = require('request-json').JsonClient
client = new Client "http://localhost:8888/"
module.exports = helpers = {}
if process.env.USE_JS
helpers.prefix = '../build/'
else
helpers.prefix = '../'
Event = require "#{helpers.prefix}server/models/event"
User = require "#{helpers.prefix}server/models/user"
helpers.before = (done) ->
@timeout 10000
start = require "#{helpers.prefix}server"
start 8888, (err, app, server) =>
@server = server
data =
email: 'PI:EMAIL:<EMAIL>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
timezone: 'Europe/Paris'
User.create data, (err) ->
return done err if err
# wait a little for User.timezone to be updated through Realtime
setTimeout done, 1000
helpers.after = (done) ->
@server.close()
helpers.cleanDb ->
User.destroyAll done
# Remove all the alarms
helpers.cleanDb = (callback) ->
Event.destroyAll callback
# Get all the alarams
helpers.getAllEvents = (callback) ->
Event.all callback
# Create an event from values
helpers.createEvent = (start, end, place, description, callback) ->
(callback) ->
evt =
start: start
end: end
place: place
description: description
tags: ['my calendar']
Event.create evt, callback
# Create an alarm from object
helpers.createEventFromObject = (data, callback) ->
Event.create data, callback
helpers.getEventByID = (id, callback) ->
Event.find id, callback
helpers.doesEventExist = (id, callback) ->
Event.exists id, callback
|
[
{
"context": "1\n heal: 15\n mana_cost: 20\n ,\n # name: \"Replenish health\"\n # _description: \"Heal yourself o",
"end": 792,
"score": 0.7648021578788757,
"start": 788,
"tag": "NAME",
"value": "Repl"
},
{
"context": "heal: 15\n mana_cost: 20\n ,\n # name: \"Repl... | db/spells_data.coffee | adrienschuler/Battle-Arena | 0 | exports.initialize = ->
spells = [
name: "Punch"
_description: "Give a punch to your opponent, doing {damage} damages."
thumbnail: "punch"
is_default: 1
damage: 5
mana_cost: 0
,
name: "Fireball"
_description: "Launch a devastating fireball, doing {damage} damages.<br> Cost {mana_cost} mana."
thumbnail: "fireball"
is_default: 1
damage: 20
mana_cost: 20
,
name: "Frozenball"
_description: "Launch a frozenball, doing {damage} damages. Cost {mana_cost} mana."
thumbnail: "frozenball"
is_default: 1
damage: 10
mana_cost: 10
,
name: "Heal"
_description: "Heal yourself for {heal} hitpoints. Cost {mana_cost} mana."
thumbnail: "heal"
is_default: 1
heal: 15
mana_cost: 20
,
# name: "Replenish health"
# _description: "Heal yourself overtime for {heal} hitpoints per turn during {round_duration} turns. Cost {mana_cost} mana."
# thumbnail: "replenish_health"
# requirement: 3
# skill_points: 4
# heal: 15
# mana_cost: 40
# round_duration: 3
# ,
# name: "Transfuse stamina"
# _description: "Decrease your opponent stamina by {attributes[0].stats[0].value} and inscrease your own stamina by {attributes[1].stats[0].value}. Cost {mana_cost} mana."
# thumbnail: "transfuse_stamina"
# is_default: 1
# mana_cost: 20
# attributes: [
# target: "opponent"
# stats: [
# stat: "stamina"
# value: -5
# ,
# target: "me"
# stats: [
# stat: "stamina"
# value: +5
# ]
# ]
# ]
# ,
name: "Poison"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "poison"
is_default: 1
damage: 10
mana_cost: 5
,
name: "Wind"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "wind"
is_default: 1
damage: 10
mana_cost: 10
,
name: "Scream"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "scream"
damage: 15
mana_cost: 10
skill_points: 2
,
name: "Blast"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "blast"
damage: 45
mana_cost: 35
skill_points: 8
,
name: "Thunder"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "thunder"
damage: 30
mana_cost: 15
skill_points: 5
,
name: "Knifes"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "knifes"
damage: 30
mana_cost: 20
requirement: 2
skill_points: 2
,
name: "Meteor"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "meteor"
damage: 50
mana_cost: 60
requirement: 3
skill_points: 5
]
spells
| 92410 | exports.initialize = ->
spells = [
name: "Punch"
_description: "Give a punch to your opponent, doing {damage} damages."
thumbnail: "punch"
is_default: 1
damage: 5
mana_cost: 0
,
name: "Fireball"
_description: "Launch a devastating fireball, doing {damage} damages.<br> Cost {mana_cost} mana."
thumbnail: "fireball"
is_default: 1
damage: 20
mana_cost: 20
,
name: "Frozenball"
_description: "Launch a frozenball, doing {damage} damages. Cost {mana_cost} mana."
thumbnail: "frozenball"
is_default: 1
damage: 10
mana_cost: 10
,
name: "Heal"
_description: "Heal yourself for {heal} hitpoints. Cost {mana_cost} mana."
thumbnail: "heal"
is_default: 1
heal: 15
mana_cost: 20
,
# name: "<NAME>en<NAME>"
# _description: "Heal yourself overtime for {heal} hitpoints per turn during {round_duration} turns. Cost {mana_cost} mana."
# thumbnail: "replenish_health"
# requirement: 3
# skill_points: 4
# heal: 15
# mana_cost: 40
# round_duration: 3
# ,
# name: "<NAME>fuse stamina"
# _description: "Decrease your opponent stamina by {attributes[0].stats[0].value} and inscrease your own stamina by {attributes[1].stats[0].value}. Cost {mana_cost} mana."
# thumbnail: "transfuse_stamina"
# is_default: 1
# mana_cost: 20
# attributes: [
# target: "opponent"
# stats: [
# stat: "stamina"
# value: -5
# ,
# target: "me"
# stats: [
# stat: "stamina"
# value: +5
# ]
# ]
# ]
# ,
name: "Poison"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "poison"
is_default: 1
damage: 10
mana_cost: 5
,
name: "Wind"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "wind"
is_default: 1
damage: 10
mana_cost: 10
,
name: "Scream"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "scream"
damage: 15
mana_cost: 10
skill_points: 2
,
name: "Blast"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "blast"
damage: 45
mana_cost: 35
skill_points: 8
,
name: "Thunder"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "thunder"
damage: 30
mana_cost: 15
skill_points: 5
,
name: "<NAME>"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "knifes"
damage: 30
mana_cost: 20
requirement: 2
skill_points: 2
,
name: "Meteor"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "meteor"
damage: 50
mana_cost: 60
requirement: 3
skill_points: 5
]
spells
| true | exports.initialize = ->
spells = [
name: "Punch"
_description: "Give a punch to your opponent, doing {damage} damages."
thumbnail: "punch"
is_default: 1
damage: 5
mana_cost: 0
,
name: "Fireball"
_description: "Launch a devastating fireball, doing {damage} damages.<br> Cost {mana_cost} mana."
thumbnail: "fireball"
is_default: 1
damage: 20
mana_cost: 20
,
name: "Frozenball"
_description: "Launch a frozenball, doing {damage} damages. Cost {mana_cost} mana."
thumbnail: "frozenball"
is_default: 1
damage: 10
mana_cost: 10
,
name: "Heal"
_description: "Heal yourself for {heal} hitpoints. Cost {mana_cost} mana."
thumbnail: "heal"
is_default: 1
heal: 15
mana_cost: 20
,
# name: "PI:NAME:<NAME>END_PIenPI:NAME:<NAME>END_PI"
# _description: "Heal yourself overtime for {heal} hitpoints per turn during {round_duration} turns. Cost {mana_cost} mana."
# thumbnail: "replenish_health"
# requirement: 3
# skill_points: 4
# heal: 15
# mana_cost: 40
# round_duration: 3
# ,
# name: "PI:NAME:<NAME>END_PIfuse stamina"
# _description: "Decrease your opponent stamina by {attributes[0].stats[0].value} and inscrease your own stamina by {attributes[1].stats[0].value}. Cost {mana_cost} mana."
# thumbnail: "transfuse_stamina"
# is_default: 1
# mana_cost: 20
# attributes: [
# target: "opponent"
# stats: [
# stat: "stamina"
# value: -5
# ,
# target: "me"
# stats: [
# stat: "stamina"
# value: +5
# ]
# ]
# ]
# ,
name: "Poison"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "poison"
is_default: 1
damage: 10
mana_cost: 5
,
name: "Wind"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "wind"
is_default: 1
damage: 10
mana_cost: 10
,
name: "Scream"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "scream"
damage: 15
mana_cost: 10
skill_points: 2
,
name: "Blast"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "blast"
damage: 45
mana_cost: 35
skill_points: 8
,
name: "Thunder"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "thunder"
damage: 30
mana_cost: 15
skill_points: 5
,
name: "PI:NAME:<NAME>END_PI"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "knifes"
damage: 30
mana_cost: 20
requirement: 2
skill_points: 2
,
name: "Meteor"
_description: "Deal {damage} damages to your opponent. Cost {mana_cost} mana."
thumbnail: "meteor"
damage: 50
mana_cost: 60
requirement: 3
skill_points: 5
]
spells
|
[
{
"context": "###*\n@module joukou-fbpp/protocols/runtime\n@author Fabian Cook <fabian.cook@joukou.com>\n###\n\nclass RuntimeProtoc",
"end": 953,
"score": 0.9998677968978882,
"start": 942,
"tag": "NAME",
"value": "Fabian Cook"
},
{
"context": "oukou-fbpp/protocols/runtime\n@author Fa... | src/protocols/runtime/index.coffee | joukou/joukou-flow | 0 | ###
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
BaseProtocol = require( '../base/index' )
pjson = require( '../../../package.json' )
uuid = require( 'node-uuid' )
Q = require( 'q' )
authentication = require( '../../authentication' ).bearer
schema = require( './schema' )
CommandResponse = require( '../../runtime/command-response' )
###*
@module joukou-fbpp/protocols/runtime
@author Fabian Cook <fabian.cook@joukou.com>
###
class RuntimeProtocol extends BaseProtocol
mainGraph: null
###*
@constructor RuntimeProtocol
###
constructor: ( context ) ->
super( 'runtime', context )
@command( 'getRuntime', @getRuntime, '', 'GET' )
@command( 'ports', @ports, 'ports', 'GET' )
@command( 'receivePacket', @receivePacket, ':graph/packet/:port', 'PUT' )
@addCommandSchemas( schema )
###*
@typedef { object } getRuntimePayload
@property { string } secret
###
###*
@typedef { object } runtime
@property { string } type
@property { Array.<string> } capabilities
@property { string } [id=undefined]
@property { string } [label=undefined]
@property { string } [graph=undefined]
###
###*
Request the information about the runtime.
When receiving this message the runtime should response with a runtime
message.
@param { getRuntimePayload } payload
@param { RuntimeContext } context
@returns { runtime | Promise }
###
getRuntime: ( payload, context ) ->
runtime = {
type: context.type
version: context.version
capabilities: context.capabilities
id: context.id
label: context.label
graph: context.graph
}
response = new CommandResponse(
'runtime',
runtime
)
if context.authorized and (
not payload.secret? or
# No need to re-authenticate
payload.secret is context.secret
)
return response
deferred = Q.defer()
authentication.verify(
payload.secret,
( err, model ) ->
if err
return deferred.reject( err )
context.user = model
context.authorized = yes
context.secret = payload.secret
deferred.resolve( response )
)
return deferred.promise
sendPorts: ( name, graph ) ->
###*
@param { string } id
@returns { string }
###
setMainGraph: ( id, graph ) ->
@mainGraph = graph
###*
@typedef { object } portDef
@property { string } id
@property { string } type
@property { string } description
@property { boolean } addressable
@property { boolean } required
###
###*
@typedef { object } port
@property { string } graph
@property { Array.<portDef> } inPorts
@property { Array.<portDef> } outPorts
###
###*
Signals the runtime's available ports.
@param { * } payload
@param { RuntimeContext } context
@returns { Array.<port> | Promise }
###
ports: ( payload, context ) ->
###*
@typedef { object } packet
@property { string } port
@property { string } event
@property { object } payload
@property { string } graph
###
###*
@param { packet } payload
@param { RuntimeContext } context
@returns { packet | Promise }
###
receivePacket: ( payload, context ) ->
module.exports = RuntimeProtocol | 79323 | ###
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
BaseProtocol = require( '../base/index' )
pjson = require( '../../../package.json' )
uuid = require( 'node-uuid' )
Q = require( 'q' )
authentication = require( '../../authentication' ).bearer
schema = require( './schema' )
CommandResponse = require( '../../runtime/command-response' )
###*
@module joukou-fbpp/protocols/runtime
@author <NAME> <<EMAIL>>
###
class RuntimeProtocol extends BaseProtocol
mainGraph: null
###*
@constructor RuntimeProtocol
###
constructor: ( context ) ->
super( 'runtime', context )
@command( 'getRuntime', @getRuntime, '', 'GET' )
@command( 'ports', @ports, 'ports', 'GET' )
@command( 'receivePacket', @receivePacket, ':graph/packet/:port', 'PUT' )
@addCommandSchemas( schema )
###*
@typedef { object } getRuntimePayload
@property { string } secret
###
###*
@typedef { object } runtime
@property { string } type
@property { Array.<string> } capabilities
@property { string } [id=undefined]
@property { string } [label=undefined]
@property { string } [graph=undefined]
###
###*
Request the information about the runtime.
When receiving this message the runtime should response with a runtime
message.
@param { getRuntimePayload } payload
@param { RuntimeContext } context
@returns { runtime | Promise }
###
getRuntime: ( payload, context ) ->
runtime = {
type: context.type
version: context.version
capabilities: context.capabilities
id: context.id
label: context.label
graph: context.graph
}
response = new CommandResponse(
'runtime',
runtime
)
if context.authorized and (
not payload.secret? or
# No need to re-authenticate
payload.secret is context.secret
)
return response
deferred = Q.defer()
authentication.verify(
payload.secret,
( err, model ) ->
if err
return deferred.reject( err )
context.user = model
context.authorized = yes
context.secret = payload.secret
deferred.resolve( response )
)
return deferred.promise
sendPorts: ( name, graph ) ->
###*
@param { string } id
@returns { string }
###
setMainGraph: ( id, graph ) ->
@mainGraph = graph
###*
@typedef { object } portDef
@property { string } id
@property { string } type
@property { string } description
@property { boolean } addressable
@property { boolean } required
###
###*
@typedef { object } port
@property { string } graph
@property { Array.<portDef> } inPorts
@property { Array.<portDef> } outPorts
###
###*
Signals the runtime's available ports.
@param { * } payload
@param { RuntimeContext } context
@returns { Array.<port> | Promise }
###
ports: ( payload, context ) ->
###*
@typedef { object } packet
@property { string } port
@property { string } event
@property { object } payload
@property { string } graph
###
###*
@param { packet } payload
@param { RuntimeContext } context
@returns { packet | Promise }
###
receivePacket: ( payload, context ) ->
module.exports = RuntimeProtocol | true | ###
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
BaseProtocol = require( '../base/index' )
pjson = require( '../../../package.json' )
uuid = require( 'node-uuid' )
Q = require( 'q' )
authentication = require( '../../authentication' ).bearer
schema = require( './schema' )
CommandResponse = require( '../../runtime/command-response' )
###*
@module joukou-fbpp/protocols/runtime
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
class RuntimeProtocol extends BaseProtocol
mainGraph: null
###*
@constructor RuntimeProtocol
###
constructor: ( context ) ->
super( 'runtime', context )
@command( 'getRuntime', @getRuntime, '', 'GET' )
@command( 'ports', @ports, 'ports', 'GET' )
@command( 'receivePacket', @receivePacket, ':graph/packet/:port', 'PUT' )
@addCommandSchemas( schema )
###*
@typedef { object } getRuntimePayload
@property { string } secret
###
###*
@typedef { object } runtime
@property { string } type
@property { Array.<string> } capabilities
@property { string } [id=undefined]
@property { string } [label=undefined]
@property { string } [graph=undefined]
###
###*
Request the information about the runtime.
When receiving this message the runtime should response with a runtime
message.
@param { getRuntimePayload } payload
@param { RuntimeContext } context
@returns { runtime | Promise }
###
getRuntime: ( payload, context ) ->
runtime = {
type: context.type
version: context.version
capabilities: context.capabilities
id: context.id
label: context.label
graph: context.graph
}
response = new CommandResponse(
'runtime',
runtime
)
if context.authorized and (
not payload.secret? or
# No need to re-authenticate
payload.secret is context.secret
)
return response
deferred = Q.defer()
authentication.verify(
payload.secret,
( err, model ) ->
if err
return deferred.reject( err )
context.user = model
context.authorized = yes
context.secret = payload.secret
deferred.resolve( response )
)
return deferred.promise
sendPorts: ( name, graph ) ->
###*
@param { string } id
@returns { string }
###
setMainGraph: ( id, graph ) ->
@mainGraph = graph
###*
@typedef { object } portDef
@property { string } id
@property { string } type
@property { string } description
@property { boolean } addressable
@property { boolean } required
###
###*
@typedef { object } port
@property { string } graph
@property { Array.<portDef> } inPorts
@property { Array.<portDef> } outPorts
###
###*
Signals the runtime's available ports.
@param { * } payload
@param { RuntimeContext } context
@returns { Array.<port> | Promise }
###
ports: ( payload, context ) ->
###*
@typedef { object } packet
@property { string } port
@property { string } event
@property { object } payload
@property { string } graph
###
###*
@param { packet } payload
@param { RuntimeContext } context
@returns { packet | Promise }
###
receivePacket: ( payload, context ) ->
module.exports = RuntimeProtocol |
[
{
"context": "### Copyright (c) 2015 Magnus Leo. All rights reserved. ###\n\nShape = require('./Sha",
"end": 33,
"score": 0.9998552203178406,
"start": 23,
"tag": "NAME",
"value": "Magnus Leo"
}
] | src/classes/Line.coffee | magnusleo/Leo-Engine | 1 | ### Copyright (c) 2015 Magnus Leo. All rights reserved. ###
Shape = require('./Shape')
util = require('../modules/util')
core = require('../modules/core')
view = require('../modules/view')
module.exports =
class Line extends Shape
constructor: (data) -> # Shape::constructor
defaultData =
x2: 0
y2: 0
data = util.merge(defaultData, data)
super(data)
draw: -> # Line::draw
unless super
return false
core.frameBufferCtx.beginPath()
core.frameBufferCtx.moveTo @drawX, @drawY
core.frameBufferCtx.lineTo( view.posToPx(@x2, 'x'), view.posToPx(@y2, 'y') )
core.frameBufferCtx.closePath()
core.frameBufferCtx.stroke()
return true
| 176011 | ### Copyright (c) 2015 <NAME>. All rights reserved. ###
Shape = require('./Shape')
util = require('../modules/util')
core = require('../modules/core')
view = require('../modules/view')
module.exports =
class Line extends Shape
constructor: (data) -> # Shape::constructor
defaultData =
x2: 0
y2: 0
data = util.merge(defaultData, data)
super(data)
draw: -> # Line::draw
unless super
return false
core.frameBufferCtx.beginPath()
core.frameBufferCtx.moveTo @drawX, @drawY
core.frameBufferCtx.lineTo( view.posToPx(@x2, 'x'), view.posToPx(@y2, 'y') )
core.frameBufferCtx.closePath()
core.frameBufferCtx.stroke()
return true
| true | ### Copyright (c) 2015 PI:NAME:<NAME>END_PI. All rights reserved. ###
Shape = require('./Shape')
util = require('../modules/util')
core = require('../modules/core')
view = require('../modules/view')
module.exports =
class Line extends Shape
constructor: (data) -> # Shape::constructor
defaultData =
x2: 0
y2: 0
data = util.merge(defaultData, data)
super(data)
draw: -> # Line::draw
unless super
return false
core.frameBufferCtx.beginPath()
core.frameBufferCtx.moveTo @drawX, @drawY
core.frameBufferCtx.lineTo( view.posToPx(@x2, 'x'), view.posToPx(@y2, 'y') )
core.frameBufferCtx.closePath()
core.frameBufferCtx.stroke()
return true
|
[
{
"context": "til\n\n The MIT License (MIT)\n\n Copyright (c) 2014 Yasuhiro Okuno\n\n Permission is hereby granted, free of charge, ",
"end": 88,
"score": 0.9998723268508911,
"start": 74,
"tag": "NAME",
"value": "Yasuhiro Okuno"
},
{
"context": "ne userobject of password since GET... | coffee_lib/atlassian-crowd-ext/atlassian-crowd-ext.coffee | koma75/crowdutil | 1 | ###
@license
crowdutil
The MIT License (MIT)
Copyright (c) 2014 Yasuhiro Okuno
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
AtlassianCrowd = require 'atlassian-crowd'
url = require 'url'
http = require 'http'
https = require 'https'
# Add a method to update user
# Update an existing Crowd user via PUT /user?username=<username>.
# userObj must carry every mutable field (name, first-name, last-name,
# display-name, email, active) and its name must equal `username` —
# renaming is rejected. Validation failures call back with an Error whose
# .type is "BAD_REQUEST"; otherwise the request result is forwarded as
# callback(err, res).
AtlassianCrowd.prototype.user.update = (username, userObj, callback) ->
  error = null
  # Validate input before touching the network.
  if !username || !userObj
    # Missing input entirely
    error = new Error "missing input"
    error.type = "BAD_REQUEST"
  else if(
    typeof userObj["name"] != 'string' ||
    typeof userObj["first-name"] != 'string' ||
    typeof userObj["last-name"] != 'string' ||
    typeof userObj["display-name"] != 'string' ||
    typeof userObj["email"] != 'string' ||
    typeof userObj["active"] != 'boolean'
  )
    # Missing/mistyped field in the user object
    error = new Error "missing input"
    error.type = "BAD_REQUEST"
  else if userObj.name != username
    # MUST NOT be updating the user's username
    # BUGFIX: error message typo ("missmatch" -> "mismatch")
    error = new Error "username mismatch"
    error.type = "BAD_REQUEST"
  if error
    return callback(error)
  # Prune the password: GET user?username=uid returns a user object WITH a
  # password field, and it must not be echoed back on update.
  if typeof userObj.password != 'undefined'
    delete userObj.password
  options =
    method: 'PUT'
    data: JSON.stringify(userObj)
    path: "/user?username=#{username}"
  # BUGFIX: _doRequest reads @settings, so it must be invoked with an
  # explicit receiver; the original bare call left `this` unbound.
  # NOTE(review): assumes the object `update` is called on carries
  # `settings` like the other prototype methods — confirm against
  # atlassian-crowd's binding of the `user` namespace.
  _doRequest.call @, options, (err, res) ->
    return callback(err, res)
#
# The code below are copied from node-atlassian-crowd
# and converted to coffeescript by Js2coffee
#
# Copyright (c) 2012 Gary Steven
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Low-level HTTP helper shared by the API methods (adapted from
# node-atlassian-crowd). Must run with the Crowd client as `this` (e.g.
# _doRequest.call(@, ...)) because it reads @settings for host, auth and
# path information.
# options: {method, path, data?}. Calls back with (err) on failure,
# (null, parsedBody) for JSON responses, or (null, statusCode) otherwise.
_doRequest = (options, callback) ->
  data = ""
  error = undefined
  opts =
    hostname: @settings.hostname
    port: @settings.port
    auth: @settings.authstring
    method: options.method
    # BUGFIX: `settings` was referenced without `@` here and in the
    # protocol check below — a ReferenceError at request time; every
    # other field already reads @settings.
    path: @settings.pathname + @settings.apipath + options.path
    rejectUnauthorized: (if "rejectUnauthorized" of @settings then @settings.rejectUnauthorized else true)
    headers:
      Accept: "application/json"
  if options.method is "POST" or options.method is "PUT"
    if options.data
      opts.headers["content-type"] = "application/json"
      opts.headers["content-length"] = options.data.length
    else
      error = new Error("Missing POST Data")
      error.type = "BAD_REQUEST"
      return callback(error)
  else
    # nginx requires a content-length header also for DELETE requests
    opts.headers["content-length"] = "0" if options.method is "DELETE"
  protocol = (if (@settings.protocol is "https:") then https else http)
  request = protocol.request(opts, (response) ->
    response.on "data", (chunk) ->
      data += chunk.toString()
      return
    # 204 No Content: report the status immediately, nothing to parse.
    return callback(null, response.statusCode) if response.statusCode is 204
    if response.statusCode is 401
      error = new Error("Application Authorization Error")
      error.type = "APPLICATION_ACCESS_DENIED"
      return callback(error)
    if response.statusCode is 403
      error = new Error("Application Permission Denied")
      error.type = "APPLICATION_PERMISSION_DENIED"
      return callback(error)
    response.on "end", ->
      if response.headers["content-type"] isnt "application/json"
        error = new Error("Invalid Response from Atlassian Crowd")
        error.type = "INVALID_RESPONSE"
        callback error
      else
        if data
          data = JSON.parse(data)
          # Crowd signals errors in-band via reason/message fields.
          if data.reason or data.message
            if typeof data.reason is "undefined"
              data.reason = "BAD_REQUEST"
              data.message = "Invalid Request to Atlassian Crowd"
            error = new Error(data.message)
            error.type = data.reason
            callback error
          else
            callback null, data
        else
          callback null, response.statusCode
      return
  )
  if options.data
    request.end options.data
  else
    request.end()
  return
module.exports = AtlassianCrowd
| 135696 | ###
@license
crowdutil
The MIT License (MIT)
Copyright (c) 2014 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN crowdECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
AtlassianCrowd = require 'atlassian-crowd'
url = require 'url'
http = require 'http'
https = require 'https'
# Add a method to update user
AtlassianCrowd.prototype.user.update = (username, userObj, callback) ->
# MUST have username and object to update with
error = null
# Check for input
if !username || !userObj
# Missing Input
error = new Error "missing input"
error.type = "BAD_REQUEST"
else if(
typeof userObj["name"] != 'string' ||
typeof userObj["first-name"] != 'string' ||
typeof userObj["last-name"] != 'string' ||
typeof userObj["display-name"] != 'string' ||
typeof userObj["email"] != 'string' ||
typeof userObj["active"] != 'boolean'
)
# Missing field in the user object
error = new Error "missing input"
error.type = "BAD_REQUEST"
else if userObj.name != username
# MUST NOT be updating user's username
error = new Error "username missmatch"
error.type = "BAD_REQUEST"
if error
return callback(error)
# prune userobject of password since GET user?username=uid
# returns a user object WITH password field
if typeof userObj.password != 'undefined'
delete userObj.password
options =
method: 'PUT'
data: JSON.stringify(userObj)
path: "/user?username=#{username}"
_doRequest(options, (err, res) ->
return callback(err, res)
)
#
# The code below are copied from node-atlassian-crowd
# and converted to coffeescript by Js2coffee
#
# Copyright (c) 2012 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
_doRequest = (options, callback) ->
data = ""
error = undefined
opts =
hostname: @settings.hostname
port: @settings.port
auth: @settings.authstring
method: options.method
path: settings.pathname + settings.apipath + options.path
rejectUnauthorized: (if "rejectUnauthorized" of @settings then @settings.rejectUnauthorized else true)
headers:
Accept: "application/json"
if options.method is "POST" or options.method is "PUT"
if options.data
opts.headers["content-type"] = "application/json"
opts.headers["content-length"] = options.data.length
else
error = new Error("Missing POST Data")
error.type = "BAD_REQUEST"
return callback(error)
else
# nginx requires content-length header also for DELETE requests
opts.headers["content-length"] = "0" if options.method is "DELETE"
protocol = (if (settings.protocol is "https:") then https else http)
request = protocol.request(opts, (response) ->
response.on "data", (chunk) ->
data += chunk.toString()
return
return callback(null, response.statusCode) if response.statusCode is 204
if response.statusCode is 401
error = new Error("Application Authorization Error")
error.type = "APPLICATION_ACCESS_DENIED"
return callback(error)
if response.statusCode is 403
error = new Error("Application Permission Denied")
error.type = "APPLICATION_PERMISSION_DENIED"
return callback(error)
response.on "end", ->
if response.headers["content-type"] isnt "application/json"
error = new Error("Invalid Response from Atlassian Crowd")
error.type = "INVALID_RESPONSE"
callback error
else
if data
data = JSON.parse(data)
if data.reason or data.message
if typeof data.reason is "undefined"
data.reason = "BAD_REQUEST"
data.message = "Invalid Request to Atlassian Crowd"
error = new Error(data.message)
error.type = data.reason
callback error
else
callback null, data
else
callback null, response.statusCode
return
)
if options.data
request.end options.data
else
request.end()
return
module.exports = AtlassianCrowd
| true | ###
@license
crowdutil
The MIT License (MIT)
Copyright (c) 2014 PI:NAME:<NAME>END_PI
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN crowdECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
AtlassianCrowd = require 'atlassian-crowd'
url = require 'url'
http = require 'http'
https = require 'https'
# Add a method to update user
AtlassianCrowd.prototype.user.update = (username, userObj, callback) ->
# MUST have username and object to update with
error = null
# Check for input
if !username || !userObj
# Missing Input
error = new Error "missing input"
error.type = "BAD_REQUEST"
else if(
typeof userObj["name"] != 'string' ||
typeof userObj["first-name"] != 'string' ||
typeof userObj["last-name"] != 'string' ||
typeof userObj["display-name"] != 'string' ||
typeof userObj["email"] != 'string' ||
typeof userObj["active"] != 'boolean'
)
# Missing field in the user object
error = new Error "missing input"
error.type = "BAD_REQUEST"
else if userObj.name != username
# MUST NOT be updating user's username
error = new Error "username missmatch"
error.type = "BAD_REQUEST"
if error
return callback(error)
# prune userobject of password since GET user?username=uid
# returns a user object WITH password field
if typeof userObj.password != 'undefined'
delete userObj.password
options =
method: 'PUT'
data: JSON.stringify(userObj)
path: "/user?username=#{username}"
_doRequest(options, (err, res) ->
return callback(err, res)
)
#
# The code below are copied from node-atlassian-crowd
# and converted to coffeescript by Js2coffee
#
# Copyright (c) 2012 PI:NAME:<NAME>END_PI
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
_doRequest = (options, callback) ->
data = ""
error = undefined
opts =
hostname: @settings.hostname
port: @settings.port
auth: @settings.authstring
method: options.method
path: settings.pathname + settings.apipath + options.path
rejectUnauthorized: (if "rejectUnauthorized" of @settings then @settings.rejectUnauthorized else true)
headers:
Accept: "application/json"
if options.method is "POST" or options.method is "PUT"
if options.data
opts.headers["content-type"] = "application/json"
opts.headers["content-length"] = options.data.length
else
error = new Error("Missing POST Data")
error.type = "BAD_REQUEST"
return callback(error)
else
# nginx requires content-length header also for DELETE requests
opts.headers["content-length"] = "0" if options.method is "DELETE"
protocol = (if (settings.protocol is "https:") then https else http)
request = protocol.request(opts, (response) ->
response.on "data", (chunk) ->
data += chunk.toString()
return
return callback(null, response.statusCode) if response.statusCode is 204
if response.statusCode is 401
error = new Error("Application Authorization Error")
error.type = "APPLICATION_ACCESS_DENIED"
return callback(error)
if response.statusCode is 403
error = new Error("Application Permission Denied")
error.type = "APPLICATION_PERMISSION_DENIED"
return callback(error)
response.on "end", ->
if response.headers["content-type"] isnt "application/json"
error = new Error("Invalid Response from Atlassian Crowd")
error.type = "INVALID_RESPONSE"
callback error
else
if data
data = JSON.parse(data)
if data.reason or data.message
if typeof data.reason is "undefined"
data.reason = "BAD_REQUEST"
data.message = "Invalid Request to Atlassian Crowd"
error = new Error(data.message)
error.type = data.reason
callback error
else
callback null, data
else
callback null, response.statusCode
return
)
if options.data
request.end options.data
else
request.end()
return
module.exports = AtlassianCrowd
|
[
{
"context": "# @author mr.doob / http://mrdoob.com/\n# @author aladjev.andrew@gma",
"end": 17,
"score": 0.789387583732605,
"start": 10,
"tag": "NAME",
"value": "mr.doob"
},
{
"context": "# @author mr.doob / http://mrdoob.com/\n# @author aladjev.andrew@gmail.com\n\n#= require new_src... | source/javascripts/new_src/core/ray.coffee | andrew-aladev/three.js | 0 | # @author mr.doob / http://mrdoob.com/
# @author aladjev.andrew@gmail.com
#= require new_src/core/vector_3
# Ray caster: intersects a ray (origin + direction) against Particle and
# Mesh scene objects, producing {distance, point, face, object} records.
class Ray
  # Faces whose plane is closer than this to parallel with the ray are
  # skipped to avoid numeric blow-up in the plane-distance division.
  precision: 0.0001

  constructor: (origin, direction) ->
    @origin = origin or new THREE.Vector3()
    @direction = direction or new THREE.Vector3()
    # Preallocated scratch vectors, reused across calls to avoid
    # per-intersection allocation.
    @a = new THREE.Vector3()
    @b = new THREE.Vector3()
    @c = new THREE.Vector3()
    @d = new THREE.Vector3()
    @originCopy = new THREE.Vector3()
    @directionCopy = new THREE.Vector3()
    @vector = new THREE.Vector3()
    @normal = new THREE.Vector3()
    @intersectPoint = new THREE.Vector3()
    @v0 = new THREE.Vector3()
    @v1 = new THREE.Vector3()
    @v2 = new THREE.Vector3()

  # Override the parallel-plane tolerance used by _intersectObject.
  setPrecision: (value) ->
    @precision = value

  # Intersect this ray with a single object; returns an array of hit
  # records (possibly empty).
  _intersectObject: (object) ->
    intersects = []
    if object instanceof THREE.Particle
      distance = @distanceFromIntersection @origin, @direction, object.matrixWorld.getPosition()
      if distance > object.scale.x
        return []
      # BUGFIX: was `intersects.push =`, which assigned the hit record to
      # the array's `push` property instead of appending it.
      intersects.push
        distance: distance
        point: object.position
        face: null
        object: object
    else if object instanceof THREE.Mesh
      # Early reject against the (scale-adjusted) bounding sphere.
      distance = @distanceFromIntersection @origin, @direction, object.matrixWorld.getPosition()
      scale = THREE.Frustum.__v1.set(
        object.matrixWorld.getColumnX().length()
        object.matrixWorld.getColumnY().length()
        object.matrixWorld.getColumnZ().length()
      )
      if distance > object.geometry.boundingSphere.radius * Math.max(scale.x, Math.max(scale.y, scale.z))
        return intersects
      # Test every face of the geometry.
      geometry = object.geometry
      vertices = geometry.vertices
      object.matrixRotationWorld.extractRotation object.matrixWorld
      length = geometry.faces.length
      for f in [0...length]
        face = geometry.faces[f]
        @originCopy.copy @origin
        @directionCopy.copy @direction
        objMatrix = object.matrixWorld
        # Determine if the ray intersects the plane of the face.
        # Note: this works regardless of the direction of the face normal.
        @vector = objMatrix.multiplyVector3(@vector.copy(face.centroid)).subSelf @originCopy
        @normal = object.matrixRotationWorld.multiplyVector3 @normal.copy(face.normal)
        dot = @directionCopy.dot @normal
        # Bail if ray and plane are (near-)parallel.
        if Math.abs(dot) < @precision
          continue
        # Distance along the ray to the face plane.
        scalar = @normal.dot(@vector) / dot
        # Negative distance: plane is behind the ray origin.
        if scalar < 0
          continue
        if object.flipSided
          flip = dot > 0
        else
          flip = dot < 0
        if object.doubleSided or flip
          @intersectPoint.add @originCopy, @directionCopy.multiplyScalar(scalar)
          if face instanceof THREE.Face3
            @a = objMatrix.multiplyVector3 @a.copy(vertices[face.a])
            @b = objMatrix.multiplyVector3 @b.copy(vertices[face.b])
            @c = objMatrix.multiplyVector3 @c.copy(vertices[face.c])
            if @pointInFace3(@intersectPoint, @a, @b, @c)
              intersects.push
                distance: @originCopy.distanceTo @intersectPoint
                point: @intersectPoint.clone()
                face: face
                object: object
          else if face instanceof THREE.Face4
            @a = objMatrix.multiplyVector3(@a.copy(vertices[face.a]))
            @b = objMatrix.multiplyVector3(@b.copy(vertices[face.b]))
            @c = objMatrix.multiplyVector3(@c.copy(vertices[face.c]))
            @d = objMatrix.multiplyVector3(@d.copy(vertices[face.d]))
            # Quad tested as two triangles: (a,b,d) and (b,c,d).
            if @pointInFace3(@intersectPoint, @a, @b, @d) or @pointInFace3(@intersectPoint, @b, @c, @d)
              intersects.push
                distance: @originCopy.distanceTo @intersectPoint
                point: @intersectPoint.clone()
                face: face
                object: object
    return intersects

  # Intersect the ray against every object in the list; hits are returned
  # sorted nearest-first.
  intersectObjects: (objects) ->
    intersects = []
    for object in objects
      # BUGFIX: Array::concat returns a new array; the original discarded
      # the result, so this method always returned an empty list.
      intersects = intersects.concat @_intersectObject(object)
    intersects.sort (a, b) ->
      a.distance - b.distance
    intersects

  # Distance from `position` to its perpendicular projection onto the ray
  # defined by `origin` and (unit) `direction`.
  distanceFromIntersection: (origin, direction, position) ->
    @v0.sub position, origin
    dot = @v0.dot direction
    intersect = @v1.add origin, @v2.copy(direction).multiplyScalar(dot)
    distance = position.distanceTo intersect

  # Barycentric point-in-triangle test.
  # http://www.blackpawn.com/texts/pointinpoly/default.html
  pointInFace3: (p, a, b, c) ->
    @v0.sub c, a
    @v1.sub b, a
    @v2.sub p, a
    dot00 = @v0.dot @v0
    dot01 = @v0.dot @v1
    dot02 = @v0.dot @v2
    dot11 = @v1.dot @v1
    dot12 = @v1.dot @v2
    invDenom = 1 / (dot00 * dot11 - dot01 * dot01)
    u = (dot11 * dot02 - dot01 * dot12) * invDenom
    v = (dot00 * dot12 - dot01 * dot02) * invDenom
    (u >= 0) and (v >= 0) and (u + v < 1)
this
namespace "THREE", (exports) ->
exports.Ray = Ray | 208581 | # @author <NAME> / http://mrdoob.com/
# @author <EMAIL>
#= require new_src/core/vector_3
class Ray
precision: 0.0001
constructor: (origin, direction) ->
@origin = origin or new THREE.Vector3()
@direction = direction or new THREE.Vector3()
@a = new THREE.Vector3()
@b = new THREE.Vector3()
@c = new THREE.Vector3()
@d = new THREE.Vector3()
@originCopy = new THREE.Vector3()
@directionCopy = new THREE.Vector3()
@vector = new THREE.Vector3()
@normal = new THREE.Vector3()
@intersectPoint = new THREE.Vector3()
@v0 = new THREE.Vector3()
@v1 = new THREE.Vector3()
@v2 = new THREE.Vector3()
setPrecision: (value) ->
@precision = value
_intersectObject: (object) ->
intersects = []
if object instanceof THREE.Particle
distance = @distanceFromIntersection @origin, @direction, object.matrixWorld.getPosition()
if distance > object.scale.x
return []
intersects.push =
distance: distance
point: object.position
face: null
object: object
else if object instanceof THREE.Mesh
# Checking boundingSphere
distance = @distanceFromIntersection @origin, @direction, object.matrixWorld.getPosition()
scale = THREE.Frustum.__v1.set(
object.matrixWorld.getColumnX().length()
object.matrixWorld.getColumnY().length()
object.matrixWorld.getColumnZ().length()
)
if distance > object.geometry.boundingSphere.radius * Math.max(scale.x, Math.max(scale.y, scale.z))
return intersects
# Checking faces
geometry = object.geometry
vertices = geometry.vertices
object.matrixRotationWorld.extractRotation object.matrixWorld
length = geometry.faces.length
for f in [0...length]
face = geometry.faces[f]
@originCopy.copy @origin
@directionCopy.copy @direction
objMatrix = object.matrixWorld;
# determine if ray intersects the plane of the face
# note: this works regardless of the direction of the face normal
@vector = objMatrix.multiplyVector3(@vector.copy(face.centroid)).subSelf @originCopy
@normal = object.matrixRotationWorld.multiplyVector3 @normal.copy(face.normal)
dot = @directionCopy.dot @normal
# bail if ray and plane are parallel
if Math.abs(dot) < @precision
continue
# calc distance to plane
scalar = @normal.dot(@vector) / dot
# if negative distance, then plane is behind ray
if scalar < 0
continue
if object.flipSided
flip = dot > 0
else
flip = dot < 0
if object.doubleSided or flip
@intersectPoint.add @originCopy, @directionCopy.multiplyScalar(scalar)
if face instanceof THREE.Face3
@a = objMatrix.multiplyVector3 @a.copy(vertices[face.a])
@b = objMatrix.multiplyVector3 @b.copy(vertices[face.b])
@c = objMatrix.multiplyVector3 @c.copy(vertices[face.c])
if @pointInFace3(@intersectPoint, @a, @b, @c)
intersects.push
distance: @originCopy.distanceTo @intersectPoint
point: @intersectPoint.clone()
face: face
object: object
else if face instanceof THREE.Face4
@a = objMatrix.multiplyVector3(@a.copy(vertices[face.a]))
@b = objMatrix.multiplyVector3(@b.copy(vertices[face.b]))
@c = objMatrix.multiplyVector3(@c.copy(vertices[face.c]))
@d = objMatrix.multiplyVector3(@d.copy(vertices[face.d]))
if @pointInFace3(@intersectPoint, @a, @b, @d) or @pointInFace3(@intersectPoint, @b, @c, @d)
intersects.push
distance: @originCopy.distanceTo @intersectPoint
point: @intersectPoint.clone()
face: face
object: object
return intersects
intersectObjects: (objects) ->
intersects = []
length = objects.length
for i in [0...length]
intersects.concat @_intersectObject(objects[i])
intersects.sort (a, b) ->
a.distance - b.distance
intersects
distanceFromIntersection: (origin, direction, position) ->
@v0.sub position, origin
dot = @v0.dot direction
intersect = @v1.add origin, @v2.copy(direction).multiplyScalar(dot)
distance = position.distanceTo intersect
# http://www.blackpawn.com/texts/pointinpoly/default.html
pointInFace3: (p, a, b, c) ->
@v0.sub c, a
@v1.sub b, a
@v2.sub p, a
dot00 = @v0.dot @v0
dot01 = @v0.dot @v1
dot02 = @v0.dot @v2
dot11 = @v1.dot @v1
dot12 = @v1.dot @v2
invDenom = 1 / (dot00 * dot11 - dot01 * dot01)
u = (dot11 * dot02 - dot01 * dot12) * invDenom
v = (dot00 * dot12 - dot01 * dot02) * invDenom
(u >= 0) and (v >= 0) and (u + v < 1)
this
namespace "THREE", (exports) ->
exports.Ray = Ray | true | # @author PI:NAME:<NAME>END_PI / http://mrdoob.com/
# @author PI:EMAIL:<EMAIL>END_PI
#= require new_src/core/vector_3
class Ray
precision: 0.0001
constructor: (origin, direction) ->
@origin = origin or new THREE.Vector3()
@direction = direction or new THREE.Vector3()
@a = new THREE.Vector3()
@b = new THREE.Vector3()
@c = new THREE.Vector3()
@d = new THREE.Vector3()
@originCopy = new THREE.Vector3()
@directionCopy = new THREE.Vector3()
@vector = new THREE.Vector3()
@normal = new THREE.Vector3()
@intersectPoint = new THREE.Vector3()
@v0 = new THREE.Vector3()
@v1 = new THREE.Vector3()
@v2 = new THREE.Vector3()
setPrecision: (value) ->
@precision = value
_intersectObject: (object) ->
intersects = []
if object instanceof THREE.Particle
distance = @distanceFromIntersection @origin, @direction, object.matrixWorld.getPosition()
if distance > object.scale.x
return []
intersects.push =
distance: distance
point: object.position
face: null
object: object
else if object instanceof THREE.Mesh
# Checking boundingSphere
distance = @distanceFromIntersection @origin, @direction, object.matrixWorld.getPosition()
scale = THREE.Frustum.__v1.set(
object.matrixWorld.getColumnX().length()
object.matrixWorld.getColumnY().length()
object.matrixWorld.getColumnZ().length()
)
if distance > object.geometry.boundingSphere.radius * Math.max(scale.x, Math.max(scale.y, scale.z))
return intersects
# Checking faces
geometry = object.geometry
vertices = geometry.vertices
object.matrixRotationWorld.extractRotation object.matrixWorld
length = geometry.faces.length
for f in [0...length]
face = geometry.faces[f]
@originCopy.copy @origin
@directionCopy.copy @direction
objMatrix = object.matrixWorld;
# determine if ray intersects the plane of the face
# note: this works regardless of the direction of the face normal
@vector = objMatrix.multiplyVector3(@vector.copy(face.centroid)).subSelf @originCopy
@normal = object.matrixRotationWorld.multiplyVector3 @normal.copy(face.normal)
dot = @directionCopy.dot @normal
# bail if ray and plane are parallel
if Math.abs(dot) < @precision
continue
# calc distance to plane
scalar = @normal.dot(@vector) / dot
# if negative distance, then plane is behind ray
if scalar < 0
continue
if object.flipSided
flip = dot > 0
else
flip = dot < 0
if object.doubleSided or flip
@intersectPoint.add @originCopy, @directionCopy.multiplyScalar(scalar)
if face instanceof THREE.Face3
@a = objMatrix.multiplyVector3 @a.copy(vertices[face.a])
@b = objMatrix.multiplyVector3 @b.copy(vertices[face.b])
@c = objMatrix.multiplyVector3 @c.copy(vertices[face.c])
if @pointInFace3(@intersectPoint, @a, @b, @c)
intersects.push
distance: @originCopy.distanceTo @intersectPoint
point: @intersectPoint.clone()
face: face
object: object
else if face instanceof THREE.Face4
@a = objMatrix.multiplyVector3(@a.copy(vertices[face.a]))
@b = objMatrix.multiplyVector3(@b.copy(vertices[face.b]))
@c = objMatrix.multiplyVector3(@c.copy(vertices[face.c]))
@d = objMatrix.multiplyVector3(@d.copy(vertices[face.d]))
if @pointInFace3(@intersectPoint, @a, @b, @d) or @pointInFace3(@intersectPoint, @b, @c, @d)
intersects.push
distance: @originCopy.distanceTo @intersectPoint
point: @intersectPoint.clone()
face: face
object: object
return intersects
intersectObjects: (objects) ->
intersects = []
length = objects.length
for i in [0...length]
intersects.concat @_intersectObject(objects[i])
intersects.sort (a, b) ->
a.distance - b.distance
intersects
distanceFromIntersection: (origin, direction, position) ->
@v0.sub position, origin
dot = @v0.dot direction
intersect = @v1.add origin, @v2.copy(direction).multiplyScalar(dot)
distance = position.distanceTo intersect
# http://www.blackpawn.com/texts/pointinpoly/default.html
pointInFace3: (p, a, b, c) ->
@v0.sub c, a
@v1.sub b, a
@v2.sub p, a
dot00 = @v0.dot @v0
dot01 = @v0.dot @v1
dot02 = @v0.dot @v2
dot11 = @v1.dot @v1
dot12 = @v1.dot @v2
invDenom = 1 / (dot00 * dot11 - dot01 * dot01)
u = (dot11 * dot02 - dot01 * dot12) * invDenom
v = (dot00 * dot12 - dot01 * dot02) * invDenom
(u >= 0) and (v >= 0) and (u + v < 1)
this
namespace "THREE", (exports) ->
exports.Ray = Ray |
[
{
"context": "v1Api =\n\turl: \"http://localhost:5000\"\n\tuser: 'overleaf'\n\tpass: 'password'\n\nhttpAuthUser = \"sharelatex\"\nh",
"end": 54,
"score": 0.9941836595535278,
"start": 46,
"tag": "USERNAME",
"value": "overleaf"
},
{
"context": "\"http://localhost:5000\"\n\tuser: 'overl... | test/acceptance/config/settings.test.coffee | kingzevin/web-microservice | 0 | v1Api =
url: "http://localhost:5000"
user: 'overleaf'
pass: 'password'
# Basic-auth credentials that internal services use when calling web.
httpAuthUser = "sharelatex"
httpAuthPass = "password"
httpAuthUsers = {}
httpAuthUsers[httpAuthUser] = httpAuthPass
# Acceptance-test settings overrides for the web service.
module.exports =
	enableSubscriptions: true
	httpAuthUsers: httpAuthUsers
	apis:
		web:
			user: httpAuthUser
			pass: httpAuthPass
		v1:
			url: v1Api.url
			user: v1Api.user
			pass: v1Api.pass
		recurly:
			# Set up our own mock recurly server
			url: 'http://localhost:6034'
			subdomain: 'test'
	# for registration via SL, set enableLegacyRegistration to true
	# for registration via Overleaf v1, set enableLegacyLogin to true
	# Currently, acceptance tests require enableLegacyRegistration.
	enableLegacyRegistration: true
	# Per-plan feature flags; `features` is also captured as a local so the
	# plans/defaultFeatures entries below can reference the same objects.
	features: features =
		v1_free:
			collaborators: 1
			dropbox: false
			versioning: false
			github: true
			gitBridge: true
			templates: false
			references: false
			referencesSearch: false
			mendeley: true
			compileTimeout: 60
			compileGroup: "standard"
			trackChanges: false
		personal:
			collaborators: 1
			dropbox: false
			versioning: false
			github: false
			gitBridge: false
			templates: false
			references: false
			referencesSearch: false
			mendeley: false
			compileTimeout: 60
			compileGroup: "standard"
			trackChanges: false
		collaborator:
			collaborators: 10
			dropbox: true
			versioning: true
			github: true
			gitBridge: true
			templates: true
			references: true
			referencesSearch: true
			mendeley: true
			compileTimeout: 180
			compileGroup: "priority"
			trackChanges: true
		professional:
			collaborators: -1
			dropbox: true
			versioning: true
			github: true
			gitBridge: true
			templates: true
			references: true
			referencesSearch: true
			mendeley: true
			compileTimeout: 180
			compileGroup: "priority"
			trackChanges: true
	defaultFeatures: features.personal
	defaultPlanCode: 'personal'
	institutionPlanCode: 'professional'
	# Purchasable plans; prices are integers (presumably cents — TODO confirm
	# against the billing code).
	plans: plans = [{
		planCode: "v1_free"
		name: "V1 Free"
		price: 0
		features: features.v1_free
	},{
		planCode: "personal"
		name: "Personal"
		price: 0
		features: features.personal
	},{
		planCode: "collaborator"
		name: "Collaborator"
		price: 1500
		features: features.collaborator
	},{
		planCode: "professional"
		name: "Professional"
		price: 3000
		features: features.professional
	}]
	# Feature upgrades keyed by an integer tier (presumably referral-bonus
	# levels — verify against the bonus-features code).
	bonus_features:
		1:
			collaborators: 2
			dropbox: false
			versioning: false
		3:
			collaborators: 4
			dropbox: false
			versioning: false
		6:
			collaborators: 4
			dropbox: true
			versioning: true
		9:
			collaborators: -1
			dropbox: true
			versioning: true
	# Routes proxied through to the v1 API (path may be a string or a
	# function of the route params).
	proxyUrls:
		'/institutions/list': { baseUrl: v1Api.url, path: '/universities/list' }
		'/institutions/list/:id':
			baseUrl: v1Api.url
			path: (params) -> "/universities/list/#{params.id}"
		'/institutions/domains': { baseUrl: v1Api.url, path: '/university/domains' }
		'/proxy/missing/baseUrl': path: '/foo/bar'
		'/proxy/get_and_post': {
			methods: ['get', 'post'],
			path: '/destination/get_and_post'
		}
	# HTTP redirects used by the redirect-middleware acceptance tests.
	redirects:
		'/redirect/one': '/destination/one',
		'/redirect/get_and_post': {
			methods: ['get', 'post'],
			url: '/destination/get_and_post'
		},
		'/redirect/base_url': {
			baseUrl: 'https://example.com'
			url: '/destination/base_url'
		},
		'/redirect/params/:id': {
			url: (params) -> "/destination/#{params.id}/params"
		},
		'/redirect/qs': '/destination/qs'
		'/docs_v1': {
			url: '/docs'
		}
	# OAuth providers available during the tests.
	oauthProviders:
		'provider': {
			name: 'provider'
		},
		'collabratec': {
			name: 'collabratec'
		}
		'google': {
			name: 'google'
		},
	# setting to true since many features are enabled/disabled after availability of this
	# property (check Features.js)
	overleaf: true
| 73329 | v1Api =
url: "http://localhost:5000"
user: 'overleaf'
pass: '<PASSWORD>'
httpAuthUser = "sharelatex"
httpAuthPass = "<PASSWORD>"
httpAuthUsers = {}
httpAuthUsers[httpAuthUser] = httpAuthPass
module.exports =
enableSubscriptions: true
httpAuthUsers: httpAuthUsers
apis:
web:
user: httpAuthUser
pass: <PASSWORD>
v1:
url: v1Api.url
user: v1Api.user
pass: <PASSWORD>1Api.pass
recurly:
# Set up our own mock recurly server
url: 'http://localhost:6034'
subdomain: 'test'
# for registration via SL, set enableLegacyRegistration to true
# for registration via Overleaf v1, set enableLegacyLogin to true
# Currently, acceptance tests require enableLegacyRegistration.
enableLegacyRegistration: true
features: features =
v1_free:
collaborators: 1
dropbox: false
versioning: false
github: true
gitBridge: true
templates: false
references: false
referencesSearch: false
mendeley: true
compileTimeout: 60
compileGroup: "standard"
trackChanges: false
personal:
collaborators: 1
dropbox: false
versioning: false
github: false
gitBridge: false
templates: false
references: false
referencesSearch: false
mendeley: false
compileTimeout: 60
compileGroup: "standard"
trackChanges: false
collaborator:
collaborators: 10
dropbox: true
versioning: true
github: true
gitBridge: true
templates: true
references: true
referencesSearch: true
mendeley: true
compileTimeout: 180
compileGroup: "priority"
trackChanges: true
professional:
collaborators: -1
dropbox: true
versioning: true
github: true
gitBridge: true
templates: true
references: true
referencesSearch: true
mendeley: true
compileTimeout: 180
compileGroup: "priority"
trackChanges: true
defaultFeatures: features.personal
defaultPlanCode: 'personal'
institutionPlanCode: 'professional'
plans: plans = [{
planCode: "v1_free"
name: "V1 Free"
price: 0
features: features.v1_free
},{
planCode: "personal"
name: "Personal"
price: 0
features: features.personal
},{
planCode: "collaborator"
name: "<NAME>ator"
price: 1500
features: features.collaborator
},{
planCode: "professional"
name: "Professional"
price: 3000
features: features.professional
}]
bonus_features:
1:
collaborators: 2
dropbox: false
versioning: false
3:
collaborators: 4
dropbox: false
versioning: false
6:
collaborators: 4
dropbox: true
versioning: true
9:
collaborators: -1
dropbox: true
versioning: true
proxyUrls:
'/institutions/list': { baseUrl: v1Api.url, path: '/universities/list' }
'/institutions/list/:id':
baseUrl: v1Api.url
path: (params) -> "/universities/list/#{params.id}"
'/institutions/domains': { baseUrl: v1Api.url, path: '/university/domains' }
'/proxy/missing/baseUrl': path: '/foo/bar'
'/proxy/get_and_post': {
methods: ['get', 'post'],
path: '/destination/get_and_post'
}
redirects:
'/redirect/one': '/destination/one',
'/redirect/get_and_post': {
methods: ['get', 'post'],
url: '/destination/get_and_post'
},
'/redirect/base_url': {
baseUrl: 'https://example.com'
url: '/destination/base_url'
},
'/redirect/params/:id': {
url: (params) -> "/destination/#{params.id}/params"
},
'/redirect/qs': '/destination/qs'
'/docs_v1': {
url: '/docs'
}
oauthProviders:
'provider': {
name: 'provider'
},
'collabratec': {
name: 'collabratec'
}
'google': {
name: 'google'
},
# setting to true since many features are enabled/disabled after availability of this
# property (check Features.js)
overleaf: true
| true | v1Api =
url: "http://localhost:5000"
user: 'overleaf'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
httpAuthUser = "sharelatex"
httpAuthPass = "PI:PASSWORD:<PASSWORD>END_PI"
httpAuthUsers = {}
httpAuthUsers[httpAuthUser] = httpAuthPass
module.exports =
enableSubscriptions: true
httpAuthUsers: httpAuthUsers
apis:
web:
user: httpAuthUser
pass: PI:PASSWORD:<PASSWORD>END_PI
v1:
url: v1Api.url
user: v1Api.user
pass: PI:PASSWORD:<PASSWORD>END_PI1Api.pass
recurly:
# Set up our own mock recurly server
url: 'http://localhost:6034'
subdomain: 'test'
# for registration via SL, set enableLegacyRegistration to true
# for registration via Overleaf v1, set enableLegacyLogin to true
# Currently, acceptance tests require enableLegacyRegistration.
enableLegacyRegistration: true
features: features =
v1_free:
collaborators: 1
dropbox: false
versioning: false
github: true
gitBridge: true
templates: false
references: false
referencesSearch: false
mendeley: true
compileTimeout: 60
compileGroup: "standard"
trackChanges: false
personal:
collaborators: 1
dropbox: false
versioning: false
github: false
gitBridge: false
templates: false
references: false
referencesSearch: false
mendeley: false
compileTimeout: 60
compileGroup: "standard"
trackChanges: false
collaborator:
collaborators: 10
dropbox: true
versioning: true
github: true
gitBridge: true
templates: true
references: true
referencesSearch: true
mendeley: true
compileTimeout: 180
compileGroup: "priority"
trackChanges: true
professional:
collaborators: -1
dropbox: true
versioning: true
github: true
gitBridge: true
templates: true
references: true
referencesSearch: true
mendeley: true
compileTimeout: 180
compileGroup: "priority"
trackChanges: true
defaultFeatures: features.personal
defaultPlanCode: 'personal'
institutionPlanCode: 'professional'
plans: plans = [{
planCode: "v1_free"
name: "V1 Free"
price: 0
features: features.v1_free
},{
planCode: "personal"
name: "Personal"
price: 0
features: features.personal
},{
planCode: "collaborator"
name: "PI:NAME:<NAME>END_PIator"
price: 1500
features: features.collaborator
},{
planCode: "professional"
name: "Professional"
price: 3000
features: features.professional
}]
bonus_features:
1:
collaborators: 2
dropbox: false
versioning: false
3:
collaborators: 4
dropbox: false
versioning: false
6:
collaborators: 4
dropbox: true
versioning: true
9:
collaborators: -1
dropbox: true
versioning: true
proxyUrls:
'/institutions/list': { baseUrl: v1Api.url, path: '/universities/list' }
'/institutions/list/:id':
baseUrl: v1Api.url
path: (params) -> "/universities/list/#{params.id}"
'/institutions/domains': { baseUrl: v1Api.url, path: '/university/domains' }
'/proxy/missing/baseUrl': path: '/foo/bar'
'/proxy/get_and_post': {
methods: ['get', 'post'],
path: '/destination/get_and_post'
}
redirects:
'/redirect/one': '/destination/one',
'/redirect/get_and_post': {
methods: ['get', 'post'],
url: '/destination/get_and_post'
},
'/redirect/base_url': {
baseUrl: 'https://example.com'
url: '/destination/base_url'
},
'/redirect/params/:id': {
url: (params) -> "/destination/#{params.id}/params"
},
'/redirect/qs': '/destination/qs'
'/docs_v1': {
url: '/docs'
}
oauthProviders:
'provider': {
name: 'provider'
},
'collabratec': {
name: 'collabratec'
}
'google': {
name: 'google'
},
# setting to true since many features are enabled/disabled after availability of this
# property (check Features.js)
overleaf: true
|
[
{
"context": "y need to go over public channels\nhttpAuthUser = \"sharelatex\"\nhttpAuthPass = \"CRYPTO_RANDOM\" # Randomly genera",
"end": 171,
"score": 0.9858732223510742,
"start": 161,
"tag": "USERNAME",
"value": "sharelatex"
},
{
"context": "nnels\nhttpAuthUser = \"sharelatex\"\n... | settings.coffee | zz/docker-sharelatex | 0 | Path = require('path')
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUser = "sharelatex"
httpAuthPass = "CRYPTO_RANDOM" # Randomly generated for you
httpAuthUsers = {}
httpAuthUsers[httpAuthUser] = httpAuthPass
DATA_DIR = '/var/lib/sharelatex/data'
TMP_DIR = '/var/lib/sharelatex/tmp'
module.exports =
# Databases
# ---------
# ShareLaTeX's main persistant data store is MongoDB (http://www.mongodb.org/)
# Documentation about the URL connection string format can be found at:
#
# http://docs.mongodb.org/manual/reference/connection-string/
#
# The following works out of the box with Mongo's default settings:
mongo:
url : process.env["SHARELATEX_MONGO_URL"] or 'mongodb://dockerhost/sharelatex'
# Redis is used in ShareLaTeX for high volume queries, like real-time
# editing, and session management.
#
# The following config will work with Redis's default settings:
redis:
web: redisConfig =
host: process.env["SHARELATEX_REDIS_HOST"] or "dockerhost"
port: process.env["SHARELATEX_REDIS_PORT"] or "6379"
password: process.env["SHARELATEX_REDIS_PASS"] or ""
fairy: redisConfig
# The compile server (the clsi) uses a SQL database to cache files and
# meta-data. sqllite is the default, and the load is low enough that this will
# be fine in production (we use sqllite at sharelatex.com).
#
# If you want to configure a different database, see the Sequelize documentation
# for available options:
#
# https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage
#
mysql:
clsi:
database: "clsi"
username: "clsi"
password: ""
dialect: "sqlite"
storage: Path.join(DATA_DIR, "db.sqlite")
# File storage
# ------------
# ShareLaTeX can store binary files like images either locally or in Amazon
# S3. The default is locally:
filestore:
backend: "fs"
stores:
user_files: Path.join(DATA_DIR, "user_files")
# To use Amazon S3 as a storage backend, comment out the above config, and
# uncomment the following, filling in your key, secret, and bucket name:
#
# filestore:
# backend: "s3"
# stores:
# user_files: "BUCKET_NAME"
# s3:
# key: "AWS_KEY"
# secret: "AWS_SECRET"
#
# Local disk caching
# ------------------
path:
# If we ever need to write something to disk (e.g. incoming requests
# that need processing but may be too big for memory), then write
# them to disk here:
dumpFolder: Path.join(TMP_DIR, "dumpFolder")
# Where to write uploads before they are processed
uploadFolder: Path.join(TMP_DIR, "uploads")
# Where to write the project to disk before running LaTeX on it
compilesDir: Path.join(DATA_DIR, "compiles")
# Where to cache downloaded URLs for the CLSI
clsiCacheDir: Path.join(DATA_DIR, "cache")
# Server Config
# -------------
# Where your instance of ShareLaTeX can be found publicly. This is used
# when emails are sent out and in generated links:
siteUrl: siteUrl = process.env["SHARELATEX_SITE_URL"] or 'http://localhost'
# The name this is used to describe your ShareLaTeX Installation
appName: process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX (Community Edition)"
# The email address which users will be directed to as the main point of
# contact for this installation of ShareLaTeX.
adminEmail: process.env["SHARELATEX_ADMIN_EMAIL"] or "placeholder@example.com"
# If provided, a sessionSecret is used to sign cookies so that they cannot be
# spoofed. This is recommended.
security:
sessionSecret: "CRYPTO_RANDOM" # This was randomly generated for you
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUsers: httpAuthUsers
# Should javascript assets be served minified or not. Note that you will
# need to run `grunt compile:minify` within the web-sharelatex directory
# to generate these.
useMinifiedJs: true
# Should static assets be sent with a header to tell the browser to cache
# them. This should be false in development where changes are being made,
# but should be set to true in production.
cacheStaticAssets: true
# If you are running ShareLaTeX over https, set this to true to send the
# cookie with a secure flag (recommended).
secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
# If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
# then set this to true to allow it to correctly detect the forwarded IP
# address and http/https protocol information.
behindProxy: true
# Sending Email
# -------------
#
# You must configure a mail server to be able to send invite emails from
# ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
# documentation for available options:
#
# http://www.nodemailer.com/docs/transports
#
# email:
# fromAddress: ""
# replyTo: ""
# transport: "SES"
# parameters:
# AWSAccessKeyID: ""
# AWSSecretKey: ""
# Spell Check Languages
# ---------------------
#
# You must have the corresponding aspell dictionary installed to
# be able to use a language. Run `grunt check:aspell` to check which
# dictionaries you have installed. These should be set for the `code` for
# each language.
languages: [{
"code":"en",
"name":"English (American)"
},{
"code":"en_GB",
"name":"English (British)"
},{
"code":"af",
"name":"Africaans"
},{
"code":"am",
"name":"Amharic"
},{
"code":"ar",
"name":"Arabic"
},{
"code":"hy",
"name":"Armenian"
},{
"code":"gl",
"name":"Galician"
},{
"code":"eu",
"name":"Basque"
},{
"code":"bn",
"name":"Bengali"
},{
"code":"br",
"name":"Breton"
},{
"code":"bg",
"name":"Bulgarian"
},{
"code":"ca",
"name":"Catalan"
},{
"code":"hr",
"name":"Croatian"
},{
"code":"cs",
"name":"Czech"
},{
"code":"da",
"name":"Danish"
},{
"code":"nl",
"name":"Dutch"
},{
"code":"eo",
"name":"Esperanto"
},{
"code":"et",
"name":"Estonian"
},{
"code":"fo",
"name":"Faroese"
},{
"code":"fr",
"name":"French"
},{
"code":"de",
"name":"German"
},{
"code":"el",
"name":"Greek"
},{
"code":"gu",
"name":"Gujarati"
},{
"code":"he",
"name":"Hebrew"
},{
"code":"hi",
"name":"Hindi"
},{
"code":"hu",
"name":"Hungarian"
},{
"code":"is",
"name":"Icelandic"
},{
"code":"id",
"name":"Indonesian"
},{
"code":"ga",
"name":"Irish"
},{
"code":"it",
"name":"Italian"
},{
"code":"kn",
"name":"Kannada"
},{
"code":"kk",
"name":"Kazakh"
},{
"code":"ku",
"name":"Kurdish"
},{
"code":"lv",
"name":"Latvian"
},{
"code":"lt",
"name":"Lithuanian"
},{
"code":"ml",
"name":"Malayalam"
},{
"code":"mr",
"name":"Marathi"
},{
"code":"nr",
"name":"Ndebele"
},{
"code":"ns",
"name":"Northern Sotho"
},{
"code":"no",
"name":"Norwegian"
},{
"code":"or",
"name":"Oriya"
},{
"code":"fa",
"name":"Persian"
},{
"code":"pl",
"name":"Polish"
},{
"code":"pt_BR",
"name":"Portuguese (Brazilian)"
},{
"code":"pt_PT",
"name":"Portuguese (European)"
},{
"code":"pa",
"name":"Punjabi"
},{
"code":"ro",
"name":"Romanian"
},{
"code":"ru",
"name":"Russian"
},{
"code":"sk",
"name":"Slovak"
},{
"code":"sl",
"name":"Slovenian"
},{
"code":"st",
"name":"Southern Sotho"
},{
"code":"es",
"name":"Spanish"
},{
"code":"ss",
"name":"Swazi"
},{
"code":"sv",
"name":"Swedish"
},{
"code":"tl",
"name":"Tagalog"
},{
"code":"ta",
"name":"Tamil"
},{
"code":"te",
"name":"Telugu"
},{
"code":"ts",
"name":"Tsonga"
},{
"code":"tn",
"name":"Tswana"
},{
"code":"uk",
"name":"Ukrainian"
},{
"code":"hsb",
"name":"Upper Sorbian"
},{
"code":"uz",
"name":"Uzbek"
},{
"code":"cy",
"name":"Welsh"
},{
"code":"xh",
"name":"Xhosa"
},{
"code":"zu",
"name":"Zulu"
}
]
# Service locations
# -----------------
# ShareLaTeX is comprised of many small services, which each expose
# an HTTP API running on a different port. Generally you
# can leave these as they are unless you have some other services
# running which conflict, or want to run the web process on port 80.
# internal:
# web:
# port: webPort = 3000
# host: "localhost"
# documentupdater:
# port: docUpdaterPort = 3003
# host: "localhost"
# filestore:
# port: filestorePort = 3009
# host: "localhost"
# chat:
# port: chatPort = 3010
# host: "localhost"
# tags:
# port: tagsPort = 3012
# host: "localhost"
# clsi:
# port: clsiPort = 3013
# host: "localhost"
# trackchanges:
# port: trackchangesPort = 3015
# host: "localhost"
# docstore:
# port: docstorePort = 3016
# host: "localhost"
# spelling:
# port: spellingPort = 3005
# host: "localhost"
# If you change the above config, or run some services on remote servers,
# you need to tell the other services where to find them:
apis:
web:
url: "http://localhost:3000"
user: httpAuthUser
pass: httpAuthPass
# documentupdater:
# url : "http://localhost:#{docUpdaterPort}"
# clsi:
# url: "http://localhost:#{clsiPort}"
# filestore:
# url: "http://localhost:#{filestorePort}"
# trackchanges:
# url: "http://localhost:#{trackchangesPort}"
# docstore:
# url: "http://localhost:#{docstorePort}"
# tags:
# url: "http://localhost:#{tagsPort}"
# spelling:
# url: "http://localhost:#{spellingPort}"
# chat:
# url: "http://localhost:#{chatPort}"
# With lots of incoming and outgoing HTTP connections to different services,
# sometimes long running, it is a good idea to increase the default number
# of sockets that Node will hold open.
http = require('http')
http.globalAgent.maxSockets = 300
https = require('https')
https.globalAgent.maxSockets = 300
| 13763 | Path = require('path')
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUser = "sharelatex"
httpAuthPass = "<PASSWORD>" # Randomly generated for you
httpAuthUsers = {}
httpAuthUsers[httpAuthUser] = httpAuthPass
DATA_DIR = '/var/lib/sharelatex/data'
TMP_DIR = '/var/lib/sharelatex/tmp'
module.exports =
# Databases
# ---------
# ShareLaTeX's main persistant data store is MongoDB (http://www.mongodb.org/)
# Documentation about the URL connection string format can be found at:
#
# http://docs.mongodb.org/manual/reference/connection-string/
#
# The following works out of the box with Mongo's default settings:
mongo:
url : process.env["SHARELATEX_MONGO_URL"] or 'mongodb://dockerhost/sharelatex'
# Redis is used in ShareLaTeX for high volume queries, like real-time
# editing, and session management.
#
# The following config will work with Redis's default settings:
redis:
web: redisConfig =
host: process.env["SHARELATEX_REDIS_HOST"] or "dockerhost"
port: process.env["SHARELATEX_REDIS_PORT"] or "6379"
password: process.env["SHARELATEX_REDIS_PASS"] or ""
fairy: redisConfig
# The compile server (the clsi) uses a SQL database to cache files and
# meta-data. sqllite is the default, and the load is low enough that this will
# be fine in production (we use sqllite at sharelatex.com).
#
# If you want to configure a different database, see the Sequelize documentation
# for available options:
#
# https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage
#
mysql:
clsi:
database: "clsi"
username: "clsi"
password: ""
dialect: "sqlite"
storage: Path.join(DATA_DIR, "db.sqlite")
# File storage
# ------------
# ShareLaTeX can store binary files like images either locally or in Amazon
# S3. The default is locally:
filestore:
backend: "fs"
stores:
user_files: Path.join(DATA_DIR, "user_files")
# To use Amazon S3 as a storage backend, comment out the above config, and
# uncomment the following, filling in your key, secret, and bucket name:
#
# filestore:
# backend: "s3"
# stores:
# user_files: "BUCKET_NAME"
# s3:
# key: "AWS_KEY"
# secret: "AWS_SECRET"
#
# Local disk caching
# ------------------
path:
# If we ever need to write something to disk (e.g. incoming requests
# that need processing but may be too big for memory), then write
# them to disk here:
dumpFolder: Path.join(TMP_DIR, "dumpFolder")
# Where to write uploads before they are processed
uploadFolder: Path.join(TMP_DIR, "uploads")
# Where to write the project to disk before running LaTeX on it
compilesDir: Path.join(DATA_DIR, "compiles")
# Where to cache downloaded URLs for the CLSI
clsiCacheDir: Path.join(DATA_DIR, "cache")
# Server Config
# -------------
# Where your instance of ShareLaTeX can be found publicly. This is used
# when emails are sent out and in generated links:
siteUrl: siteUrl = process.env["SHARELATEX_SITE_URL"] or 'http://localhost'
# The name this is used to describe your ShareLaTeX Installation
appName: process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX (Community Edition)"
# The email address which users will be directed to as the main point of
# contact for this installation of ShareLaTeX.
adminEmail: process.env["SHARELATEX_ADMIN_EMAIL"] or "<EMAIL>"
# If provided, a sessionSecret is used to sign cookies so that they cannot be
# spoofed. This is recommended.
security:
sessionSecret: "<PASSWORD>" # This was randomly generated for you
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUsers: httpAuthUsers
# Should javascript assets be served minified or not. Note that you will
# need to run `grunt compile:minify` within the web-sharelatex directory
# to generate these.
useMinifiedJs: true
# Should static assets be sent with a header to tell the browser to cache
# them. This should be false in development where changes are being made,
# but should be set to true in production.
cacheStaticAssets: true
# If you are running ShareLaTeX over https, set this to true to send the
# cookie with a secure flag (recommended).
secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
# If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
# then set this to true to allow it to correctly detect the forwarded IP
# address and http/https protocol information.
behindProxy: true
# Sending Email
# -------------
#
# You must configure a mail server to be able to send invite emails from
# ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
# documentation for available options:
#
# http://www.nodemailer.com/docs/transports
#
# email:
# fromAddress: ""
# replyTo: ""
# transport: "SES"
# parameters:
# AWSAccessKeyID: ""
# AWSSecretKey: ""
# Spell Check Languages
# ---------------------
#
# You must have the corresponding aspell dictionary installed to
# be able to use a language. Run `grunt check:aspell` to check which
# dictionaries you have installed. These should be set for the `code` for
# each language.
languages: [{
"code":"en",
"name":"English (American)"
},{
"code":"en_GB",
"name":"English (British)"
},{
"code":"af",
"name":"Africaans"
},{
"code":"am",
"name":"Amharic"
},{
"code":"ar",
"name":"Arabic"
},{
"code":"hy",
"name":"Armenian"
},{
"code":"gl",
"name":"Galician"
},{
"code":"eu",
"name":"Basque"
},{
"code":"bn",
"name":"Bengali"
},{
"code":"br",
"name":"Breton"
},{
"code":"bg",
"name":"Bulgarian"
},{
"code":"ca",
"name":"Catalan"
},{
"code":"hr",
"name":"Croatian"
},{
"code":"cs",
"name":"Czech"
},{
"code":"da",
"name":"Danish"
},{
"code":"nl",
"name":"Dutch"
},{
"code":"eo",
"name":"Esperanto"
},{
"code":"et",
"name":"Estonian"
},{
"code":"fo",
"name":"Faroese"
},{
"code":"fr",
"name":"French"
},{
"code":"de",
"name":"German"
},{
"code":"el",
"name":"Greek"
},{
"code":"gu",
"name":"Gujarati"
},{
"code":"he",
"name":"Hebrew"
},{
"code":"hi",
"name":"Hindi"
},{
"code":"hu",
"name":"Hungarian"
},{
"code":"is",
"name":"Icelandic"
},{
"code":"id",
"name":"Indonesian"
},{
"code":"ga",
"name":"Irish"
},{
"code":"it",
"name":"Italian"
},{
"code":"kn",
"name":"Kannada"
},{
"code":"kk",
"name":"Kazakh"
},{
"code":"ku",
"name":"Kurdish"
},{
"code":"lv",
"name":"Latvian"
},{
"code":"lt",
"name":"Lithuanian"
},{
"code":"ml",
"name":"Malayalam"
},{
"code":"mr",
"name":"Marathi"
},{
"code":"nr",
"name":"Ndebele"
},{
"code":"ns",
"name":"Northern Sotho"
},{
"code":"no",
"name":"Norwegian"
},{
"code":"or",
"name":"Oriya"
},{
"code":"fa",
"name":"Persian"
},{
"code":"pl",
"name":"Polish"
},{
"code":"pt_BR",
"name":"Portuguese (Brazilian)"
},{
"code":"pt_PT",
"name":"Portuguese (European)"
},{
"code":"pa",
"name":"Punjabi"
},{
"code":"ro",
"name":"Romanian"
},{
"code":"ru",
"name":"Russian"
},{
"code":"sk",
"name":"Slovak"
},{
"code":"sl",
"name":"Slovenian"
},{
"code":"st",
"name":"Southern Sotho"
},{
"code":"es",
"name":"Spanish"
},{
"code":"ss",
"name":"Swazi"
},{
"code":"sv",
"name":"Swedish"
},{
"code":"tl",
"name":"Tagalog"
},{
"code":"ta",
"name":"Tamil"
},{
"code":"te",
"name":"Telugu"
},{
"code":"ts",
"name":"Tsonga"
},{
"code":"tn",
"name":"Tswana"
},{
"code":"uk",
"name":"Ukrainian"
},{
"code":"hsb",
"name":"Upper Sorbian"
},{
"code":"uz",
"name":"Uzbek"
},{
"code":"cy",
"name":"Welsh"
},{
"code":"xh",
"name":"Xhosa"
},{
"code":"zu",
"name":"Zulu"
}
]
# Service locations
# -----------------
# ShareLaTeX is comprised of many small services, which each expose
# an HTTP API running on a different port. Generally you
# can leave these as they are unless you have some other services
# running which conflict, or want to run the web process on port 80.
# internal:
# web:
# port: webPort = 3000
# host: "localhost"
# documentupdater:
# port: docUpdaterPort = 3003
# host: "localhost"
# filestore:
# port: filestorePort = 3009
# host: "localhost"
# chat:
# port: chatPort = 3010
# host: "localhost"
# tags:
# port: tagsPort = 3012
# host: "localhost"
# clsi:
# port: clsiPort = 3013
# host: "localhost"
# trackchanges:
# port: trackchangesPort = 3015
# host: "localhost"
# docstore:
# port: docstorePort = 3016
# host: "localhost"
# spelling:
# port: spellingPort = 3005
# host: "localhost"
# If you change the above config, or run some services on remote servers,
# you need to tell the other services where to find them:
apis:
web:
url: "http://localhost:3000"
user: httpAuthUser
pass: <PASSWORD>
# documentupdater:
# url : "http://localhost:#{docUpdaterPort}"
# clsi:
# url: "http://localhost:#{clsiPort}"
# filestore:
# url: "http://localhost:#{filestorePort}"
# trackchanges:
# url: "http://localhost:#{trackchangesPort}"
# docstore:
# url: "http://localhost:#{docstorePort}"
# tags:
# url: "http://localhost:#{tagsPort}"
# spelling:
# url: "http://localhost:#{spellingPort}"
# chat:
# url: "http://localhost:#{chatPort}"
# With lots of incoming and outgoing HTTP connections to different services,
# sometimes long running, it is a good idea to increase the default number
# of sockets that Node will hold open.
http = require('http')
http.globalAgent.maxSockets = 300
https = require('https')
https.globalAgent.maxSockets = 300
| true | Path = require('path')
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUser = "sharelatex"
httpAuthPass = "PI:PASSWORD:<PASSWORD>END_PI" # Randomly generated for you
httpAuthUsers = {}
httpAuthUsers[httpAuthUser] = httpAuthPass
DATA_DIR = '/var/lib/sharelatex/data'
TMP_DIR = '/var/lib/sharelatex/tmp'
module.exports =
# Databases
# ---------
# ShareLaTeX's main persistant data store is MongoDB (http://www.mongodb.org/)
# Documentation about the URL connection string format can be found at:
#
# http://docs.mongodb.org/manual/reference/connection-string/
#
# The following works out of the box with Mongo's default settings:
mongo:
url : process.env["SHARELATEX_MONGO_URL"] or 'mongodb://dockerhost/sharelatex'
# Redis is used in ShareLaTeX for high volume queries, like real-time
# editing, and session management.
#
# The following config will work with Redis's default settings:
redis:
web: redisConfig =
host: process.env["SHARELATEX_REDIS_HOST"] or "dockerhost"
port: process.env["SHARELATEX_REDIS_PORT"] or "6379"
password: process.env["SHARELATEX_REDIS_PASS"] or ""
fairy: redisConfig
# The compile server (the clsi) uses a SQL database to cache files and
# meta-data. sqllite is the default, and the load is low enough that this will
# be fine in production (we use sqllite at sharelatex.com).
#
# If you want to configure a different database, see the Sequelize documentation
# for available options:
#
# https://github.com/sequelize/sequelize/wiki/API-Reference-Sequelize#example-usage
#
mysql:
clsi:
database: "clsi"
username: "clsi"
password: ""
dialect: "sqlite"
storage: Path.join(DATA_DIR, "db.sqlite")
# File storage
# ------------
# ShareLaTeX can store binary files like images either locally or in Amazon
# S3. The default is locally:
filestore:
backend: "fs"
stores:
user_files: Path.join(DATA_DIR, "user_files")
# To use Amazon S3 as a storage backend, comment out the above config, and
# uncomment the following, filling in your key, secret, and bucket name:
#
# filestore:
# backend: "s3"
# stores:
# user_files: "BUCKET_NAME"
# s3:
# key: "AWS_KEY"
# secret: "AWS_SECRET"
#
# Local disk caching
# ------------------
path:
# If we ever need to write something to disk (e.g. incoming requests
# that need processing but may be too big for memory), then write
# them to disk here:
dumpFolder: Path.join(TMP_DIR, "dumpFolder")
# Where to write uploads before they are processed
uploadFolder: Path.join(TMP_DIR, "uploads")
# Where to write the project to disk before running LaTeX on it
compilesDir: Path.join(DATA_DIR, "compiles")
# Where to cache downloaded URLs for the CLSI
clsiCacheDir: Path.join(DATA_DIR, "cache")
# Server Config
# -------------
# Where your instance of ShareLaTeX can be found publicly. This is used
# when emails are sent out and in generated links:
siteUrl: siteUrl = process.env["SHARELATEX_SITE_URL"] or 'http://localhost'
# The name this is used to describe your ShareLaTeX Installation
appName: process.env["SHARELATEX_APP_NAME"] or "ShareLaTeX (Community Edition)"
# The email address which users will be directed to as the main point of
# contact for this installation of ShareLaTeX.
adminEmail: process.env["SHARELATEX_ADMIN_EMAIL"] or "PI:EMAIL:<EMAIL>END_PI"
# If provided, a sessionSecret is used to sign cookies so that they cannot be
# spoofed. This is recommended.
security:
sessionSecret: "PI:KEY:<PASSWORD>END_PI" # This was randomly generated for you
# These credentials are used for authenticating api requests
# between services that may need to go over public channels
httpAuthUsers: httpAuthUsers
# Should javascript assets be served minified or not. Note that you will
# need to run `grunt compile:minify` within the web-sharelatex directory
# to generate these.
useMinifiedJs: true
# Should static assets be sent with a header to tell the browser to cache
# them. This should be false in development where changes are being made,
# but should be set to true in production.
cacheStaticAssets: true
# If you are running ShareLaTeX over https, set this to true to send the
# cookie with a secure flag (recommended).
secureCookie: process.env["SHARELATEX_SECURE_COOKIE"]?
# If you are running ShareLaTeX behind a proxy (like Apache, Nginx, etc)
# then set this to true to allow it to correctly detect the forwarded IP
# address and http/https protocol information.
behindProxy: true
# Sending Email
# -------------
#
# You must configure a mail server to be able to send invite emails from
# ShareLaTeX. The config settings are passed to nodemailer. See the nodemailer
# documentation for available options:
#
# http://www.nodemailer.com/docs/transports
#
# email:
# fromAddress: ""
# replyTo: ""
# transport: "SES"
# parameters:
# AWSAccessKeyID: ""
# AWSSecretKey: ""
# Spell Check Languages
# ---------------------
#
# You must have the corresponding aspell dictionary installed to
# be able to use a language. Run `grunt check:aspell` to check which
# dictionaries you have installed. These should be set for the `code` for
# each language.
languages: [{
"code":"en",
"name":"English (American)"
},{
"code":"en_GB",
"name":"English (British)"
},{
"code":"af",
"name":"Africaans"
},{
"code":"am",
"name":"Amharic"
},{
"code":"ar",
"name":"Arabic"
},{
"code":"hy",
"name":"Armenian"
},{
"code":"gl",
"name":"Galician"
},{
"code":"eu",
"name":"Basque"
},{
"code":"bn",
"name":"Bengali"
},{
"code":"br",
"name":"Breton"
},{
"code":"bg",
"name":"Bulgarian"
},{
"code":"ca",
"name":"Catalan"
},{
"code":"hr",
"name":"Croatian"
},{
"code":"cs",
"name":"Czech"
},{
"code":"da",
"name":"Danish"
},{
"code":"nl",
"name":"Dutch"
},{
"code":"eo",
"name":"Esperanto"
},{
"code":"et",
"name":"Estonian"
},{
"code":"fo",
"name":"Faroese"
},{
"code":"fr",
"name":"French"
},{
"code":"de",
"name":"German"
},{
"code":"el",
"name":"Greek"
},{
"code":"gu",
"name":"Gujarati"
},{
"code":"he",
"name":"Hebrew"
},{
"code":"hi",
"name":"Hindi"
},{
"code":"hu",
"name":"Hungarian"
},{
"code":"is",
"name":"Icelandic"
},{
"code":"id",
"name":"Indonesian"
},{
"code":"ga",
"name":"Irish"
},{
"code":"it",
"name":"Italian"
},{
"code":"kn",
"name":"Kannada"
},{
"code":"kk",
"name":"Kazakh"
},{
"code":"ku",
"name":"Kurdish"
},{
"code":"lv",
"name":"Latvian"
},{
"code":"lt",
"name":"Lithuanian"
},{
"code":"ml",
"name":"Malayalam"
},{
"code":"mr",
"name":"Marathi"
},{
"code":"nr",
"name":"Ndebele"
},{
"code":"ns",
"name":"Northern Sotho"
},{
"code":"no",
"name":"Norwegian"
},{
"code":"or",
"name":"Oriya"
},{
"code":"fa",
"name":"Persian"
},{
"code":"pl",
"name":"Polish"
},{
"code":"pt_BR",
"name":"Portuguese (Brazilian)"
},{
"code":"pt_PT",
"name":"Portuguese (European)"
},{
"code":"pa",
"name":"Punjabi"
},{
"code":"ro",
"name":"Romanian"
},{
"code":"ru",
"name":"Russian"
},{
"code":"sk",
"name":"Slovak"
},{
"code":"sl",
"name":"Slovenian"
},{
"code":"st",
"name":"Southern Sotho"
},{
"code":"es",
"name":"Spanish"
},{
"code":"ss",
"name":"Swazi"
},{
"code":"sv",
"name":"Swedish"
},{
"code":"tl",
"name":"Tagalog"
},{
"code":"ta",
"name":"Tamil"
},{
"code":"te",
"name":"Telugu"
},{
"code":"ts",
"name":"Tsonga"
},{
"code":"tn",
"name":"Tswana"
},{
"code":"uk",
"name":"Ukrainian"
},{
"code":"hsb",
"name":"Upper Sorbian"
},{
"code":"uz",
"name":"Uzbek"
},{
"code":"cy",
"name":"Welsh"
},{
"code":"xh",
"name":"Xhosa"
},{
"code":"zu",
"name":"Zulu"
}
]
# Service locations
# -----------------
# ShareLaTeX is comprised of many small services, which each expose
# an HTTP API running on a different port. Generally you
# can leave these as they are unless you have some other services
# running which conflict, or want to run the web process on port 80.
# internal:
# web:
# port: webPort = 3000
# host: "localhost"
# documentupdater:
# port: docUpdaterPort = 3003
# host: "localhost"
# filestore:
# port: filestorePort = 3009
# host: "localhost"
# chat:
# port: chatPort = 3010
# host: "localhost"
# tags:
# port: tagsPort = 3012
# host: "localhost"
# clsi:
# port: clsiPort = 3013
# host: "localhost"
# trackchanges:
# port: trackchangesPort = 3015
# host: "localhost"
# docstore:
# port: docstorePort = 3016
# host: "localhost"
# spelling:
# port: spellingPort = 3005
# host: "localhost"
# If you change the above config, or run some services on remote servers,
# you need to tell the other services where to find them:
apis:
web:
url: "http://localhost:3000"
user: httpAuthUser
pass: PI:PASSWORD:<PASSWORD>END_PI
# documentupdater:
# url : "http://localhost:#{docUpdaterPort}"
# clsi:
# url: "http://localhost:#{clsiPort}"
# filestore:
# url: "http://localhost:#{filestorePort}"
# trackchanges:
# url: "http://localhost:#{trackchangesPort}"
# docstore:
# url: "http://localhost:#{docstorePort}"
# tags:
# url: "http://localhost:#{tagsPort}"
# spelling:
# url: "http://localhost:#{spellingPort}"
# chat:
# url: "http://localhost:#{chatPort}"
# With lots of incoming and outgoing HTTP connections to different services,
# sometimes long running, it is a good idea to increase the default number
# of sockets that Node will hold open.
http = require('http')
http.globalAgent.maxSockets = 300
https = require('https')
https.globalAgent.maxSockets = 300
|
[
{
"context": "tpBackend = $httpBackend\n\n for key in [\"A\", \"B\", \"C\"]\n @$httpBackend\n .whenGET(\"",
"end": 268,
"score": 0.690211296081543,
"start": 267,
"tag": "KEY",
"value": "B"
},
{
"context": "kend = $httpBackend\n\n for key in [\"A\", \"B\", \"... | spec/directive/PanelContainer.spec.coffee | awef/read.crx-3 | 2 | describe "[panelcontainer]", ->
"use strict"
beforeEach (done) ->
module "PanelContainer"
inject ($templateCache, $httpBackend, $compile, $rootScope) =>
@$templateCache = $templateCache
@$httpBackend = $httpBackend
for key in ["A", "B", "C"]
@$httpBackend
.whenGET("view/test#{key}.html")
.respond(
200,
"""
<div class="test#{key}">
<h1>view/test#{key}</h1>
</div>
"""
)
html = """
<div data-panelcontainer data-url="view:testA">
<div class="content" data-ng-include="templateUrl"></div>
</div>
"""
$rootScope.$apply =>
@scope = $rootScope.$new()
@element = $compile(html)(@scope)
return
@$templateCache.removeAll()
@$httpBackend.flush()
done()
return
return
afterEach ->
@$httpBackend.verifyNoOutstandingExpectation()
@$httpBackend.verifyNoOutstandingRequest()
return
describe "$scope.url", ->
it "指定されたURLに相当するテンプレートを読み込む", () ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testB")
expect(@element.find("h1").text()).toBe("view/testB")
return
describe "履歴の最先端以外の場所に居た場合", ->
beforeEach ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.url = "view:testC"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.prev()
@$templateCache.removeAll()
@$httpBackend.flush()
return
it "以降の履歴を捨てる", ->
@scope.url = "view:testA"
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testA")
expect(@scope.history.stack[@scope.history.stack.length - 1])
.toBe("view:testA")
return
return
return
describe "$scope.prev", ->
describe "戻るべきURLが無い場合", ->
it "何もしない", ->
expect(@scope.url).toBe("view:testA")
expect(@scope.prev()).toBeFalsy()
return
return
describe "戻るべきURLが有る場合", ->
beforeEach ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.url = "view:testC"
@$templateCache.removeAll()
@$httpBackend.flush()
return
it "戻る", ->
@scope.prev()
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testB")
expect(@element.find("h1").text()).toBe("view/testB")
expect(@element.attr("data-url")).toBe("view:testB")
return
return
return
describe "$scope.next", ->
describe "進むべきURLが無い場合", ->
it "何もしない", ->
expect(@scope.url).toBe("view:testA")
expect(@scope.next()).toBeFalsy()
return
return
describe "進むべきURLが有る場合", ->
beforeEach ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.url = "view:testC"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.prev()
@$templateCache.removeAll()
@$httpBackend.flush()
return
it "進む", ->
@scope.next()
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testC")
expect(@element.find("h1").text()).toBe("view/testC")
expect(@element.attr("data-url")).toBe("view:testC")
return
return
return
return
| 122265 | describe "[panelcontainer]", ->
"use strict"
beforeEach (done) ->
module "PanelContainer"
inject ($templateCache, $httpBackend, $compile, $rootScope) =>
@$templateCache = $templateCache
@$httpBackend = $httpBackend
for key in ["A", "<KEY>", "<KEY>"]
@$httpBackend
.whenGET("view/test#{key}.html")
.respond(
200,
"""
<div class="test#{key}">
<h1>view/test#{key}</h1>
</div>
"""
)
html = """
<div data-panelcontainer data-url="view:testA">
<div class="content" data-ng-include="templateUrl"></div>
</div>
"""
$rootScope.$apply =>
@scope = $rootScope.$new()
@element = $compile(html)(@scope)
return
@$templateCache.removeAll()
@$httpBackend.flush()
done()
return
return
afterEach ->
@$httpBackend.verifyNoOutstandingExpectation()
@$httpBackend.verifyNoOutstandingRequest()
return
describe "$scope.url", ->
it "指定されたURLに相当するテンプレートを読み込む", () ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testB")
expect(@element.find("h1").text()).toBe("view/testB")
return
describe "履歴の最先端以外の場所に居た場合", ->
beforeEach ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.url = "view:testC"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.prev()
@$templateCache.removeAll()
@$httpBackend.flush()
return
it "以降の履歴を捨てる", ->
@scope.url = "view:testA"
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testA")
expect(@scope.history.stack[@scope.history.stack.length - 1])
.toBe("view:testA")
return
return
return
describe "$scope.prev", ->
describe "戻るべきURLが無い場合", ->
it "何もしない", ->
expect(@scope.url).toBe("view:testA")
expect(@scope.prev()).toBeFalsy()
return
return
describe "戻るべきURLが有る場合", ->
beforeEach ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.url = "view:testC"
@$templateCache.removeAll()
@$httpBackend.flush()
return
it "戻る", ->
@scope.prev()
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testB")
expect(@element.find("h1").text()).toBe("view/testB")
expect(@element.attr("data-url")).toBe("view:testB")
return
return
return
describe "$scope.next", ->
describe "進むべきURLが無い場合", ->
it "何もしない", ->
expect(@scope.url).toBe("view:testA")
expect(@scope.next()).toBeFalsy()
return
return
describe "進むべきURLが有る場合", ->
beforeEach ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.url = "view:testC"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.prev()
@$templateCache.removeAll()
@$httpBackend.flush()
return
it "進む", ->
@scope.next()
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testC")
expect(@element.find("h1").text()).toBe("view/testC")
expect(@element.attr("data-url")).toBe("view:testC")
return
return
return
return
| true | describe "[panelcontainer]", ->
"use strict"
beforeEach (done) ->
module "PanelContainer"
inject ($templateCache, $httpBackend, $compile, $rootScope) =>
@$templateCache = $templateCache
@$httpBackend = $httpBackend
for key in ["A", "PI:KEY:<KEY>END_PI", "PI:KEY:<KEY>END_PI"]
@$httpBackend
.whenGET("view/test#{key}.html")
.respond(
200,
"""
<div class="test#{key}">
<h1>view/test#{key}</h1>
</div>
"""
)
html = """
<div data-panelcontainer data-url="view:testA">
<div class="content" data-ng-include="templateUrl"></div>
</div>
"""
$rootScope.$apply =>
@scope = $rootScope.$new()
@element = $compile(html)(@scope)
return
@$templateCache.removeAll()
@$httpBackend.flush()
done()
return
return
afterEach ->
@$httpBackend.verifyNoOutstandingExpectation()
@$httpBackend.verifyNoOutstandingRequest()
return
describe "$scope.url", ->
it "指定されたURLに相当するテンプレートを読み込む", () ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testB")
expect(@element.find("h1").text()).toBe("view/testB")
return
describe "履歴の最先端以外の場所に居た場合", ->
beforeEach ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.url = "view:testC"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.prev()
@$templateCache.removeAll()
@$httpBackend.flush()
return
it "以降の履歴を捨てる", ->
@scope.url = "view:testA"
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testA")
expect(@scope.history.stack[@scope.history.stack.length - 1])
.toBe("view:testA")
return
return
return
describe "$scope.prev", ->
describe "戻るべきURLが無い場合", ->
it "何もしない", ->
expect(@scope.url).toBe("view:testA")
expect(@scope.prev()).toBeFalsy()
return
return
describe "戻るべきURLが有る場合", ->
beforeEach ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.url = "view:testC"
@$templateCache.removeAll()
@$httpBackend.flush()
return
it "戻る", ->
@scope.prev()
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testB")
expect(@element.find("h1").text()).toBe("view/testB")
expect(@element.attr("data-url")).toBe("view:testB")
return
return
return
describe "$scope.next", ->
describe "進むべきURLが無い場合", ->
it "何もしない", ->
expect(@scope.url).toBe("view:testA")
expect(@scope.next()).toBeFalsy()
return
return
describe "進むべきURLが有る場合", ->
beforeEach ->
@scope.url = "view:testB"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.url = "view:testC"
@$templateCache.removeAll()
@$httpBackend.flush()
@scope.prev()
@$templateCache.removeAll()
@$httpBackend.flush()
return
it "進む", ->
@scope.next()
@$templateCache.removeAll()
@$httpBackend.flush()
expect(@scope.url).toBe("view:testC")
expect(@element.find("h1").text()).toBe("view/testC")
expect(@element.attr("data-url")).toBe("view:testC")
return
return
return
return
|
[
{
"context": "ct(@ci.commit).to.be.equal '3f93f2e4ddcf5a216d314d507e8579e99b21c8fb'\n\n describe 'buildId', ->\n it ",
"end": 714,
"score": 0.5376520752906799,
"start": 711,
"tag": "PASSWORD",
"value": "507"
},
{
"context": "ci.commit).to.be.equal '3f93f2e4ddcf5a216d314d507e8579e... | test/adaptor/circle-ci.coffee | holyshared/ci-detector | 3 | describe 'CircleCI', ->
beforeEach ->
@env = fixtures.circleCI
@ci = new CircleCI @env
describe 'isCurrentEnv', ->
context 'when circle-ci enviroment', ->
beforeEach ->
@ci = new CircleCI CIRCLECI: 'true'
it 'return true', ->
expect(@ci.isCurrentEnv()).to.be.true
context 'when not circle-ci enviroment', ->
beforeEach ->
@ci = new CircleCI {}
it 'return false', ->
expect(@ci.isCurrentEnv()).to.be.false
describe 'branch', ->
it 'return branch name', ->
expect(@ci.branch).to.be.equal 'master'
describe 'commit', ->
it 'return commit hash value', ->
expect(@ci.commit).to.be.equal '3f93f2e4ddcf5a216d314d507e8579e99b21c8fb'
describe 'buildId', ->
it 'return build id', ->
expect(@ci.buildId).to.be.equal '1'
describe 'buildNumber', ->
it 'return build number', ->
expect(@ci.buildId).to.be.equal '1'
describe 'buildJobNumber', ->
it 'return build job number', ->
expect(@ci.buildId).to.be.equal '1'
describe 'buildJobId', ->
it 'return build job id', ->
expect(@ci.buildId).to.be.equal '1'
| 46421 | describe 'CircleCI', ->
beforeEach ->
@env = fixtures.circleCI
@ci = new CircleCI @env
describe 'isCurrentEnv', ->
context 'when circle-ci enviroment', ->
beforeEach ->
@ci = new CircleCI CIRCLECI: 'true'
it 'return true', ->
expect(@ci.isCurrentEnv()).to.be.true
context 'when not circle-ci enviroment', ->
beforeEach ->
@ci = new CircleCI {}
it 'return false', ->
expect(@ci.isCurrentEnv()).to.be.false
describe 'branch', ->
it 'return branch name', ->
expect(@ci.branch).to.be.equal 'master'
describe 'commit', ->
it 'return commit hash value', ->
expect(@ci.commit).to.be.equal '3f93f2e4ddcf5a216d314d<PASSWORD>e<PASSWORD>9e99b21c8fb'
describe 'buildId', ->
it 'return build id', ->
expect(@ci.buildId).to.be.equal '1'
describe 'buildNumber', ->
it 'return build number', ->
expect(@ci.buildId).to.be.equal '1'
describe 'buildJobNumber', ->
it 'return build job number', ->
expect(@ci.buildId).to.be.equal '1'
describe 'buildJobId', ->
it 'return build job id', ->
expect(@ci.buildId).to.be.equal '1'
| true | describe 'CircleCI', ->
beforeEach ->
@env = fixtures.circleCI
@ci = new CircleCI @env
describe 'isCurrentEnv', ->
context 'when circle-ci enviroment', ->
beforeEach ->
@ci = new CircleCI CIRCLECI: 'true'
it 'return true', ->
expect(@ci.isCurrentEnv()).to.be.true
context 'when not circle-ci enviroment', ->
beforeEach ->
@ci = new CircleCI {}
it 'return false', ->
expect(@ci.isCurrentEnv()).to.be.false
describe 'branch', ->
it 'return branch name', ->
expect(@ci.branch).to.be.equal 'master'
describe 'commit', ->
it 'return commit hash value', ->
expect(@ci.commit).to.be.equal '3f93f2e4ddcf5a216d314dPI:PASSWORD:<PASSWORD>END_PIePI:PASSWORD:<PASSWORD>END_PI9e99b21c8fb'
describe 'buildId', ->
it 'return build id', ->
expect(@ci.buildId).to.be.equal '1'
describe 'buildNumber', ->
it 'return build number', ->
expect(@ci.buildId).to.be.equal '1'
describe 'buildJobNumber', ->
it 'return build job number', ->
expect(@ci.buildId).to.be.equal '1'
describe 'buildJobId', ->
it 'return build job id', ->
expect(@ci.buildId).to.be.equal '1'
|
[
{
"context": "= (T,cb) ->\n await km.unlock_pgp { passphrase : 'aabb' }, defer err\n T.no_error err\n proc = new Messa",
"end": 2091,
"score": 0.9986951947212219,
"start": 2087,
"tag": "PASSWORD",
"value": "aabb"
}
] | test/files/rsa_8192.iced | johnnyRose/kbpgp | 1 | {keys} = require '../data/keys.iced'
{parse} = require '../../lib/openpgp/parser'
armor = require '../../lib/openpgp/armor'
C = require '../../lib/const'
{do_message,Message} = require '../../lib/openpgp/processor'
util = require 'util'
{unix_time,katch,ASP} = require '../../lib/util'
{KeyManager} = require '../../'
{import_key_pgp} = require '../../lib/symmetric'
{decrypt} = require '../../lib/openpgp/ocfb'
{PgpKeyRing} = require '../../lib/keyring'
{Literal} = require '../../lib/openpgp/packet/literal'
{burn} = require '../../lib/openpgp/burner'
clearsign = require '../../lib/openpgp/clearsign'
#============================================================================
ring = null
km = null
ctext = null
#============================================================================
exports.import_key = (T,cb) ->
opts = now : Math.floor(new Date(2014, 3, 7)/1000)
await KeyManager.import_from_armored_pgp { raw : keys.tinfoil, opts }, defer err, tmp
km = tmp
ring = new PgpKeyRing()
ring.add_key_manager km
T.no_error err
cb()
#============================================================================
msg = """
I wonder by my troth, what thou and I
Did, till we loved ? were we not weaned till then?
But sucked on country pleasures, childishly?
Or snorted we in the Seven Sleepers' den?
'Twas so ; but this, all pleasures fancies be;
If ever any beauty I did see,
Which I desired, and got, 'twas but a dream of thee.
"""
#============================================================================
exports.encrypt = (T, cb) ->
flags = C.openpgp.key_flags.encrypt_comm
encryption_key = km.find_best_pgp_key flags
T.assert encryption_key?, "got an encryption key"
literals = [ new Literal {
data : new Buffer(msg)
format : C.openpgp.literal_formats.utf8
date : unix_time()
}]
await burn { literals, encryption_key }, defer err, armored, tmp
ctext = tmp
T.no_error err
cb()
#============================================================================
exports.decrypt = (T,cb) ->
await km.unlock_pgp { passphrase : 'aabb' }, defer err
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext }, defer err, out
T.no_error err
T.assert (not out[0].get_data_signer()?), "wasn't signed"
T.equal msg, out[0].toString(), "message came back right"
cb()
#============================================================================
| 195951 | {keys} = require '../data/keys.iced'
{parse} = require '../../lib/openpgp/parser'
armor = require '../../lib/openpgp/armor'
C = require '../../lib/const'
{do_message,Message} = require '../../lib/openpgp/processor'
util = require 'util'
{unix_time,katch,ASP} = require '../../lib/util'
{KeyManager} = require '../../'
{import_key_pgp} = require '../../lib/symmetric'
{decrypt} = require '../../lib/openpgp/ocfb'
{PgpKeyRing} = require '../../lib/keyring'
{Literal} = require '../../lib/openpgp/packet/literal'
{burn} = require '../../lib/openpgp/burner'
clearsign = require '../../lib/openpgp/clearsign'
#============================================================================
ring = null
km = null
ctext = null
#============================================================================
exports.import_key = (T,cb) ->
opts = now : Math.floor(new Date(2014, 3, 7)/1000)
await KeyManager.import_from_armored_pgp { raw : keys.tinfoil, opts }, defer err, tmp
km = tmp
ring = new PgpKeyRing()
ring.add_key_manager km
T.no_error err
cb()
#============================================================================
msg = """
I wonder by my troth, what thou and I
Did, till we loved ? were we not weaned till then?
But sucked on country pleasures, childishly?
Or snorted we in the Seven Sleepers' den?
'Twas so ; but this, all pleasures fancies be;
If ever any beauty I did see,
Which I desired, and got, 'twas but a dream of thee.
"""
#============================================================================
exports.encrypt = (T, cb) ->
flags = C.openpgp.key_flags.encrypt_comm
encryption_key = km.find_best_pgp_key flags
T.assert encryption_key?, "got an encryption key"
literals = [ new Literal {
data : new Buffer(msg)
format : C.openpgp.literal_formats.utf8
date : unix_time()
}]
await burn { literals, encryption_key }, defer err, armored, tmp
ctext = tmp
T.no_error err
cb()
#============================================================================
exports.decrypt = (T,cb) ->
await km.unlock_pgp { passphrase : '<PASSWORD>' }, defer err
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext }, defer err, out
T.no_error err
T.assert (not out[0].get_data_signer()?), "wasn't signed"
T.equal msg, out[0].toString(), "message came back right"
cb()
#============================================================================
| true | {keys} = require '../data/keys.iced'
{parse} = require '../../lib/openpgp/parser'
armor = require '../../lib/openpgp/armor'
C = require '../../lib/const'
{do_message,Message} = require '../../lib/openpgp/processor'
util = require 'util'
{unix_time,katch,ASP} = require '../../lib/util'
{KeyManager} = require '../../'
{import_key_pgp} = require '../../lib/symmetric'
{decrypt} = require '../../lib/openpgp/ocfb'
{PgpKeyRing} = require '../../lib/keyring'
{Literal} = require '../../lib/openpgp/packet/literal'
{burn} = require '../../lib/openpgp/burner'
clearsign = require '../../lib/openpgp/clearsign'
#============================================================================
ring = null
km = null
ctext = null
#============================================================================
exports.import_key = (T,cb) ->
opts = now : Math.floor(new Date(2014, 3, 7)/1000)
await KeyManager.import_from_armored_pgp { raw : keys.tinfoil, opts }, defer err, tmp
km = tmp
ring = new PgpKeyRing()
ring.add_key_manager km
T.no_error err
cb()
#============================================================================
msg = """
I wonder by my troth, what thou and I
Did, till we loved ? were we not weaned till then?
But sucked on country pleasures, childishly?
Or snorted we in the Seven Sleepers' den?
'Twas so ; but this, all pleasures fancies be;
If ever any beauty I did see,
Which I desired, and got, 'twas but a dream of thee.
"""
#============================================================================
exports.encrypt = (T, cb) ->
flags = C.openpgp.key_flags.encrypt_comm
encryption_key = km.find_best_pgp_key flags
T.assert encryption_key?, "got an encryption key"
literals = [ new Literal {
data : new Buffer(msg)
format : C.openpgp.literal_formats.utf8
date : unix_time()
}]
await burn { literals, encryption_key }, defer err, armored, tmp
ctext = tmp
T.no_error err
cb()
#============================================================================
exports.decrypt = (T,cb) ->
await km.unlock_pgp { passphrase : 'PI:PASSWORD:<PASSWORD>END_PI' }, defer err
T.no_error err
proc = new Message { keyfetch : ring }
await proc.parse_and_process { body : ctext }, defer err, out
T.no_error err
T.assert (not out[0].get_data_signer()?), "wasn't signed"
T.equal msg, out[0].toString(), "message came back right"
cb()
#============================================================================
|
[
{
"context": " dbConfig.dbuser = dbuser\n dbConfig.dbpassword = dbpassword \n dbConfig.dburi = dburi\n console.log dbConfig",
"end": 359,
"score": 0.9989141225814819,
"start": 349,
"tag": "PASSWORD",
"value": "dbpassword"
},
{
"context": "lastName\"]\n user[\"username\"] ... | mongodbclient.coffee | knayamlohani/tvserieswebapplication | 0 | mongoClient = require('mongodb').MongoClient
format = require('util').format;
crypto = require 'crypto'
mailer = require './mailer.js'
moment = require 'moment'
_db = ""
dbConfig =
"dbuser" : ""
"dbpassword" : ""
"dburi" : ""
exports.setDbConfig = (dbuser, dbpassword, dburi) ->
dbConfig.dbuser = dbuser
dbConfig.dbpassword = dbpassword
dbConfig.dburi = dburi
console.log dbConfig.dburi
mongoClient.connect dbConfig.dburi, (err, db) ->
if !err
_db = db
return
return
host= ""
exports.setHost = (hostName) ->
host = hostName
console.log "host set", host
return
###
mongoClient.connect dbConfig.dburi, (err, db) ->
if !err
_db = db
###
exports.checkIfEmailAlreadyRegistered = (email, callback) ->
if _db
checkingIfEmailAlreadyRegistered email, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
checkingIfEmailAlreadyRegistered email, db, callback
return
return
checkingIfEmailAlreadyRegistered = (email, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'useraccountdetails'
collection.find({"email": email}).toArray (err, results) ->
console.log results
if results.length > 0
result.status = true
else
result.status = false
result.err = err
result.data = results
callback result
return
exports.addNewUser = (requestingUser, callback) ->
if _db
addingNewUser requestingUser, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" :
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
else
_db = db
addingNewUser requestingUser, db, callback
return
return
addingNewUser = (requestingUser, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
user =
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
collection = db.collection 'useraccountdetails'
collection.insert requestingUser, (err, docs) ->
if err
result.status = false
else
user["firstName"] = docs[0]["firstName"]
user["lastName"] = docs[0]["lastName"]
user["username"] = docs[0]["username"]
user["email"] = docs[0]["email"]
user["signinStatus"] = true
user["siginPage"] = ""
user["dashboardPage"] = "/dashboard"
user["status"] = docs[0]["username"]
user["toggle"] = "dropdown"
result.err = err
result.data = user
callback result
return
return
exports.authenticateUserCredentials = (email, password, callback) ->
console.log "authenticating user+++"
if _db
authenticatingUserCredentials email, password, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err": err
"status": false
"data":
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
else
_db = db
authenticatingUserCredentials email, password, db, callback
return
return
authenticatingUserCredentials = (email, password, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
user =
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
"authenticationStatus": ""
collection = db.collection 'useraccountdetails'
collection.find({"email": email}).toArray (err, results) ->
if !err and results.length > 0 and results[0].password == password
user["firstName"] = results[0]["firstName"]
user["lastName"] = results[0]["lastName"]
user["username"] = results[0]["username"]
user["email"] = results[0]["email"]
user["signinStatus"] = true
user["siginPage"] = ""
user["dashboardPage"] = "/dashboard"
user["status"] = results[0]["username"]
user["toggle"] = "dropdown"
user["authenticationStatus"] = results[0]["authenticationStatus"]
user["timeZone"] = results[0]["timeZone"]
result.status = true
else
result.err = "signin error"
result.status = false
result.data = user
callback result
return
return
#returns JSON object
exports.addSeriesToSubscribedTvShows = (subscribingTvSeries, callback) ->
if _db
addingSeriesToSubscribedTvShows subscribingTvSeries, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
addingSeriesToSubscribedTvShows subscribingTvSeries, db, callback
return
return
addingSeriesToSubscribedTvShows = (subscribedTvSeries, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'usersubscribedtvshows'
collection.insert subscribedTvSeries, (err, docs) ->
if err
result.status = false
result.err = err
result.status = true
result.data = docs
callback result
return
return
exports.getSubscribedTvShows = (username, callback) ->
if _db
gettingSubscribedTvShows username, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
gettingSubscribedTvShows username, db, callback
return
return
gettingSubscribedTvShows = (subscriber, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'usersubscribedtvshows'
collection.find({"subscribersUsername": subscriber}).toArray (err, results) ->
if err
result.status = false
result.err = err
result.data = results
console.log result
callback result
return
exports.getSubscriptionStatusForSeriesWidth = (id, username, callback) ->
if _db
gettingSubscriptionStatusForSeriesWidth id, username, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
gettingSubscriptionStatusForSeriesWidth id, username, _db, callback
return
return
gettingSubscriptionStatusForSeriesWidth = (id, username, db, callback) ->
#callback "returning subscription status for series with id #{id}"
collection = db.collection 'usersubscribedtvshows'
result =
"err" : ""
"status" : ""
"data" : ""
collection.find({"subscribersUsername": username, "id": id}).toArray (err, results) ->
if err
result.status = false
else if results.length > 0
result.status = true
else result.status = false
result.err = err
result.data = results
console.log result
callback result
return
###
exports.getTvShowsAiringOn = (dayOfWeek, callback) ->
if _db
gettingSubscribedTvShows dayOfWeek, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
gettingTvShowsAiringOn dayOfWeek, db, callback
return
return
###
gettingTvShowsAiringOn = (dayOfWeek, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'usersubscribedtvshows'
console.log "day of week", dayOfWeek
collection.find({"airsOnDayOfWeek": dayOfWeek}).toArray (err, results) ->
if err
result.status = false
result.err = err
result.data = results
console.log result
callback result
return
exports.deleteAccount = (username, callback) ->
if _db
deletingAccount username, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
deletingAccount username, _db, callback
return
return
# Deletes a user's account document and, in parallel, all of their tv-show
# subscriptions.
#
# username - account to delete
# db       - open mongodb connection
# callback - invoked once, with the result of the ACCOUNT removal only; the
#            subscriptions removal is logged but not reported (its callback
#            invocation is commented out below)
#
# NOTE(review): both remove callbacks run concurrently and mutate the same
# `result` object, so the logged values may interleave — confirm intended.
deletingAccount = (username, db, callback) ->
    result =
        "err" : ""
        "status" : ""
        "data" : ""
    userAccountCollection = db.collection 'useraccountdetails'
    userSubscriptionsCollection = db.collection 'usersubscribedtvshows'
    userAccountCollection.remove {"username": username}, (err, results) ->
        if err
            result.status = false
        else result.status = true
        result.err = err
        result.data = results
        console.log "deleting account", result
        callback result
        return
    # best-effort cleanup of the user's subscriptions; outcome only logged
    userSubscriptionsCollection.remove {"subscribersUsername": username}, (err, results) ->
        if err
            result.status = false
        else result.status = true
        result.err = err
        result.data = results
        console.log "deleting subscriptions all", result
        #callback result
        return
    return
# Persists a password-reset request, reusing the cached db handle when one
# exists and connecting (then caching) otherwise.
exports.storePasswordChangeRequest = (passwordResetObject, callback) ->
    if _db
        storingPasswordChangeRequest passwordResetObject, _db, callback
        return
    mongoClient.connect dbConfig.dburi, (connectionError, database) ->
        if connectionError
            callback
                "err" : connectionError
                "status" : false
                "data" : ""
        else
            _db = database
            storingPasswordChangeRequest passwordResetObject, _db, callback
        return
    return
#
# Inserts a password-reset request document into 'passwordchangerequests'.
#
# passwordResetObject - document to store (token hash, email, expiry, ...)
# db       - open mongodb connection
# callback - receives {err, status, data}
#
storingPasswordChangeRequest = (passwordResetObject, db, callback) ->
    result =
        "err" : ""
        "status" : ""
        "data" : ""
    collection = db.collection 'passwordchangerequests'
    collection.insert passwordResetObject, (err, docs) ->
        if err
            result.status = false
        else
            # fix: success path previously left status as "" — siblings set true
            result.status = true
        result.err = err
        result.data = docs
        callback result
        return
    return
# Applies a password change identified by a reset token, reusing the cached
# db handle when one exists and connecting (then caching) otherwise.
exports.updatePassword = (token, newPassword, callback) ->
    if _db
        updatingPassword token, newPassword, _db, callback
        return
    mongoClient.connect dbConfig.dburi, (connectionError, database) ->
        if connectionError
            callback
                "err" : connectionError
                "status" : false
                "data" : ""
        else
            _db = database
            updatingPassword token, newPassword, _db, callback
        return
    return
#
# Validates a password-reset token and, when exactly one pending request
# matches its sha1 hash, updates that user's password.
#
# token       - raw reset token from the email link
# newPassword - replacement password
#               NOTE(review): stored as-is — presumably hashed upstream; verify
# db          - open mongodb connection
# callback    - receives {err, status, data}
#
updatingPassword = (token, newPassword, db, callback) ->
    result =
        "err" : ""
        "status" : ""
        "data" : ""
    passwordChangeRequestsCollection = db.collection 'passwordchangerequests'
    shasum = crypto.createHash 'sha1'
    shasum.update token
    tokenhash = shasum.digest 'hex'
    passwordChangeRequestsCollection.find({"tokenHash": tokenhash}).toArray (err, results) ->
        console.log results
        # fix: guard err/missing results — `results.length` previously threw
        # when the find failed (results undefined on error)
        if !err and results? and results.length == 1
            result.status = true
            email = results[0].email
            collection = db.collection 'useraccountdetails'
            collection.update {"email": email},{ $set: { "password" : newPassword } }, (err, docs) ->
                if err
                    result.status = false
                else
                    result.status = true
                result.err = err
                result.data = docs
                callback result
                return
        else
            result.status = false
            result.err = err
            result.data = results
            callback result
        return
    return
# Stores a pending account and triggers its activation email, reusing the
# cached db handle when one exists and connecting (then caching) otherwise.
exports.addUnauthenticatedUser = (unauthenticatedUserObject, callback) ->
    if _db
        addingUnauthenticatedUser unauthenticatedUserObject, _db, callback
        return
    mongoClient.connect dbConfig.dburi, (connectionError, database) ->
        if connectionError
            callback
                "err" : connectionError
                "status" : false
                "data" : ""
        else
            _db = database
            addingUnauthenticatedUser unauthenticatedUserObject, _db, callback
        return
    return
# Stores a pending (unauthenticated) account and mails the activation link.
#
# unauthenticatedUserObject - pending-account document; its `token` field is
#                             replaced by its sha1 hash before insertion,
#                             while the raw token goes into the emailed link
# db       - open mongodb connection
# callback - on insert failure receives {err, status, data}; on success it is
#            handed to mailer.sendMail, so it receives whatever the mailer
#            reports instead
addingUnauthenticatedUser = (unauthenticatedUserObject, db, callback) ->
    result =
        "err" : ""
        "status" : ""
        "data" : ""
    # keep the raw token for the activation URL; only the hash is persisted
    token = unauthenticatedUserObject.token
    unauthenticatedUserObject.token = generateHash unauthenticatedUserObject.token
    collection = db.collection 'unauthenticatedaccounts'
    collection.insert unauthenticatedUserObject, (err, docs) ->
        if err
            result.status = false
            result.err = err
            result.data = docs
            callback result
        else
            body = "<div><p>You have successfully set up your TV Series account and you can now access it by clicking on the following link:</p></div><div><p><a href='https://#{host}/authenticateAccount?token=#{token}'> https://#{host}/authenticateAccount?token=#{token} </a></p></div>"
            mailOptions =
                from : 'TV Series <tvserieswebapp@gmail.com>'
                to : unauthenticatedUserObject.email
                subject : 'Authenticate your TV Series Account'
                text : ''
                html : body
            mailer.sendMail mailOptions, callback
        return
    return
# Returns the hex-encoded sha1 digest of the given string.
generateHash = (string) ->
    crypto.createHash('sha1').update(string).digest('hex')
# Activates an account from an emailed token, reusing the cached db handle
# when one exists and connecting (then caching) otherwise.
exports.authenticateAccount = (token, callback) ->
    if _db
        authenticatingAccount token, _db, callback
        return
    mongoClient.connect dbConfig.dburi, (connectionError, database) ->
        if connectionError
            callback
                "err" : connectionError
                "status" : false
                "data" : ""
        else
            _db = database
            authenticatingAccount token, _db, callback
        return
    return
#
# Marks an account authenticated when its activation token is valid.
# The raw token is sha1-hashed and looked up in 'unauthenticatedaccounts';
# on a unique match the user's authenticationStatus flag is set to true.
#
# token    - raw activation token from the email link
# db       - open mongodb connection
# callback - receives {err, status, data}
#
authenticatingAccount = (token, db, callback) ->
    result =
        "err" : ""
        "status" : ""
        "data" : ""
    unauthenticatedAccountsCollection = db.collection 'unauthenticatedaccounts'
    shasum = crypto.createHash 'sha1'
    shasum.update token
    token = shasum.digest 'hex'
    console.log "finding token"
    unauthenticatedAccountsCollection.find({"token": token}).toArray (err, results) ->
        console.log results
        # fix: guard err/missing results — `results.length` previously threw
        # when the find failed (results undefined on error)
        if !err and results? and results.length == 1
            result.status = true
            email = results[0].email
            collection = db.collection 'useraccountdetails'
            console.log "updating account"
            collection.update {"email": email},{ $set: { "authenticationStatus" : true } }, (err, docs) ->
                if err
                    result.status = false
                else
                    result.status = true
                result.err = err
                result.data = docs
                callback result
                return
        else
            result.status = false
            result.err = err
            result.data = results
            callback result
        return
    return
# Deletes expired password-reset requests from 'passwordchangerequests'.
#
# NOTE(review): the removes are fire-and-forget and the job never invokes
# `callback` (the call is commented out below) — confirm callers expect that.
exports.deleteExpiredPasswordResetTokens = (options, callback) ->
    console.log "calling deleteExpiredPasswordResetTokens"
    createMongodbConnectionAndPerform (options, db, callback) ->
        # result is only referenced by the log line inside the remove
        # callback; it is never populated
        result =
            "err" : ""
            "status" : ""
            "data" : ""
        collection = db.collection 'passwordchangerequests'
        collection.find({}).toArray (err, results) ->
            console.log results
            if results.length > 0
                for request in results
                    # remove any request whose expiry timestamp is in the past
                    if new Date(request.expires) < new Date()
                        console.log "original date", new Date(request.expires), "curr date", new Date()
                        collection.remove request, (err, results) ->
                            console.log result, "removed"
                            return
                return
        #callback "deleting expired password reset tokens"
        return
    ,
    options,callback
    return
# Deletes expired account-activation tokens from 'unauthenticatedaccounts'.
#
# NOTE(review): mirrors deleteExpiredPasswordResetTokens — the removes are
# fire-and-forget and `callback` is never invoked (call commented out below).
exports.deleteExpiredAccountAuthenticationTokens = (options, callback) ->
    console.log "calling deleteExpiredAccountAuthenticationTokens"
    createMongodbConnectionAndPerform (options, db, callback) ->
        # result is only referenced by the log line inside the remove
        # callback; it is never populated
        result =
            "err" : ""
            "status" : ""
            "data" : ""
        collection = db.collection 'unauthenticatedaccounts'
        collection.find({}).toArray (err, results) ->
            console.log results
            if results.length > 0
                for request in results
                    # remove any pending account whose activation window lapsed
                    if new Date(request.expires) < new Date()
                        console.log "account token deleted original date", new Date(request.expires), "curr date", new Date()
                        collection.remove request, (err, results) ->
                            console.log result, "removed"
                            return
                return
        #callback "deleting expired password reset tokens"
        return
    ,
    options,callback
    return
# Inserts options.object into the 'jobs' collection and reports the outcome
# through callback as {err, status, data}.
exports.addNewJob = (options, callback) ->
    insertJob = (opts, db, done) ->
        outcome =
            "err" : ""
            "status" : ""
            "data" : ""
        db.collection('jobs').insert opts.object, (err, docs) ->
            outcome.status = !err
            outcome.err = err
            outcome.data = docs
            done outcome
            return
        return
    createMongodbConnectionAndPerform insertJob, options, callback
    return
# Looks up subscribed tv shows matching options.object (e.g. an
# airsOnDayOfWeek filter) and reports them as {err, status, data}.
exports.getTvShowsAiringOn = (options, callback) ->
    findShows = (opts, db, done) ->
        outcome =
            "err" : ""
            "status" : ""
            "data" : ""
        console.log "day of week", opts.object
        db.collection('usersubscribedtvshows').find(opts.object).toArray (err, results) ->
            outcome.status = !err
            outcome.err = err
            outcome.data = results
            console.log outcome
            done outcome
            return
        return
    createMongodbConnectionAndPerform findShows, options, callback
    return
# Fetches job documents matching options.object from the 'jobs' collection
# and reports them as {err, status, data}.
exports.getMailSubscriptionJobs = (options, callback) ->
    findJobs = (opts, db, done) ->
        outcome =
            "err" : ""
            "status" : ""
            "data" : ""
        db.collection('jobs').find(opts.object).toArray (err, results) ->
            outcome.status = !err
            outcome.err = err
            outcome.data = results
            console.log outcome
            done outcome
            return
        return
    createMongodbConnectionAndPerform findJobs, options, callback
    return
# Reports whether at least one entry matching options.object exists in
# 'jobscreatedstatus': status is true only when the lookup succeeds AND
# returns a non-empty result.
exports.checkIfJobsCreated = (options, callback) ->
    checkJobs = (opts, db, done) ->
        outcome =
            "err" : ""
            "status" : ""
            "data" : ""
        db.collection('jobscreatedstatus').find(opts.object).toArray (err, results) ->
            outcome.status = !err and results.length > 0
            outcome.err = err
            outcome.data = results
            console.log outcome
            done outcome
            return
        return
    createMongodbConnectionAndPerform checkJobs, options, callback
    return
# Records that jobs were created by inserting options.object into
# 'jobscreatedstatus'; reports the outcome as {err, status, data}.
exports.addEntryToJobsCreatedStatusCollection = (options, callback) ->
    insertEntry = (opts, db, done) ->
        outcome =
            "err" : ""
            "status" : ""
            "data" : ""
        db.collection('jobscreatedstatus').insert opts.object, (err, docs) ->
            outcome.status = !err
            outcome.err = err
            outcome.data = docs
            done outcome
            return
        return
    createMongodbConnectionAndPerform insertEntry, options, callback
    return
###
exports.updateDocumentInCollection = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection options.collection
collection.update options.object.searchParameter, { $set: options.object.updatedValue }, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
,
options, callback
return
###
# Runs `job(options, db, callback)` against the cached connection, connecting
# on demand and caching the handle on first use. On connection failure the
# callback receives a failed result object instead of the job running.
createMongodbConnectionAndPerform = (job, options, callback) ->
    if _db
        job options, _db, callback
        return
    mongoClient.connect dbConfig.dburi, (connectionError, database) ->
        if connectionError
            callback
                "err" : connectionError
                "status" : false
                "data" : ""
        else
            _db = database
            job options, _db, callback
        return
    return
###
Connects to mongodb, resolves options.collection to a collection handle, performs the operation passed in as `job`, and invokes the callback when the job completes.
###
# Resolves options.collection (a name) to a live collection handle on the
# cached connection — connecting and caching first if needed — then runs
# `job(options, callback)`. Connection failures go straight to the callback.
connectToMongodbAndPerform = (job, options, callback) ->
    runJob = ->
        options.collection = _db.collection options.collection
        job options, callback
        return
    if _db
        runJob()
        return
    mongoClient.connect dbConfig.dburi, (connectionError, database) ->
        if connectionError
            callback
                "err" : connectionError
                "status" : false
                "data" : ""
        else
            _db = database
            runJob()
        return
    return
###
search for a document in collection
###
# Finds all documents in options.collection matching options.object and
# reports them through the callback as {err, status, data}.
searchDocumentInCollection = (options, callback) ->
    outcome =
        "err" : ""
        "status" : ""
        "data" : ""
    console.log "search ", options.object
    options.collection.find(options.object).toArray (err, matches) ->
        outcome.status = !err
        outcome.err = err
        outcome.data = matches
        console.log outcome
        callback outcome
        return
    return
exports.searchDocumentInCollection = searchDocumentInCollection
###
delete document from a collection
###
# Removes documents matching options.object from options.collection and
# reports the outcome through the callback as {err, status, data}.
deleteDocumentFromCollection = (options, callback) ->
    outcome =
        "err" : ""
        "status" : ""
        "data" : ""
    options.collection.remove options.object, (err, removed) ->
        outcome.status = !err
        outcome.err = err
        outcome.data = removed
        console.log outcome
        callback outcome
        return
    return
exports.deleteDocumentFromCollection = deleteDocumentFromCollection
###
checks for availability of username
###
# Checks username availability by searching 'useraccountdetails' with
# options.object as the query; callers inspect result.data for matches.
exports.checkUsernameAvailability = (options, callback) ->
    options.collection = 'useraccountdetails'
    connectToMongodbAndPerform searchDocumentInCollection, options, callback
    return
###
removes tv shows from subscribed list for a given user
###
# Unsubscribes a user's tv shows; options.object carries the shows to remove.
# The target collection is fixed here before delegating.
exports.removeSeriesFromSubscribedTvShows = (options, callback) ->
    options.collection = 'usersubscribedtvshows'
    connectToMongodbAndPerform removingSeriesFromSubscribedTvShows, options, callback
    return
#
# Removes each tv show listed in options.object (array of
# {subscribersUsername, id}) from the subscriptions collection and invokes
# callback once after the final remove completes.
#
# callback receives {err, status, data}; err carries a message when at least
# one remove did not report a deleted document.
#
removingSeriesFromSubscribedTvShows = (options, callback) ->
    result =
        "err" : ""
        "status" : ""
        "data" : ""
    collection = options.collection
    tvShowsToBeUnsubscribed = options.object
    counter = 0
    tvShowsUnsubscribedCount = 0
    limit = tvShowsToBeUnsubscribed.length
    # fix: previously an empty list meant callback was never invoked
    if limit == 0
        result.status = true
        callback result
        return
    for tvShow in tvShowsToBeUnsubscribed
        console.log "series", tvShow
        collection.remove {subscribersUsername:"#{tvShow.subscribersUsername}", id: "#{tvShow.id}"}, (err, docs) ->
            counter++
            if err
                result.status = false
            else
                result.status = true
            result.err = err
            result.data = docs
            # NOTE(review): presumably docs.data is the removed-document
            # count — confirm against the driver version in use
            if docs?.data == 1
                tvShowsUnsubscribedCount++
            if counter == limit
                # fix: was `counter == tvShowsUnsubscribedCount`, which reported
                # "not unsubscribed" exactly when every remove succeeded (the
                # sibling cleanup jobs use the != convention)
                if counter != tvShowsUnsubscribedCount
                    result.err =
                        "msg" : "some tv shows were not unsubscribed"
                callback result
            return
    return
###
add document to collection
###
# Inserts options.object into options.collection and reports the outcome
# through the callback as {err, status, data}.
addDocumentToCollection = (options, callback) ->
    insertDocument = (opts, done) ->
        outcome =
            "err" : ""
            "status" : ""
            "data" : ""
        opts.collection.insert opts.object, (err, docs) ->
            outcome.status = !err
            outcome.err = err
            outcome.data = docs
            done outcome
            return
        return
    connectToMongodbAndPerform insertDocument, options, callback
    return
exports.addDocumentToCollection = addDocumentToCollection
# Applies { $set: options.object.updatedValue } to documents matching
# options.object.searchParameter in options.collection and reports the
# outcome as {err, status, data}.
exports.updateDocumentInCollection = (options, callback) ->
    updateDocument = (opts, done) ->
        outcome =
            "err" : ""
            "status" : ""
            "data" : ""
        opts.collection.update opts.object.searchParameter, { $set: opts.object.updatedValue }, (err, docs) ->
            outcome.status = !err
            outcome.err = err
            outcome.data = docs
            done outcome
            return
        return
    connectToMongodbAndPerform updateDocument, options, callback
    return
# Purges 'jobscreatedstatus' entries older than 24 hours.
#
# NOTE(review): deletedEntriesCount is incremented inside the async remove
# callbacks, while the totalCount/counter completion check runs synchronously
# during the loop — the mismatch branch can fire before removals finish;
# verify.
# NOTE(review): callback is only invoked on the mismatch branch; on a find
# error, an empty collection, or full success it is never called — confirm
# callers expect that.
exports.deleteExpiredJobsCreatedStatusCollectionEntries = (options, callback) ->
    console.log "calling deleteExpiredJobsCreatedStatusCollectionEntries"
    options.collection = "jobscreatedstatus"
    connectToMongodbAndPerform (options, callback) ->
        result =
            "err" : null
            "status" : true
            "data" : ""
        collection = options.collection
        collection.find({}).toArray (err, results) ->
            if !err && results.length > 0
                counter = 0;
                totalCount = results.length
                deletedEntriesCount = 0;
                entriesToBeDeletedCount = 0;
                for jobEntry in results
                    #if jobEntry before Today then delete the job entry
                    counter++
                    # an entry is considered expired 24 hours after its date
                    now = new Date()
                    now.setHours(now.getHours() - 24)
                    if new Date(jobEntry.date) < now
                        console.log "job created status date", new Date(jobEntry.date), "now", new Date()
                        entriesToBeDeletedCount++
                        collection.remove jobEntry, (err, results) ->
                            console.log result, "removed job entry"
                            if results.data == 1
                                deletedEntriesCount++
                    if totalCount == counter
                        if entriesToBeDeletedCount != deletedEntriesCount
                            result =
                                "err":
                                    "message": "not all entries deleted"
                                "status" : false
                            callback result
                return
            return
        return
    ,
    options,callback
    return
# Deletes job documents whose status is "finished" from the 'jobs' collection.
#
# NOTE(review): deletedEntriesCount is incremented inside the async remove
# callbacks, while the completion check runs synchronously during the loop —
# the failure branch can fire before removals finish; verify.
# NOTE(review): the `else` below is indented under `result =` rather than the
# preceding `if` — confirm the compiled branch pairing is the intended one.
exports.deleteEntriesFromJobsCollectionWithStatusFinished = (options, callback) ->
    console.log "calling deleteEntriesFromJobsCollectionWithStatusFinished"
    options.collection = "jobs"
    connectToMongodbAndPerform (options, callback) ->
        result =
            "err" : null
            "status" : true
            "data" : ""
        collection = options.collection
        collection.find({}).toArray (err, results) ->
            if !err && results.length > 0
                counter = 0;
                totalCount = results.length
                deletedEntriesCount = 0;
                entriesToBeDeletedCount = 0;
                for jobEntry in results
                    #if jobEntry before Today then delete the job entry
                    counter++
                    if jobEntry.status == "finished"
                        entriesToBeDeletedCount++
                        collection.remove jobEntry, (err, results) ->
                            console.log result, "removed job entry"
                            if results.data == 1
                                deletedEntriesCount++
                    if totalCount == counter
                        if deletedEntriesCount < entriesToBeDeletedCount
                            result =
                                "err":
                                    "message": "not all entries deleted with status finished"
                                "status" : false
                            else
                                result.data = 1
                            callback result
                return
            return
        return
    ,
    options,callback
    return
| 204590 | mongoClient = require('mongodb').MongoClient
format = require('util').format;
crypto = require 'crypto'
mailer = require './mailer.js'
moment = require 'moment'
_db = ""
dbConfig =
"dbuser" : ""
"dbpassword" : ""
"dburi" : ""
exports.setDbConfig = (dbuser, dbpassword, dburi) ->
dbConfig.dbuser = dbuser
dbConfig.dbpassword = <PASSWORD>
dbConfig.dburi = dburi
console.log dbConfig.dburi
mongoClient.connect dbConfig.dburi, (err, db) ->
if !err
_db = db
return
return
host= ""
exports.setHost = (hostName) ->
host = hostName
console.log "host set", host
return
###
mongoClient.connect dbConfig.dburi, (err, db) ->
if !err
_db = db
###
exports.checkIfEmailAlreadyRegistered = (email, callback) ->
if _db
checkingIfEmailAlreadyRegistered email, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
checkingIfEmailAlreadyRegistered email, db, callback
return
return
checkingIfEmailAlreadyRegistered = (email, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'useraccountdetails'
collection.find({"email": email}).toArray (err, results) ->
console.log results
if results.length > 0
result.status = true
else
result.status = false
result.err = err
result.data = results
callback result
return
exports.addNewUser = (requestingUser, callback) ->
if _db
addingNewUser requestingUser, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" :
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
else
_db = db
addingNewUser requestingUser, db, callback
return
return
addingNewUser = (requestingUser, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
user =
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
collection = db.collection 'useraccountdetails'
collection.insert requestingUser, (err, docs) ->
if err
result.status = false
else
user["firstName"] = docs[0]["firstName"]
user["lastName"] = docs[0]["lastName"]
user["username"] = docs[0]["username"]
user["email"] = docs[0]["email"]
user["signinStatus"] = true
user["siginPage"] = ""
user["dashboardPage"] = "/dashboard"
user["status"] = docs[0]["username"]
user["toggle"] = "dropdown"
result.err = err
result.data = user
callback result
return
return
exports.authenticateUserCredentials = (email, password, callback) ->
console.log "authenticating user+++"
if _db
authenticatingUserCredentials email, password, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err": err
"status": false
"data":
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
else
_db = db
authenticatingUserCredentials email, password, db, callback
return
return
authenticatingUserCredentials = (email, password, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
user =
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
"authenticationStatus": ""
collection = db.collection 'useraccountdetails'
collection.find({"email": email}).toArray (err, results) ->
if !err and results.length > 0 and results[0].password == <PASSWORD>
user["firstName"] = results[0]["firstName"]
user["lastName"] = results[0]["lastName"]
user["username"] = results[0]["username"]
user["email"] = results[0]["email"]
user["signinStatus"] = true
user["siginPage"] = ""
user["dashboardPage"] = "/dashboard"
user["status"] = results[0]["username"]
user["toggle"] = "dropdown"
user["authenticationStatus"] = results[0]["authenticationStatus"]
user["timeZone"] = results[0]["timeZone"]
result.status = true
else
result.err = "signin error"
result.status = false
result.data = user
callback result
return
return
#returns JSON object
exports.addSeriesToSubscribedTvShows = (subscribingTvSeries, callback) ->
if _db
addingSeriesToSubscribedTvShows subscribingTvSeries, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
addingSeriesToSubscribedTvShows subscribingTvSeries, db, callback
return
return
addingSeriesToSubscribedTvShows = (subscribedTvSeries, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'usersubscribedtvshows'
collection.insert subscribedTvSeries, (err, docs) ->
if err
result.status = false
result.err = err
result.status = true
result.data = docs
callback result
return
return
exports.getSubscribedTvShows = (username, callback) ->
if _db
gettingSubscribedTvShows username, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
gettingSubscribedTvShows username, db, callback
return
return
gettingSubscribedTvShows = (subscriber, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'usersubscribedtvshows'
collection.find({"subscribersUsername": subscriber}).toArray (err, results) ->
if err
result.status = false
result.err = err
result.data = results
console.log result
callback result
return
exports.getSubscriptionStatusForSeriesWidth = (id, username, callback) ->
if _db
gettingSubscriptionStatusForSeriesWidth id, username, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
gettingSubscriptionStatusForSeriesWidth id, username, _db, callback
return
return
gettingSubscriptionStatusForSeriesWidth = (id, username, db, callback) ->
#callback "returning subscription status for series with id #{id}"
collection = db.collection 'usersubscribedtvshows'
result =
"err" : ""
"status" : ""
"data" : ""
collection.find({"subscribersUsername": username, "id": id}).toArray (err, results) ->
if err
result.status = false
else if results.length > 0
result.status = true
else result.status = false
result.err = err
result.data = results
console.log result
callback result
return
###
exports.getTvShowsAiringOn = (dayOfWeek, callback) ->
if _db
gettingSubscribedTvShows dayOfWeek, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
gettingTvShowsAiringOn dayOfWeek, db, callback
return
return
###
gettingTvShowsAiringOn = (dayOfWeek, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'usersubscribedtvshows'
console.log "day of week", dayOfWeek
collection.find({"airsOnDayOfWeek": dayOfWeek}).toArray (err, results) ->
if err
result.status = false
result.err = err
result.data = results
console.log result
callback result
return
exports.deleteAccount = (username, callback) ->
if _db
deletingAccount username, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
deletingAccount username, _db, callback
return
return
deletingAccount = (username, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
userAccountCollection = db.collection 'useraccountdetails'
userSubscriptionsCollection = db.collection 'usersubscribedtvshows'
userAccountCollection.remove {"username": username}, (err, results) ->
if err
result.status = false
else result.status = true
result.err = err
result.data = results
console.log "deleting account", result
callback result
return
userSubscriptionsCollection.remove {"subscribersUsername": username}, (err, results) ->
if err
result.status = false
else result.status = true
result.err = err
result.data = results
console.log "deleting subscriptions all", result
#callback result
return
return
exports.storePasswordChangeRequest = (passwordResetObject, callback) ->
if _db
storingPasswordChangeRequest passwordResetObject, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
storingPasswordChangeRequest passwordResetObject, _db, callback
return
return
storingPasswordChangeRequest = (passwordResetObject, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'passwordchangerequests'
collection.insert passwordResetObject, (err, docs) ->
if err
result.status = false
result.err = err
result.data = docs
callback result
return
return
exports.updatePassword = (token, newPassword, callback) ->
if _db
updatingPassword token, <PASSWORD>Password, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
updatingPassword token, newPassword, _db, callback
return
return
updatingPassword = (token, <PASSWORD>Password, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
passwordChangeRequestsCollection = db.collection 'passwordchangerequests'
shasum = crypto.createHash 'sha1'
shasum.update token
tokenhash = shasum.digest 'hex'
passwordChangeRequestsCollection.find({"tokenHash": tokenhash}).toArray (err, results) ->
console.log results
if results.length == 1
result.status = true
email = results[0].email
collection = db.collection 'useraccountdetails'
collection.update {"email": email},{ $set: { "password" : <PASSWORD> } }, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
else
result.status = false
result.err = err
result.data = results
callback result
return
return
exports.addUnauthenticatedUser = (unauthenticatedUserObject, callback) ->
if _db
addingUnauthenticatedUser unauthenticatedUserObject, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
addingUnauthenticatedUser unauthenticatedUserObject, _db, callback
return
return
addingUnauthenticatedUser = (unauthenticatedUserObject, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
token = unauthenticatedUserObject.token
unauthenticatedUserObject.token = generateHash unauthenticatedUserObject.token
collection = db.collection 'unauthenticatedaccounts'
collection.insert unauthenticatedUserObject, (err, docs) ->
if err
result.status = false
result.err = err
result.data = docs
callback result
else
body = "<div><p>You have successfully set up your TV Series account and you can now access it by clicking on the following link:</p></div><div><p><a href='https://#{host}/authenticateAccount?token=#{token}'> https://#{host}/authenticateAccount?token=#{token} </a></p></div>"
mailOptions =
from : 'TV Series <<EMAIL>>'
to : unauthenticatedUserObject.email
subject : 'Authenticate your TV Series Account'
text : ''
html : body
mailer.sendMail mailOptions, callback
return
return
generateHash = (string) ->
shasum = crypto.createHash 'sha1'
shasum.update string
hashValue = shasum.digest 'hex'
return hashValue
exports.authenticateAccount = (token, callback) ->
if _db
authenticatingAccount token, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
authenticatingAccount token, _db, callback
return
return
authenticatingAccount = (token, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
unauthenticatedAccountsCollection = db.collection 'unauthenticatedaccounts'
shasum = crypto.createHash 'sha1'
shasum.update token
token = shasum.digest 'hex'
console.log "finding token"
unauthenticatedAccountsCollection.find({"token": token}).toArray (err, results) ->
console.log results
if results.length == 1
result.status = true
email = results[0].email
collection = db.collection 'useraccountdetails'
console.log "updating account"
collection.update {"email": email},{ $set: { "authenticationStatus" : true } }, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
else
result.status = false
result.err = err
result.data = results
callback result
return
return
exports.deleteExpiredPasswordResetTokens = (options, callback) ->
console.log "calling deleteExpiredPasswordResetTokens"
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'passwordchangerequests'
collection.find({}).toArray (err, results) ->
console.log results
if results.length > 0
for request in results
if new Date(request.expires) < new Date()
console.log "original date", new Date(request.expires), "curr date", new Date()
collection.remove request, (err, results) ->
console.log result, "removed"
return
return
#callback "deleting expired password reset tokens"
return
,
options,callback
return
exports.deleteExpiredAccountAuthenticationTokens = (options, callback) ->
console.log "calling deleteExpiredAccountAuthenticationTokens"
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'unauthenticatedaccounts'
collection.find({}).toArray (err, results) ->
console.log results
if results.length > 0
for request in results
if new Date(request.expires) < new Date()
console.log "account token deleted original date", new Date(request.expires), "curr date", new Date()
collection.remove request, (err, results) ->
console.log result, "removed"
return
return
#callback "deleting expired password reset tokens"
return
,
options,callback
return
exports.addNewJob = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'jobs'
collection.insert options.object, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
#callback "deleting expired password reset tokens"
return
,
options,callback
return
exports.getTvShowsAiringOn = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'usersubscribedtvshows'
console.log "day of week", options.object
collection.find(options.object).toArray (err, results) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = results
console.log result
callback result
return
return
,
options,callback
return
exports.getMailSubscriptionJobs = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'jobs'
collection.find(options.object).toArray (err, results) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = results
console.log result
callback result
return
return
,
options,callback
return
exports.checkIfJobsCreated = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'jobscreatedstatus'
collection.find(options.object).toArray (err, results) ->
if err
result.status = false
else
result.status = true
if !err && results.length == 0
result.status = false
result.err = err
result.data = results
console.log result
callback result
return
return
,
options,callback
return
exports.addEntryToJobsCreatedStatusCollection = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'jobscreatedstatus'
collection.insert options.object, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
return
,
options,callback
return
###
exports.updateDocumentInCollection = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection options.collection
collection.update options.object.searchParameter, { $set: options.object.updatedValue }, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
,
options, callback
return
###
createMongodbConnectionAndPerform = (job, options, callback) ->
if _db
job options, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
job options, _db, callback
return
return
###
connects to mongodb and perform the required operation passed as job and on completion of the job calls the callback
###
connectToMongodbAndPerform = (job, options, callback) ->
if _db
options.collection = _db.collection options.collection
job options, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
options.collection = _db.collection options.collection
job options, callback
return
return
###
search for a document in collection
###
searchDocumentInCollection = (options, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
console.log "search ", options.object
collection = options.collection
collection.find(options.object).toArray (err, results) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = results
console.log result
callback result
return
return
exports.searchDocumentInCollection = searchDocumentInCollection
###
delete document from a collection
###
deleteDocumentFromCollection = (options, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = options.collection
collection.remove options.object, (err, results) ->
if err
result.status = false
else result.status = true
result.err = err
result.data = results
console.log result
callback result
return
return
exports.deleteDocumentFromCollection = deleteDocumentFromCollection
###
checks for availability of username
###
exports.checkUsernameAvailability = (options, callback) ->
options.collection = 'useraccountdetails'
connectToMongodbAndPerform searchDocumentInCollection, options, callback
return
###
removes tv shows from subscribed list for a given user
###
exports.removeSeriesFromSubscribedTvShows = (options, callback) ->
options.collection = 'usersubscribedtvshows'
connectToMongodbAndPerform removingSeriesFromSubscribedTvShows, options, callback
return
removingSeriesFromSubscribedTvShows = (options, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = options.collection
(() ->
tvShowsToBeUnsubscribed = options.object
counter = 0
tvShowsUnsubscribedCount = 0
limit = tvShowsToBeUnsubscribed.length
for tvShow in tvShowsToBeUnsubscribed
console.log "series", tvShow
collection.remove {subscribersUsername:"#{tvShow.subscribersUsername}", id: "#{tvShow.id}"}, (err, docs) ->
counter++
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
if docs.data == 1
tvShowsUnsubscribedCount++
if counter == limit
if counter == tvShowsUnsubscribedCount
result.err =
"msg" : "some tv shows were not unsubscribed"
callback result
return
return
)()
return
###
add document to collection
###
addDocumentToCollection = (options, callback) ->
connectToMongodbAndPerform (options, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = options.collection
collection.insert options.object, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
return
, options, callback
return
exports.addDocumentToCollection = addDocumentToCollection
exports.updateDocumentInCollection = (options, callback) ->
connectToMongodbAndPerform (options, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = options.collection
collection.update options.object.searchParameter, { $set: options.object.updatedValue }, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
,
options, callback
return
exports.deleteExpiredJobsCreatedStatusCollectionEntries = (options, callback) ->
console.log "calling deleteExpiredJobsCreatedStatusCollectionEntries"
options.collection = "jobscreatedstatus"
connectToMongodbAndPerform (options, callback) ->
result =
"err" : null
"status" : true
"data" : ""
collection = options.collection
collection.find({}).toArray (err, results) ->
if !err && results.length > 0
counter = 0;
totalCount = results.length
deletedEntriesCount = 0;
entriesToBeDeletedCount = 0;
for jobEntry in results
#if jobEntry before Today then delete the job entry
counter++
now = new Date()
now.setHours(now.getHours() - 24)
if new Date(jobEntry.date) < now
console.log "job created status date", new Date(jobEntry.date), "now", new Date()
entriesToBeDeletedCount++
collection.remove jobEntry, (err, results) ->
console.log result, "removed job entry"
if results.data == 1
deletedEntriesCount++
if totalCount == counter
if entriesToBeDeletedCount != deletedEntriesCount
result =
"err":
"message": "not all entries deleted"
"status" : false
callback result
return
return
return
,
options,callback
return
exports.deleteEntriesFromJobsCollectionWithStatusFinished = (options, callback) ->
console.log "calling deleteEntriesFromJobsCollectionWithStatusFinished"
options.collection = "jobs"
connectToMongodbAndPerform (options, callback) ->
result =
"err" : null
"status" : true
"data" : ""
collection = options.collection
collection.find({}).toArray (err, results) ->
if !err && results.length > 0
counter = 0;
totalCount = results.length
deletedEntriesCount = 0;
entriesToBeDeletedCount = 0;
for jobEntry in results
#if jobEntry before Today then delete the job entry
counter++
if jobEntry.status == "finished"
entriesToBeDeletedCount++
collection.remove jobEntry, (err, results) ->
console.log result, "removed job entry"
if results.data == 1
deletedEntriesCount++
if totalCount == counter
if deletedEntriesCount < entriesToBeDeletedCount
result =
"err":
"message": "not all entries deleted with status finished"
"status" : false
else
result.data = 1
callback result
return
return
return
,
options,callback
return
| true | mongoClient = require('mongodb').MongoClient
format = require('util').format;
crypto = require 'crypto'
mailer = require './mailer.js'
moment = require 'moment'
_db = ""
dbConfig =
"dbuser" : ""
"dbpassword" : ""
"dburi" : ""
exports.setDbConfig = (dbuser, dbpassword, dburi) ->
dbConfig.dbuser = dbuser
dbConfig.dbpassword = PI:PASSWORD:<PASSWORD>END_PI
dbConfig.dburi = dburi
console.log dbConfig.dburi
mongoClient.connect dbConfig.dburi, (err, db) ->
if !err
_db = db
return
return
host= ""
exports.setHost = (hostName) ->
host = hostName
console.log "host set", host
return
###
mongoClient.connect dbConfig.dburi, (err, db) ->
if !err
_db = db
###
exports.checkIfEmailAlreadyRegistered = (email, callback) ->
if _db
checkingIfEmailAlreadyRegistered email, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
checkingIfEmailAlreadyRegistered email, db, callback
return
return
checkingIfEmailAlreadyRegistered = (email, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'useraccountdetails'
collection.find({"email": email}).toArray (err, results) ->
console.log results
if results.length > 0
result.status = true
else
result.status = false
result.err = err
result.data = results
callback result
return
exports.addNewUser = (requestingUser, callback) ->
if _db
addingNewUser requestingUser, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" :
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
else
_db = db
addingNewUser requestingUser, db, callback
return
return
addingNewUser = (requestingUser, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
user =
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
collection = db.collection 'useraccountdetails'
collection.insert requestingUser, (err, docs) ->
if err
result.status = false
else
user["firstName"] = docs[0]["firstName"]
user["lastName"] = docs[0]["lastName"]
user["username"] = docs[0]["username"]
user["email"] = docs[0]["email"]
user["signinStatus"] = true
user["siginPage"] = ""
user["dashboardPage"] = "/dashboard"
user["status"] = docs[0]["username"]
user["toggle"] = "dropdown"
result.err = err
result.data = user
callback result
return
return
exports.authenticateUserCredentials = (email, password, callback) ->
console.log "authenticating user+++"
if _db
authenticatingUserCredentials email, password, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err": err
"status": false
"data":
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
else
_db = db
authenticatingUserCredentials email, password, db, callback
return
return
authenticatingUserCredentials = (email, password, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
user =
"firstName" : ""
"lastName" : ""
"username" : ""
"email" : ""
"signinStatus" : false
"siginPage" : "/signin"
"dashboardPage" : ""
"status" : "Sign in"
"toggle" : ""
"authenticationStatus": ""
collection = db.collection 'useraccountdetails'
collection.find({"email": email}).toArray (err, results) ->
if !err and results.length > 0 and results[0].password == PI:PASSWORD:<PASSWORD>END_PI
user["firstName"] = results[0]["firstName"]
user["lastName"] = results[0]["lastName"]
user["username"] = results[0]["username"]
user["email"] = results[0]["email"]
user["signinStatus"] = true
user["siginPage"] = ""
user["dashboardPage"] = "/dashboard"
user["status"] = results[0]["username"]
user["toggle"] = "dropdown"
user["authenticationStatus"] = results[0]["authenticationStatus"]
user["timeZone"] = results[0]["timeZone"]
result.status = true
else
result.err = "signin error"
result.status = false
result.data = user
callback result
return
return
#returns JSON object
exports.addSeriesToSubscribedTvShows = (subscribingTvSeries, callback) ->
if _db
addingSeriesToSubscribedTvShows subscribingTvSeries, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
addingSeriesToSubscribedTvShows subscribingTvSeries, db, callback
return
return
addingSeriesToSubscribedTvShows = (subscribedTvSeries, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'usersubscribedtvshows'
collection.insert subscribedTvSeries, (err, docs) ->
if err
result.status = false
result.err = err
result.status = true
result.data = docs
callback result
return
return
exports.getSubscribedTvShows = (username, callback) ->
if _db
gettingSubscribedTvShows username, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
gettingSubscribedTvShows username, db, callback
return
return
gettingSubscribedTvShows = (subscriber, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'usersubscribedtvshows'
collection.find({"subscribersUsername": subscriber}).toArray (err, results) ->
if err
result.status = false
result.err = err
result.data = results
console.log result
callback result
return
exports.getSubscriptionStatusForSeriesWidth = (id, username, callback) ->
if _db
gettingSubscriptionStatusForSeriesWidth id, username, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
gettingSubscriptionStatusForSeriesWidth id, username, _db, callback
return
return
gettingSubscriptionStatusForSeriesWidth = (id, username, db, callback) ->
#callback "returning subscription status for series with id #{id}"
collection = db.collection 'usersubscribedtvshows'
result =
"err" : ""
"status" : ""
"data" : ""
collection.find({"subscribersUsername": username, "id": id}).toArray (err, results) ->
if err
result.status = false
else if results.length > 0
result.status = true
else result.status = false
result.err = err
result.data = results
console.log result
callback result
return
###
exports.getTvShowsAiringOn = (dayOfWeek, callback) ->
if _db
gettingSubscribedTvShows dayOfWeek, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
gettingTvShowsAiringOn dayOfWeek, db, callback
return
return
###
gettingTvShowsAiringOn = (dayOfWeek, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'usersubscribedtvshows'
console.log "day of week", dayOfWeek
collection.find({"airsOnDayOfWeek": dayOfWeek}).toArray (err, results) ->
if err
result.status = false
result.err = err
result.data = results
console.log result
callback result
return
exports.deleteAccount = (username, callback) ->
if _db
deletingAccount username, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
deletingAccount username, _db, callback
return
return
deletingAccount = (username, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
userAccountCollection = db.collection 'useraccountdetails'
userSubscriptionsCollection = db.collection 'usersubscribedtvshows'
userAccountCollection.remove {"username": username}, (err, results) ->
if err
result.status = false
else result.status = true
result.err = err
result.data = results
console.log "deleting account", result
callback result
return
userSubscriptionsCollection.remove {"subscribersUsername": username}, (err, results) ->
if err
result.status = false
else result.status = true
result.err = err
result.data = results
console.log "deleting subscriptions all", result
#callback result
return
return
exports.storePasswordChangeRequest = (passwordResetObject, callback) ->
if _db
storingPasswordChangeRequest passwordResetObject, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
storingPasswordChangeRequest passwordResetObject, _db, callback
return
return
storingPasswordChangeRequest = (passwordResetObject, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'passwordchangerequests'
collection.insert passwordResetObject, (err, docs) ->
if err
result.status = false
result.err = err
result.data = docs
callback result
return
return
exports.updatePassword = (token, newPassword, callback) ->
if _db
updatingPassword token, PI:PASSWORD:<PASSWORD>END_PIPassword, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
updatingPassword token, newPassword, _db, callback
return
return
updatingPassword = (token, PI:PASSWORD:<PASSWORD>END_PIPassword, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
passwordChangeRequestsCollection = db.collection 'passwordchangerequests'
shasum = crypto.createHash 'sha1'
shasum.update token
tokenhash = shasum.digest 'hex'
passwordChangeRequestsCollection.find({"tokenHash": tokenhash}).toArray (err, results) ->
console.log results
if results.length == 1
result.status = true
email = results[0].email
collection = db.collection 'useraccountdetails'
collection.update {"email": email},{ $set: { "password" : PI:PASSWORD:<PASSWORD>END_PI } }, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
else
result.status = false
result.err = err
result.data = results
callback result
return
return
exports.addUnauthenticatedUser = (unauthenticatedUserObject, callback) ->
if _db
addingUnauthenticatedUser unauthenticatedUserObject, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
addingUnauthenticatedUser unauthenticatedUserObject, _db, callback
return
return
addingUnauthenticatedUser = (unauthenticatedUserObject, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
token = unauthenticatedUserObject.token
unauthenticatedUserObject.token = generateHash unauthenticatedUserObject.token
collection = db.collection 'unauthenticatedaccounts'
collection.insert unauthenticatedUserObject, (err, docs) ->
if err
result.status = false
result.err = err
result.data = docs
callback result
else
body = "<div><p>You have successfully set up your TV Series account and you can now access it by clicking on the following link:</p></div><div><p><a href='https://#{host}/authenticateAccount?token=#{token}'> https://#{host}/authenticateAccount?token=#{token} </a></p></div>"
mailOptions =
from : 'TV Series <PI:EMAIL:<EMAIL>END_PI>'
to : unauthenticatedUserObject.email
subject : 'Authenticate your TV Series Account'
text : ''
html : body
mailer.sendMail mailOptions, callback
return
return
generateHash = (string) ->
shasum = crypto.createHash 'sha1'
shasum.update string
hashValue = shasum.digest 'hex'
return hashValue
exports.authenticateAccount = (token, callback) ->
if _db
authenticatingAccount token, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
authenticatingAccount token, _db, callback
return
return
authenticatingAccount = (token, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
unauthenticatedAccountsCollection = db.collection 'unauthenticatedaccounts'
shasum = crypto.createHash 'sha1'
shasum.update token
token = shasum.digest 'hex'
console.log "finding token"
unauthenticatedAccountsCollection.find({"token": token}).toArray (err, results) ->
console.log results
if results.length == 1
result.status = true
email = results[0].email
collection = db.collection 'useraccountdetails'
console.log "updating account"
collection.update {"email": email},{ $set: { "authenticationStatus" : true } }, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
else
result.status = false
result.err = err
result.data = results
callback result
return
return
exports.deleteExpiredPasswordResetTokens = (options, callback) ->
console.log "calling deleteExpiredPasswordResetTokens"
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'passwordchangerequests'
collection.find({}).toArray (err, results) ->
console.log results
if results.length > 0
for request in results
if new Date(request.expires) < new Date()
console.log "original date", new Date(request.expires), "curr date", new Date()
collection.remove request, (err, results) ->
console.log result, "removed"
return
return
#callback "deleting expired password reset tokens"
return
,
options,callback
return
exports.deleteExpiredAccountAuthenticationTokens = (options, callback) ->
console.log "calling deleteExpiredAccountAuthenticationTokens"
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'unauthenticatedaccounts'
collection.find({}).toArray (err, results) ->
console.log results
if results.length > 0
for request in results
if new Date(request.expires) < new Date()
console.log "account token deleted original date", new Date(request.expires), "curr date", new Date()
collection.remove request, (err, results) ->
console.log result, "removed"
return
return
#callback "deleting expired password reset tokens"
return
,
options,callback
return
exports.addNewJob = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'jobs'
collection.insert options.object, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
#callback "deleting expired password reset tokens"
return
,
options,callback
return
exports.getTvShowsAiringOn = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'usersubscribedtvshows'
console.log "day of week", options.object
collection.find(options.object).toArray (err, results) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = results
console.log result
callback result
return
return
,
options,callback
return
exports.getMailSubscriptionJobs = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'jobs'
collection.find(options.object).toArray (err, results) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = results
console.log result
callback result
return
return
,
options,callback
return
exports.checkIfJobsCreated = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'jobscreatedstatus'
collection.find(options.object).toArray (err, results) ->
if err
result.status = false
else
result.status = true
if !err && results.length == 0
result.status = false
result.err = err
result.data = results
console.log result
callback result
return
return
,
options,callback
return
exports.addEntryToJobsCreatedStatusCollection = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection 'jobscreatedstatus'
collection.insert options.object, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
return
,
options,callback
return
###
exports.updateDocumentInCollection = (options, callback) ->
createMongodbConnectionAndPerform (options, db, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = db.collection options.collection
collection.update options.object.searchParameter, { $set: options.object.updatedValue }, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
,
options, callback
return
###
createMongodbConnectionAndPerform = (job, options, callback) ->
if _db
job options, _db, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
job options, _db, callback
return
return
###
connects to mongodb and perform the required operation passed as job and on completion of the job calls the callback
###
connectToMongodbAndPerform = (job, options, callback) ->
if _db
options.collection = _db.collection options.collection
job options, callback
else
mongoClient.connect dbConfig.dburi, (err, db) ->
if err
callback
"err" : err
"status" : false
"data" : ""
else
_db = db
options.collection = _db.collection options.collection
job options, callback
return
return
###
search for a document in collection
###
searchDocumentInCollection = (options, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
console.log "search ", options.object
collection = options.collection
collection.find(options.object).toArray (err, results) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = results
console.log result
callback result
return
return
exports.searchDocumentInCollection = searchDocumentInCollection
###
delete document from a collection
###
deleteDocumentFromCollection = (options, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = options.collection
collection.remove options.object, (err, results) ->
if err
result.status = false
else result.status = true
result.err = err
result.data = results
console.log result
callback result
return
return
exports.deleteDocumentFromCollection = deleteDocumentFromCollection
###
checks for availability of username
###
exports.checkUsernameAvailability = (options, callback) ->
options.collection = 'useraccountdetails'
connectToMongodbAndPerform searchDocumentInCollection, options, callback
return
###
removes tv shows from subscribed list for a given user
###
exports.removeSeriesFromSubscribedTvShows = (options, callback) ->
options.collection = 'usersubscribedtvshows'
connectToMongodbAndPerform removingSeriesFromSubscribedTvShows, options, callback
return
removingSeriesFromSubscribedTvShows = (options, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = options.collection
(() ->
tvShowsToBeUnsubscribed = options.object
counter = 0
tvShowsUnsubscribedCount = 0
limit = tvShowsToBeUnsubscribed.length
for tvShow in tvShowsToBeUnsubscribed
console.log "series", tvShow
collection.remove {subscribersUsername:"#{tvShow.subscribersUsername}", id: "#{tvShow.id}"}, (err, docs) ->
counter++
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
if docs.data == 1
tvShowsUnsubscribedCount++
if counter == limit
if counter == tvShowsUnsubscribedCount
result.err =
"msg" : "some tv shows were not unsubscribed"
callback result
return
return
)()
return
###
add document to collection
###
addDocumentToCollection = (options, callback) ->
connectToMongodbAndPerform (options, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = options.collection
collection.insert options.object, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
return
, options, callback
return
exports.addDocumentToCollection = addDocumentToCollection
exports.updateDocumentInCollection = (options, callback) ->
connectToMongodbAndPerform (options, callback) ->
result =
"err" : ""
"status" : ""
"data" : ""
collection = options.collection
collection.update options.object.searchParameter, { $set: options.object.updatedValue }, (err, docs) ->
if err
result.status = false
else
result.status = true
result.err = err
result.data = docs
callback result
return
,
options, callback
return
exports.deleteExpiredJobsCreatedStatusCollectionEntries = (options, callback) ->
console.log "calling deleteExpiredJobsCreatedStatusCollectionEntries"
options.collection = "jobscreatedstatus"
connectToMongodbAndPerform (options, callback) ->
result =
"err" : null
"status" : true
"data" : ""
collection = options.collection
collection.find({}).toArray (err, results) ->
if !err && results.length > 0
counter = 0;
totalCount = results.length
deletedEntriesCount = 0;
entriesToBeDeletedCount = 0;
for jobEntry in results
#if jobEntry before Today then delete the job entry
counter++
now = new Date()
now.setHours(now.getHours() - 24)
if new Date(jobEntry.date) < now
console.log "job created status date", new Date(jobEntry.date), "now", new Date()
entriesToBeDeletedCount++
collection.remove jobEntry, (err, results) ->
console.log result, "removed job entry"
if results.data == 1
deletedEntriesCount++
if totalCount == counter
if entriesToBeDeletedCount != deletedEntriesCount
result =
"err":
"message": "not all entries deleted"
"status" : false
callback result
return
return
return
,
options,callback
return
exports.deleteEntriesFromJobsCollectionWithStatusFinished = (options, callback) ->
console.log "calling deleteEntriesFromJobsCollectionWithStatusFinished"
options.collection = "jobs"
connectToMongodbAndPerform (options, callback) ->
result =
"err" : null
"status" : true
"data" : ""
collection = options.collection
collection.find({}).toArray (err, results) ->
if !err && results.length > 0
counter = 0;
totalCount = results.length
deletedEntriesCount = 0;
entriesToBeDeletedCount = 0;
for jobEntry in results
#if jobEntry before Today then delete the job entry
counter++
if jobEntry.status == "finished"
entriesToBeDeletedCount++
collection.remove jobEntry, (err, results) ->
console.log result, "removed job entry"
if results.data == 1
deletedEntriesCount++
if totalCount == counter
if deletedEntriesCount < entriesToBeDeletedCount
result =
"err":
"message": "not all entries deleted with status finished"
"status" : false
else
result.data = 1
callback result
return
return
return
,
options,callback
return
|
[
{
"context": "tions due to usage of `await` or `yield`\n# @author Teddy Katz\n###\n'use strict'\n\n#------------------------------",
"end": 130,
"score": 0.9998606443405151,
"start": 120,
"tag": "NAME",
"value": "Teddy Katz"
}
] | src/tests/rules/require-atomic-updates.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview disallow assignments that can lead to race conditions due to usage of `await` or `yield`
# @author Teddy Katz
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/require-atomic-updates'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
VARIABLE_ERROR =
messageId: 'nonAtomicUpdate'
data: value: 'foo'
type: 'AssignmentExpression'
STATIC_PROPERTY_ERROR =
messageId: 'nonAtomicUpdate'
data: value: 'foo.bar'
type: 'AssignmentExpression'
COMPUTED_PROPERTY_ERROR =
messageId: 'nonAtomicUpdate'
data: value: 'foo[bar].baz'
type: 'AssignmentExpression'
ruleTester.run 'require-atomic-updates', rule,
valid: [
'''
foo = null
x = ->
await y
foo += bar
'''
'''
foo = null
x = ->
await y
foo = foo + bar
'''
'''
foo = null
x = ->
foo = await bar + foo
'''
'''
->
foo = null
foo += await bar
'''
'''
foo = null
->
foo = (await result)(foo)
'''
'''
foo = null
->
foo = bar(await something, foo)
'''
'''
->
foo = null
foo += yield bar
'''
'''
foo = {}
->
foo.bar = await baz
'''
'''
foo = []
->
await y
foo[x] += 1
'''
'''
foo = null
->
yield
foo = bar + foo
'''
'''
->
foo = null
bar(() => baz += 1)
foo += await amount
'''
'''
foo = null
->
foo = if condition then foo else await bar
'''
]
invalid: [
code: '''
foo = null
-> foo += await amount
'''
errors: [messageId: 'nonAtomicUpdate', data: value: 'foo']
,
code: '''
foo = null
->
while condition
foo += await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + (if bar then baz else await amount)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + (if bar then await amount else baz)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = if condition then foo + await amount else somethingElse
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = (if condition then foo else await bar) + await bar
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo += bar + await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
->
foo = null
bar () => foo
foo += await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo += yield baz
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = bar(foo, await something)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = {}
-> foo.bar += await baz
'''
errors: [STATIC_PROPERTY_ERROR]
,
code: '''
foo = []
-> foo[bar].baz += await result
'''
errors: [COMPUTED_PROPERTY_ERROR]
,
code: '''
foo = null
-> foo = (yield foo) + await bar
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + await result(foo)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = await result(foo, await somethingElse)
'''
errors: [VARIABLE_ERROR]
,
code: '''
->
foo = null
yield -> foo += await bar
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = await foo + (yield bar)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = bar + await foo
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = {}
-> foo[bar].baz = await (foo.bar += await foo[bar].baz)
'''
errors: [COMPUTED_PROPERTY_ERROR, STATIC_PROPERTY_ERROR]
,
code: '-> foo += await bar'
errors: [VARIABLE_ERROR]
]
| 144251 | ###*
# @fileoverview disallow assignments that can lead to race conditions due to usage of `await` or `yield`
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/require-atomic-updates'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
VARIABLE_ERROR =
messageId: 'nonAtomicUpdate'
data: value: 'foo'
type: 'AssignmentExpression'
STATIC_PROPERTY_ERROR =
messageId: 'nonAtomicUpdate'
data: value: 'foo.bar'
type: 'AssignmentExpression'
COMPUTED_PROPERTY_ERROR =
messageId: 'nonAtomicUpdate'
data: value: 'foo[bar].baz'
type: 'AssignmentExpression'
ruleTester.run 'require-atomic-updates', rule,
valid: [
'''
foo = null
x = ->
await y
foo += bar
'''
'''
foo = null
x = ->
await y
foo = foo + bar
'''
'''
foo = null
x = ->
foo = await bar + foo
'''
'''
->
foo = null
foo += await bar
'''
'''
foo = null
->
foo = (await result)(foo)
'''
'''
foo = null
->
foo = bar(await something, foo)
'''
'''
->
foo = null
foo += yield bar
'''
'''
foo = {}
->
foo.bar = await baz
'''
'''
foo = []
->
await y
foo[x] += 1
'''
'''
foo = null
->
yield
foo = bar + foo
'''
'''
->
foo = null
bar(() => baz += 1)
foo += await amount
'''
'''
foo = null
->
foo = if condition then foo else await bar
'''
]
invalid: [
code: '''
foo = null
-> foo += await amount
'''
errors: [messageId: 'nonAtomicUpdate', data: value: 'foo']
,
code: '''
foo = null
->
while condition
foo += await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + (if bar then baz else await amount)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + (if bar then await amount else baz)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = if condition then foo + await amount else somethingElse
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = (if condition then foo else await bar) + await bar
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo += bar + await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
->
foo = null
bar () => foo
foo += await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo += yield baz
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = bar(foo, await something)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = {}
-> foo.bar += await baz
'''
errors: [STATIC_PROPERTY_ERROR]
,
code: '''
foo = []
-> foo[bar].baz += await result
'''
errors: [COMPUTED_PROPERTY_ERROR]
,
code: '''
foo = null
-> foo = (yield foo) + await bar
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + await result(foo)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = await result(foo, await somethingElse)
'''
errors: [VARIABLE_ERROR]
,
code: '''
->
foo = null
yield -> foo += await bar
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = await foo + (yield bar)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = bar + await foo
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = {}
-> foo[bar].baz = await (foo.bar += await foo[bar].baz)
'''
errors: [COMPUTED_PROPERTY_ERROR, STATIC_PROPERTY_ERROR]
,
code: '-> foo += await bar'
errors: [VARIABLE_ERROR]
]
| true | ###*
# @fileoverview disallow assignments that can lead to race conditions due to usage of `await` or `yield`
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/require-atomic-updates'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
VARIABLE_ERROR =
messageId: 'nonAtomicUpdate'
data: value: 'foo'
type: 'AssignmentExpression'
STATIC_PROPERTY_ERROR =
messageId: 'nonAtomicUpdate'
data: value: 'foo.bar'
type: 'AssignmentExpression'
COMPUTED_PROPERTY_ERROR =
messageId: 'nonAtomicUpdate'
data: value: 'foo[bar].baz'
type: 'AssignmentExpression'
ruleTester.run 'require-atomic-updates', rule,
valid: [
'''
foo = null
x = ->
await y
foo += bar
'''
'''
foo = null
x = ->
await y
foo = foo + bar
'''
'''
foo = null
x = ->
foo = await bar + foo
'''
'''
->
foo = null
foo += await bar
'''
'''
foo = null
->
foo = (await result)(foo)
'''
'''
foo = null
->
foo = bar(await something, foo)
'''
'''
->
foo = null
foo += yield bar
'''
'''
foo = {}
->
foo.bar = await baz
'''
'''
foo = []
->
await y
foo[x] += 1
'''
'''
foo = null
->
yield
foo = bar + foo
'''
'''
->
foo = null
bar(() => baz += 1)
foo += await amount
'''
'''
foo = null
->
foo = if condition then foo else await bar
'''
]
invalid: [
code: '''
foo = null
-> foo += await amount
'''
errors: [messageId: 'nonAtomicUpdate', data: value: 'foo']
,
code: '''
foo = null
->
while condition
foo += await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + (if bar then baz else await amount)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + (if bar then await amount else baz)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = if condition then foo + await amount else somethingElse
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = (if condition then foo else await bar) + await bar
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo += bar + await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
->
foo = null
bar () => foo
foo += await amount
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo += yield baz
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = bar(foo, await something)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = {}
-> foo.bar += await baz
'''
errors: [STATIC_PROPERTY_ERROR]
,
code: '''
foo = []
-> foo[bar].baz += await result
'''
errors: [COMPUTED_PROPERTY_ERROR]
,
code: '''
foo = null
-> foo = (yield foo) + await bar
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = foo + await result(foo)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = await result(foo, await somethingElse)
'''
errors: [VARIABLE_ERROR]
,
code: '''
->
foo = null
yield -> foo += await bar
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = await foo + (yield bar)
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = null
-> foo = bar + await foo
'''
errors: [VARIABLE_ERROR]
,
code: '''
foo = {}
-> foo[bar].baz = await (foo.bar += await foo[bar].baz)
'''
errors: [COMPUTED_PROPERTY_ERROR, STATIC_PROPERTY_ERROR]
,
code: '-> foo += await bar'
errors: [VARIABLE_ERROR]
]
|
[
{
"context": "uage: Indonesian (Bahasa Indonesia)\n# Translators: Hellstad\n\nid =\n\n add: \"tambah\"\n and: \"dan\"\n back: \"kemb",
"end": 65,
"score": 0.991890013217926,
"start": 57,
"tag": "NAME",
"value": "Hellstad"
},
{
"context": " and: \"dan\"\n back: \"kembali\"\n cha... | t9n/id.coffee | coWorkr-InSights/meteor-accounts-t9n | 80 | # Language: Indonesian (Bahasa Indonesia)
# Translators: Hellstad
id =
add: "tambah"
and: "dan"
back: "kembali"
changePassword: "Ganti Password"
choosePassword: "Masukkan Password"
clickAgree: "Dengan Anda mendaftar, Anda setuju dengan"
configure: "Mengaturkan"
createAccount: "Buat Account"
currentPassword: "Password Anda Saat Ini"
dontHaveAnAccount: "Tidak punya account?"
email: "Email"
emailAddress: "Alamat email"
emailResetLink: "Link untuk email reset"
forgotPassword: "Lupa password?"
ifYouAlreadyHaveAnAccount: "Jika Anda sudah punya akun"
newPassword: "Password Baru"
newPasswordAgain: "Password Baru (ulang)"
optional: "Opsional"
OR: "ATAU"
password: "Password"
passwordAgain: "Password (ulang)"
privacyPolicy: "Kebijakan Privasi"
remove: "hapus"
resetYourPassword: "Reset password anda"
setPassword: "Masukkan Password"
sign: "Sign"
signIn: "Sign In"
signin: "sign in"
signOut: "Sign Out"
signUp: "Mendaftar"
signupCode: "Kode Registrasi"
signUpWithYourEmailAddress: "Mendaftar dengan alamat email Anda"
terms: "Persyaratan Layanan"
updateYourPassword: "Perbarui password Anda"
username: "Username"
usernameOrEmail: "Username atau email"
with: "dengan"
info:
emailSent: "Email terkirim"
emailVerified: "Email diverifikasi"
passwordChanged: "Password terganti"
passwordReset: "Password direset"
error:
emailRequired: "Alamat email dibutuhkan."
minChar: "Minimum password 7 karakter."
pwdsDontMatch: "Password yang diulang tidak sama."
pwOneDigit: "Password harus ada minimum 1 angka."
pwOneLetter: "Password harus ada minimum 1 huruf."
signInRequired: "Anda harus sign in untuk melakukan itu."
signupCodeIncorrect: "Kode registrasi salah."
signupCodeRequired: "Kode registrasi dibutuhkan."
usernameIsEmail: "Username Anda tidak bisa sama dengan email address."
usernameRequired: "Username dibutuhkan."
accounts:
#---- accounts-base
#"@" + domain + " email required"
#"A login handler should return a result or undefined"
"Email already exists.": "Alamat email sudah dipakai."
"Email doesn't match the criteria.": "Alamat email tidak sesuai dengan kriteria."
"Invalid login token": "Login token tidak valid"
"Login forbidden": "Login dilarang"
#"Service " + options.service + " already configured"
"Service unknown": "Layanan unknown"
"Unrecognized options for login request": "Options tidak tersedia untuk permintaan login"
"User validation failed": "Validasi user gagal"
"Username already exists.": "Username sudah dipakai."
"You are not logged in.": "Anda belum login."
"You've been logged out by the server. Please log in again.": "Anda belum dilogout oleh server. Silahkan coba login lagi."
"Your session has expired. Please log in again.": "Session Anda telah kadaluarsa. Silahkan coba login lagi."
#---- accounts-oauth
"No matching login attempt found": "Usaha login tidak ditemukan."
#---- accounts-password-client
"Password is old. Please reset your password.": "Password Anda terlalu tua. Silahkan ganti password Anda."
#---- accounts-password
"Incorrect password": "Password salah"
"Invalid email": "Alamat email tidak valid"
"Must be logged in": "Anda harus login"
"Need to set a username or email": "Anda harus masukkan username atau email"
"old password format": "format password lama"
"Password may not be empty": "Password tidak boleh kosong"
"Signups forbidden": "Signup dilarang"
"Token expired": "Token telah kadaluarsa"
"Token has invalid email address": "Token memberikan alamat email yang tidak valid"
"User has no password set": "User belum memasukkan password"
"User not found": "User tidak ditemukan"
"Verify email link expired": "Link untuk verifikasi alamat email telah kadaluarsa"
"Verify email link is for unknown address": "Link untuk verifikasi alamat email memberikan alamat email yang tidak dikenalkan"
#---- match
"Match failed": "Mencocokan gagal"
#---- Misc...
"Unknown error": "Error tidak dikenalkan"
T9n?.map "id", id
module?.exports = id
| 34243 | # Language: Indonesian (Bahasa Indonesia)
# Translators: <NAME>
id =
add: "tambah"
and: "dan"
back: "kembali"
changePassword: "<PASSWORD>"
choosePassword: "<PASSWORD>"
clickAgree: "Dengan Anda mendaftar, Anda setuju dengan"
configure: "Mengaturkan"
createAccount: "Buat Account"
currentPassword: "<PASSWORD>"
dontHaveAnAccount: "Tidak punya account?"
email: "Email"
emailAddress: "Alamat email"
emailResetLink: "Link untuk email reset"
forgotPassword: "<PASSWORD>?"
ifYouAlreadyHaveAnAccount: "Jika Anda sudah punya akun"
newPassword: "<PASSWORD>"
newPasswordAgain: "<PASSWORD>)"
optional: "Opsional"
OR: "ATAU"
password: "<PASSWORD>"
passwordAgain: "<PASSWORD>)"
privacyPolicy: "Kebijakan Privasi"
remove: "hapus"
resetYourPassword: "<PASSWORD>"
setPassword: "<PASSWORD>"
sign: "Sign"
signIn: "Sign In"
signin: "sign in"
signOut: "Sign Out"
signUp: "Mendaftar"
signupCode: "Kode Registrasi"
signUpWithYourEmailAddress: "Mendaftar dengan alamat email Anda"
terms: "Persyaratan Layanan"
updateYourPassword: "<PASSWORD>"
username: "Username"
usernameOrEmail: "Username atau email"
with: "dengan"
info:
emailSent: "Email terkirim"
emailVerified: "Email diverifikasi"
passwordChanged: "<PASSWORD>"
passwordReset: "<PASSWORD>"
error:
emailRequired: "Alamat email dibutuhkan."
minChar: "Minimum password 7 karakter."
pwdsDontMatch: "Password yang diulang tidak sama."
pwOneDigit: "Password harus ada minimum 1 angka."
pwOneLetter: "Password harus ada minimum 1 huruf."
signInRequired: "Anda harus sign in untuk melakukan itu."
signupCodeIncorrect: "Kode registrasi salah."
signupCodeRequired: "Kode registrasi dibutuhkan."
usernameIsEmail: "Username Anda tidak bisa sama dengan email address."
usernameRequired: "Username dibutuhkan."
accounts:
#---- accounts-base
#"@" + domain + " email required"
#"A login handler should return a result or undefined"
"Email already exists.": "Alamat email sudah dipakai."
"Email doesn't match the criteria.": "Alamat email tidak sesuai dengan kriteria."
"Invalid login token": "Login token tidak valid"
"Login forbidden": "Login dilarang"
#"Service " + options.service + " already configured"
"Service unknown": "Layanan unknown"
"Unrecognized options for login request": "Options tidak tersedia untuk permintaan login"
"User validation failed": "Validasi user gagal"
"Username already exists.": "Username sudah dipakai."
"You are not logged in.": "Anda belum login."
"You've been logged out by the server. Please log in again.": "Anda belum dilogout oleh server. Silahkan coba login lagi."
"Your session has expired. Please log in again.": "Session Anda telah kadaluarsa. Silahkan coba login lagi."
#---- accounts-oauth
"No matching login attempt found": "Usaha login tidak ditemukan."
#---- accounts-password-client
"Password is old. Please reset your password.": "Password Anda terlalu tua. Silahkan ganti password Anda."
#---- accounts-password
"Incorrect password": "<PASSWORD>"
"Invalid email": "Alamat email tidak valid"
"Must be logged in": "Anda harus login"
"Need to set a username or email": "Anda harus masukkan username atau email"
"old password format": "format password lama"
"Password may not be empty": "Password tidak boleh kosong"
"Signups forbidden": "Signup dilarang"
"Token expired": "Token telah kadaluarsa"
"Token has invalid email address": "Token memberikan alamat email yang tidak valid"
"User has no password set": "<PASSWORD>"
"User not found": "User tidak ditemukan"
"Verify email link expired": "Link untuk verifikasi alamat email telah kadaluarsa"
"Verify email link is for unknown address": "Link untuk verifikasi alamat email memberikan alamat email yang tidak dikenalkan"
#---- match
"Match failed": "Mencocokan gagal"
#---- Misc...
"Unknown error": "Error tidak dikenalkan"
T9n?.map "id", id
module?.exports = id
| true | # Language: Indonesian (Bahasa Indonesia)
# Translators: PI:NAME:<NAME>END_PI
id =
add: "tambah"
and: "dan"
back: "kembali"
changePassword: "PI:PASSWORD:<PASSWORD>END_PI"
choosePassword: "PI:PASSWORD:<PASSWORD>END_PI"
clickAgree: "Dengan Anda mendaftar, Anda setuju dengan"
configure: "Mengaturkan"
createAccount: "Buat Account"
currentPassword: "PI:PASSWORD:<PASSWORD>END_PI"
dontHaveAnAccount: "Tidak punya account?"
email: "Email"
emailAddress: "Alamat email"
emailResetLink: "Link untuk email reset"
forgotPassword: "PI:PASSWORD:<PASSWORD>END_PI?"
ifYouAlreadyHaveAnAccount: "Jika Anda sudah punya akun"
newPassword: "PI:PASSWORD:<PASSWORD>END_PI"
newPasswordAgain: "PI:PASSWORD:<PASSWORD>END_PI)"
optional: "Opsional"
OR: "ATAU"
password: "PI:PASSWORD:<PASSWORD>END_PI"
passwordAgain: "PI:PASSWORD:<PASSWORD>END_PI)"
privacyPolicy: "Kebijakan Privasi"
remove: "hapus"
resetYourPassword: "PI:PASSWORD:<PASSWORD>END_PI"
setPassword: "PI:PASSWORD:<PASSWORD>END_PI"
sign: "Sign"
signIn: "Sign In"
signin: "sign in"
signOut: "Sign Out"
signUp: "Mendaftar"
signupCode: "Kode Registrasi"
signUpWithYourEmailAddress: "Mendaftar dengan alamat email Anda"
terms: "Persyaratan Layanan"
updateYourPassword: "PI:PASSWORD:<PASSWORD>END_PI"
username: "Username"
usernameOrEmail: "Username atau email"
with: "dengan"
info:
emailSent: "Email terkirim"
emailVerified: "Email diverifikasi"
passwordChanged: "PI:PASSWORD:<PASSWORD>END_PI"
passwordReset: "PI:PASSWORD:<PASSWORD>END_PI"
error:
emailRequired: "Alamat email dibutuhkan."
minChar: "Minimum password 7 karakter."
pwdsDontMatch: "Password yang diulang tidak sama."
pwOneDigit: "Password harus ada minimum 1 angka."
pwOneLetter: "Password harus ada minimum 1 huruf."
signInRequired: "Anda harus sign in untuk melakukan itu."
signupCodeIncorrect: "Kode registrasi salah."
signupCodeRequired: "Kode registrasi dibutuhkan."
usernameIsEmail: "Username Anda tidak bisa sama dengan email address."
usernameRequired: "Username dibutuhkan."
accounts:
#---- accounts-base
#"@" + domain + " email required"
#"A login handler should return a result or undefined"
"Email already exists.": "Alamat email sudah dipakai."
"Email doesn't match the criteria.": "Alamat email tidak sesuai dengan kriteria."
"Invalid login token": "Login token tidak valid"
"Login forbidden": "Login dilarang"
#"Service " + options.service + " already configured"
"Service unknown": "Layanan unknown"
"Unrecognized options for login request": "Options tidak tersedia untuk permintaan login"
"User validation failed": "Validasi user gagal"
"Username already exists.": "Username sudah dipakai."
"You are not logged in.": "Anda belum login."
"You've been logged out by the server. Please log in again.": "Anda belum dilogout oleh server. Silahkan coba login lagi."
"Your session has expired. Please log in again.": "Session Anda telah kadaluarsa. Silahkan coba login lagi."
#---- accounts-oauth
"No matching login attempt found": "Usaha login tidak ditemukan."
#---- accounts-password-client
"Password is old. Please reset your password.": "Password Anda terlalu tua. Silahkan ganti password Anda."
#---- accounts-password
"Incorrect password": "PI:PASSWORD:<PASSWORD>END_PI"
"Invalid email": "Alamat email tidak valid"
"Must be logged in": "Anda harus login"
"Need to set a username or email": "Anda harus masukkan username atau email"
"old password format": "format password lama"
"Password may not be empty": "Password tidak boleh kosong"
"Signups forbidden": "Signup dilarang"
"Token expired": "Token telah kadaluarsa"
"Token has invalid email address": "Token memberikan alamat email yang tidak valid"
"User has no password set": "PI:PASSWORD:<PASSWORD>END_PI"
"User not found": "User tidak ditemukan"
"Verify email link expired": "Link untuk verifikasi alamat email telah kadaluarsa"
"Verify email link is for unknown address": "Link untuk verifikasi alamat email memberikan alamat email yang tidak dikenalkan"
#---- match
"Match failed": "Mencocokan gagal"
#---- Misc...
"Unknown error": "Error tidak dikenalkan"
T9n?.map "id", id
module?.exports = id
|
[
{
"context": "# Coffeebar\n# ---------\n# Copyright (c) 2013 Charles Moncrief <cmoncrief@gmail.com>\n#\n# MIT Licensed\n\n# Coffeeb",
"end": 61,
"score": 0.9998130798339844,
"start": 45,
"tag": "NAME",
"value": "Charles Moncrief"
},
{
"context": " ---------\n# Copyright (c) 2013 Char... | src/coffeebar.coffee | jalavosus/coffeebar | 4 | # Coffeebar
# ---------
# Copyright (c) 2013 Charles Moncrief <cmoncrief@gmail.com>
#
# MIT Licensed
# Coffeebar is a minimalistic CoffeeScript build utility. It supports file
# watching, minification and concatenation of source files. Coffeebar is
# available as a command line utility and can also be used directly from the
# public API.
# External dependencies.
fs = require 'fs'
path = require 'path'
beholder = require 'beholder'
coffee = require 'coffee-script'
glob = require 'glob'
mkdirp = require 'mkdirp'
xcolor = require 'xcolor'
Source = require './source'
sourcemap = require 'source-map'
# Valid CoffeeScript file extentsions
exts = ['coffee', 'litcoffee', 'coffee.md']
# The Coffebar class is the main entry point of the API. Creating a new
# instance will initialize and kick off a build.
class Coffeebar
# Initialization
# --------------
# Initialize the default options and the color scheme. The join option is
# implied rather then specific. Once initial setup is completed, kick off
# the initial build.
constructor: (@inputPaths, @options = {}) ->
@sources = []
@options.watch ?= false
@options.silent ?= true
@options.minify ?= false
@options.sourceMap ?= false
@options.sourceMap = false if @options.minify
@options.join = true if @options.output and path.extname(@options.output)
@options.bare ?= false
@options.header ?= true
@initColors()
@initPaths()
@start()
# Prepare the specified input paths to be scanned by glob by assuming that
# we actually want to compile the entire directory tree that was passed in,
# unless an actual filename was passed in.
initPaths: ->
unless Array.isArray(@inputPaths) then @inputPaths = [@inputPaths]
for inputPath, i in @inputPaths
inputPath = path.normalize inputPath
unless path.extname(inputPath)
@inputPaths[i] = "#{inputPath}/**/*.{#{exts}}"
# Find all the src files in the input trees and create a new representation
# of them via the Source class.
addSources: ->
for inputPath in @inputPaths
files = glob.sync inputPath
@sources.push(new Source(@options, file, inputPath)) for file in files
# Start-up the initial process by adding the sources, building them,
# and starting a watch on them if specified. This is only called once,
# subsequent builds will be called directly from the watch process.
start: ->
@addSources()
@build()
if @options.watch
@watch i for i in @inputPaths
# Build
# -----
# Compile and write out all of the sources in our collection, transforming
# and reporting errors along the way.
build: ->
@offsetSources()
@compileSources()
@mapSources() if @options.sourceMap
@minifySources() if @options.minify
@reportErrors()
@writeSources()
@writeJoinSources() if @options.sourceMap and @options.join
# Compile each source in the collection if it has been updated
# more recently than the last time it was written out. If this
# build is targetting a joined file, join all of the sources
# prior to compilation.
compileSources: ->
@outputs = if @options.join then @joinSources() else @sources
source.compile() for source in @outputs when source.updated
# Minify each source in the collection if it was compiled without
# errors more recently than it was written out.
minifySources: ->
source.minify() for source in @outputs when source.outputReady()
# Source maps
# -----------
# Append the source map comments to each output file. In the case of
# a joined file, we take the mappings that came out of the compiler
# and remap them to the original source files, rather than the joined
# src file that we sent to the compiler.
mapSources: ->
unless @options.join
source.writeMapComment() for source in @sources
return
return unless @outputs[0].sourceMap
smOld = new sourcemap.SourceMapConsumer @outputs[0].sourceMap
smNew = new sourcemap.SourceMapGenerator {file: smOld.file, sourceRoot: "#{path.basename(@options.output, '.js')}_mapsrc"}
smOld.eachMapping (map) => smNew.addMapping(@offsetMapping map)
@outputs[0].writeMapComment smNew.toString()
# After compilation, report each error that was logged. In the event
# that this is a joined output file, use the line number offset to
# detect which input file the error actually occurred in.
reportErrors: ->
if @options.join and @outputs[0].error
source = @getOriginalSource @outputs[0].errorLine
@outputs[0].errorLine = @outputs[0].errorLine - source.offset
@outputs[0].errorFile = source.file
source.reportError() for source in @outputs when source.error
# Write out each source in the collection if it was compiled without
# error more recently than it was written out.
writeSources: ->
source.write() for source in @outputs when source.outputReady()
# After writing out a joined output file with source maps enabled we
# write out a special mapping directory next to it that contains all of
# the original source files. The source map itself points to this directory
# rather than the original files.
writeJoinSources: ->
outputPath = path.join path.dirname(@options.output), "#{path.basename(@options.output, '.js')}_mapsrc"
source.writeSource(outputPath) for source in @sources when source.outputReady()
# Watch
# -----
# Watch an input path for changes, additions and removals. When
# an event is triggered, add or remove the source and kick off
# a build.
watch: (inputPath) ->
watcher = beholder inputPath
watcher.on 'change', (file) =>
source = @getSource file
source.read()
@build()
watcher.on 'new', (file) =>
@sources.push(new Source(@options, file))
@build()
watcher.on 'remove', (file) =>
@sources = (i for i in @sources when i.file isnt file)
@build() if @options.join
# Utilities
#----------
# Record the line offsets for each source file for later use.
offsetSources: ->
offset = 0
for source in @sources
source.offset = offset
offset += source.lines
# Retrieves the original source from a joined line number.
getOriginalSource: (line) ->
for source in @sources
return source if source.offset + source.lines > line
# Remaps a source mappping from the joined version to the original files.
offsetMapping: (map) ->
source = @getOriginalSource map.originalLine - 1
newMap =
generated: {line: map.generatedLine, column: map.generatedColumn}
original: {line: map.originalLine - source.offset, column: map.originalColumn}
source: source.file
# Join all sources by concatenating the input src code and return
# an array with only the newly joined source element for output.
joinSources: ->
joinSrc = ""
joinSrc = joinSrc.concat(i.src + "\n") for i in @sources
joinSource = new Source(@options)
joinSource.src = joinSrc
joinSource.outputPath = @options.output
[joinSource]
# Retrieves the source in our collection by file name.
getSource: (file) ->
return i for i in @sources when i.file is file
# Initialize our CLI theme with some sharp looking colors.
initColors: ->
xcolor.addStyle coffee : 'chocolate'
xcolor.addStyle boldCoffee : ['bold', 'chocolate']
xcolor.addStyle error : 'crimson'
# Exports
# -------
# Export a new instance of Coffeebar.
module.exports = (inputPaths, options) ->
new Coffeebar inputPaths, options
| 150533 | # Coffeebar
# ---------
# Copyright (c) 2013 <NAME> <<EMAIL>>
#
# MIT Licensed
# Coffeebar is a minimalistic CoffeeScript build utility. It supports file
# watching, minification and concatenation of source files. Coffeebar is
# available as a command line utility and can also be used directly from the
# public API.
# External dependencies.
fs = require 'fs'
path = require 'path'
beholder = require 'beholder'
coffee = require 'coffee-script'
glob = require 'glob'
mkdirp = require 'mkdirp'
xcolor = require 'xcolor'
Source = require './source'
sourcemap = require 'source-map'
# Valid CoffeeScript file extentsions
exts = ['coffee', 'litcoffee', 'coffee.md']
# The Coffebar class is the main entry point of the API. Creating a new
# instance will initialize and kick off a build.
class Coffeebar
# Initialization
# --------------
# Initialize the default options and the color scheme. The join option is
# implied rather then specific. Once initial setup is completed, kick off
# the initial build.
constructor: (@inputPaths, @options = {}) ->
@sources = []
@options.watch ?= false
@options.silent ?= true
@options.minify ?= false
@options.sourceMap ?= false
@options.sourceMap = false if @options.minify
@options.join = true if @options.output and path.extname(@options.output)
@options.bare ?= false
@options.header ?= true
@initColors()
@initPaths()
@start()
# Prepare the specified input paths to be scanned by glob by assuming that
# we actually want to compile the entire directory tree that was passed in,
# unless an actual filename was passed in.
initPaths: ->
unless Array.isArray(@inputPaths) then @inputPaths = [@inputPaths]
for inputPath, i in @inputPaths
inputPath = path.normalize inputPath
unless path.extname(inputPath)
@inputPaths[i] = "#{inputPath}/**/*.{#{exts}}"
# Find all the src files in the input trees and create a new representation
# of them via the Source class.
addSources: ->
for inputPath in @inputPaths
files = glob.sync inputPath
@sources.push(new Source(@options, file, inputPath)) for file in files
# Start-up the initial process by adding the sources, building them,
# and starting a watch on them if specified. This is only called once,
# subsequent builds will be called directly from the watch process.
start: ->
@addSources()
@build()
if @options.watch
@watch i for i in @inputPaths
# Build
# -----
# Compile and write out all of the sources in our collection, transforming
# and reporting errors along the way.
build: ->
@offsetSources()
@compileSources()
@mapSources() if @options.sourceMap
@minifySources() if @options.minify
@reportErrors()
@writeSources()
@writeJoinSources() if @options.sourceMap and @options.join
# Compile each source in the collection if it has been updated
# more recently than the last time it was written out. If this
# build is targetting a joined file, join all of the sources
# prior to compilation.
compileSources: ->
@outputs = if @options.join then @joinSources() else @sources
source.compile() for source in @outputs when source.updated
# Minify each source in the collection if it was compiled without
# errors more recently than it was written out.
minifySources: ->
source.minify() for source in @outputs when source.outputReady()
# Source maps
# -----------
# Append the source map comments to each output file. In the case of
# a joined file, we take the mappings that came out of the compiler
# and remap them to the original source files, rather than the joined
# src file that we sent to the compiler.
mapSources: ->
unless @options.join
source.writeMapComment() for source in @sources
return
return unless @outputs[0].sourceMap
smOld = new sourcemap.SourceMapConsumer @outputs[0].sourceMap
smNew = new sourcemap.SourceMapGenerator {file: smOld.file, sourceRoot: "#{path.basename(@options.output, '.js')}_mapsrc"}
smOld.eachMapping (map) => smNew.addMapping(@offsetMapping map)
@outputs[0].writeMapComment smNew.toString()
# After compilation, report each error that was logged. In the event
# that this is a joined output file, use the line number offset to
# detect which input file the error actually occurred in.
reportErrors: ->
if @options.join and @outputs[0].error
source = @getOriginalSource @outputs[0].errorLine
@outputs[0].errorLine = @outputs[0].errorLine - source.offset
@outputs[0].errorFile = source.file
source.reportError() for source in @outputs when source.error
# Write out each source in the collection if it was compiled without
# error more recently than it was written out.
writeSources: ->
source.write() for source in @outputs when source.outputReady()
# After writing out a joined output file with source maps enabled we
# write out a special mapping directory next to it that contains all of
# the original source files. The source map itself points to this directory
# rather than the original files.
writeJoinSources: ->
outputPath = path.join path.dirname(@options.output), "#{path.basename(@options.output, '.js')}_mapsrc"
source.writeSource(outputPath) for source in @sources when source.outputReady()
# Watch
# -----
# Watch an input path for changes, additions and removals. When
# an event is triggered, add or remove the source and kick off
# a build.
watch: (inputPath) ->
watcher = beholder inputPath
watcher.on 'change', (file) =>
source = @getSource file
source.read()
@build()
watcher.on 'new', (file) =>
@sources.push(new Source(@options, file))
@build()
watcher.on 'remove', (file) =>
@sources = (i for i in @sources when i.file isnt file)
@build() if @options.join
# Utilities
#----------
# Record the line offsets for each source file for later use.
offsetSources: ->
offset = 0
for source in @sources
source.offset = offset
offset += source.lines
# Retrieves the original source from a joined line number.
getOriginalSource: (line) ->
for source in @sources
return source if source.offset + source.lines > line
# Remaps a source mappping from the joined version to the original files.
offsetMapping: (map) ->
source = @getOriginalSource map.originalLine - 1
newMap =
generated: {line: map.generatedLine, column: map.generatedColumn}
original: {line: map.originalLine - source.offset, column: map.originalColumn}
source: source.file
# Join all sources by concatenating the input src code and return
# an array with only the newly joined source element for output.
joinSources: ->
joinSrc = ""
joinSrc = joinSrc.concat(i.src + "\n") for i in @sources
joinSource = new Source(@options)
joinSource.src = joinSrc
joinSource.outputPath = @options.output
[joinSource]
# Retrieves the source in our collection by file name.
getSource: (file) ->
return i for i in @sources when i.file is file
# Initialize our CLI theme with some sharp looking colors.
initColors: ->
xcolor.addStyle coffee : 'chocolate'
xcolor.addStyle boldCoffee : ['bold', 'chocolate']
xcolor.addStyle error : 'crimson'
# Exports
# -------
# Export a new instance of Coffeebar.
module.exports = (inputPaths, options) ->
new Coffeebar inputPaths, options
| true | # Coffeebar
# ---------
# Copyright (c) 2013 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# MIT Licensed
# Coffeebar is a minimalistic CoffeeScript build utility. It supports file
# watching, minification and concatenation of source files. Coffeebar is
# available as a command line utility and can also be used directly from the
# public API.
# External dependencies.
fs = require 'fs'
path = require 'path'
beholder = require 'beholder'
coffee = require 'coffee-script'
glob = require 'glob'
mkdirp = require 'mkdirp'
xcolor = require 'xcolor'
Source = require './source'
sourcemap = require 'source-map'
# Valid CoffeeScript file extentsions
exts = ['coffee', 'litcoffee', 'coffee.md']
# The Coffebar class is the main entry point of the API. Creating a new
# instance will initialize and kick off a build.
class Coffeebar
  # Initialization
  # --------------
  # Initialize the default options and the color scheme. The join option is
  # implied rather than explicit: an output path that names a file (i.e. has
  # an extension) switches the build into single-file join mode. Once initial
  # setup is completed, kick off the initial build.
  #
  # NOTE(review): `path`, `glob`, `Source`, `beholder`, `sourcemap` and
  # `xcolor` are used below but required above this chunk — confirm.
  constructor: (@inputPaths, @options = {}) ->
    @sources = []
    @options.watch ?= false
    @options.silent ?= true
    @options.minify ?= false
    @options.sourceMap ?= false
    # Minification and source maps are mutually exclusive here; minify wins.
    @options.sourceMap = false if @options.minify
    @options.join = true if @options.output and path.extname(@options.output)
    @options.bare ?= false
    @options.header ?= true
    @initColors()
    @initPaths()
    @start()
  # Prepare the specified input paths to be scanned by glob by assuming that
  # we actually want to compile the entire directory tree that was passed in,
  # unless an actual filename was passed in.
  initPaths: ->
    unless Array.isArray(@inputPaths) then @inputPaths = [@inputPaths]
    for inputPath, i in @inputPaths
      inputPath = path.normalize inputPath
      unless path.extname(inputPath)
        # `exts` interpolates into a brace-expansion glob, e.g.
        # "dir/**/*.{coffee,litcoffee,coffee.md}".
        @inputPaths[i] = "#{inputPath}/**/*.{#{exts}}"
  # Find all the src files in the input trees and create a new representation
  # of them via the Source class.
  addSources: ->
    for inputPath in @inputPaths
      files = glob.sync inputPath
      @sources.push(new Source(@options, file, inputPath)) for file in files
  # Start up the initial process by adding the sources, building them,
  # and starting a watch on them if specified. This is only called once;
  # subsequent builds will be called directly from the watch process.
  start: ->
    @addSources()
    @build()
    if @options.watch
      @watch i for i in @inputPaths
  # Build
  # -----
  # Compile and write out all of the sources in our collection, transforming
  # and reporting errors along the way. Steps run in a fixed order: offsets
  # must be recorded before compiling (error/source-map remapping relies on
  # them), and errors are reported before anything is written.
  build: ->
    @offsetSources()
    @compileSources()
    @mapSources() if @options.sourceMap
    @minifySources() if @options.minify
    @reportErrors()
    @writeSources()
    @writeJoinSources() if @options.sourceMap and @options.join
  # Compile each source in the collection if it has been updated
  # more recently than the last time it was written out. If this
  # build is targeting a joined file, join all of the sources
  # prior to compilation.
  compileSources: ->
    @outputs = if @options.join then @joinSources() else @sources
    source.compile() for source in @outputs when source.updated
  # Minify each source in the collection if it was compiled without
  # errors more recently than it was written out.
  minifySources: ->
    source.minify() for source in @outputs when source.outputReady()
  # Source maps
  # -----------
  # Append the source map comments to each output file. In the case of
  # a joined file, we take the mappings that came out of the compiler
  # and remap them to the original source files, rather than the joined
  # src file that we sent to the compiler.
  mapSources: ->
    unless @options.join
      source.writeMapComment() for source in @sources
      return
    # Nothing to remap if the compiler produced no map (e.g. compile error).
    return unless @outputs[0].sourceMap
    smOld = new sourcemap.SourceMapConsumer @outputs[0].sourceMap
    # The regenerated map points at the "<output>_mapsrc" directory written
    # by writeJoinSources() below.
    smNew = new sourcemap.SourceMapGenerator {file: smOld.file, sourceRoot: "#{path.basename(@options.output, '.js')}_mapsrc"}
    smOld.eachMapping (map) => smNew.addMapping(@offsetMapping map)
    @outputs[0].writeMapComment smNew.toString()
  # After compilation, report each error that was logged. In the event
  # that this is a joined output file, use the line number offset to
  # detect which input file the error actually occurred in.
  reportErrors: ->
    if @options.join and @outputs[0].error
      source = @getOriginalSource @outputs[0].errorLine
      @outputs[0].errorLine = @outputs[0].errorLine - source.offset
      @outputs[0].errorFile = source.file
    source.reportError() for source in @outputs when source.error
  # Write out each source in the collection if it was compiled without
  # error more recently than it was written out.
  writeSources: ->
    source.write() for source in @outputs when source.outputReady()
  # After writing out a joined output file with source maps enabled we
  # write out a special mapping directory next to it that contains all of
  # the original source files. The source map itself points to this directory
  # rather than the original files.
  writeJoinSources: ->
    outputPath = path.join path.dirname(@options.output), "#{path.basename(@options.output, '.js')}_mapsrc"
    source.writeSource(outputPath) for source in @sources when source.outputReady()
  # Watch
  # -----
  # Watch an input path for changes, additions and removals. When
  # an event is triggered, add or remove the source and kick off
  # a build.
  watch: (inputPath) ->
    watcher = beholder inputPath
    watcher.on 'change', (file) =>
      source = @getSource file
      source.read()
      @build()
    watcher.on 'new', (file) =>
      @sources.push(new Source(@options, file))
      @build()
    watcher.on 'remove', (file) =>
      @sources = (i for i in @sources when i.file isnt file)
      # Only joined builds need recompiling after a removal; per-file
      # outputs are simply no longer rewritten.
      @build() if @options.join
  # Utilities
  #----------
  # Record the line offsets for each source file for later use
  # (joined-build error reporting and source-map remapping).
  offsetSources: ->
    offset = 0
    for source in @sources
      source.offset = offset
      offset += source.lines
  # Retrieves the original source from a joined line number.
  # NOTE(review): if `line` is beyond the last source, no `return` fires and
  # the loop's comprehension value (an array) is returned — callers appear
  # to assume a match; confirm.
  getOriginalSource: (line) ->
    for source in @sources
      return source if source.offset + source.lines > line
  # Remaps a source mapping from the joined version to the original files.
  # Returns an object in the shape expected by SourceMapGenerator#addMapping.
  offsetMapping: (map) ->
    source = @getOriginalSource map.originalLine - 1
    newMap =
      generated: {line: map.generatedLine, column: map.generatedColumn}
      original: {line: map.originalLine - source.offset, column: map.originalColumn}
      source: source.file
  # Join all sources by concatenating the input src code and return
  # an array with only the newly joined source element for output.
  joinSources: ->
    joinSrc = ""
    # Postfix `for` applies to the whole assignment, so joinSrc accumulates.
    joinSrc = joinSrc.concat(i.src + "\n") for i in @sources
    joinSource = new Source(@options)
    joinSource.src = joinSrc
    joinSource.outputPath = @options.output
    [joinSource]
  # Retrieves the source in our collection by file name.
  # Returns the first match; the early `return` short-circuits the loop.
  getSource: (file) ->
    return i for i in @sources when i.file is file
  # Initialize our CLI theme with some sharp looking colors.
  initColors: ->
    xcolor.addStyle coffee : 'chocolate'
    xcolor.addStyle boldCoffee : ['bold', 'chocolate']
    xcolor.addStyle error : 'crimson'
# Exports
# -------
# Public factory: construct (and thereby immediately run) a Coffeebar
# build for the given input paths and options.
module.exports = (inputPaths, options) -> new Coffeebar(inputPaths, options)
|
[
{
"context": "### ^\nBSD 3-Clause License\n\nCopyright (c) 2017, Stephan Jorek\nAll rights reserved.\n\nRedistribution and use in s",
"end": 61,
"score": 0.9998364448547363,
"start": 48,
"tag": "NAME",
"value": "Stephan Jorek"
}
] | src/Command.coffee | sjorek/goatee-rules.js | 0 | ### ^
BSD 3-Clause License
Copyright (c) 2017, Stephan Jorek
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
# External dependencies.
ScriptCommand = require 'goatee-script.js/lib/Command'
###
# # Commandline …
# ---------------
#
# … of the `goatee-rules` utility. Handles evaluation of
# statements or launches an interactive REPL.
###
###*
# -------------
# @class Command
# @namespace GoateeRules
###
class Command extends ScriptCommand

  ###*
  # Build the goatee-rules command-line runner by handing the GoateeRules
  # implementation to the generic goatee-script command base class.
  #
  # @constructor
  # @param {Function} [command=GoateeRules.GoateeRules] class function
  ###
  constructor: (command = require './GoateeRules') ->
    super command
module.exports = Command
| 148704 | ### ^
BSD 3-Clause License
Copyright (c) 2017, <NAME>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
# External dependencies.
ScriptCommand = require 'goatee-script.js/lib/Command'
###
# # Commandline …
# ---------------
#
# … of the `goatee-rules` utility. Handles evaluation of
# statements or launches an interactive REPL.
###
###*
# -------------
# @class Command
# @namespace GoateeRules
###
class Command extends ScriptCommand
###*
# -------------
# @constructor
# @param {Function} [command=GoateeRules.GoateeRules] class function
###
constructor: (command = require('./GoateeRules')) ->
super(command)
module.exports = Command
| true | ### ^
BSD 3-Clause License
Copyright (c) 2017, PI:NAME:<NAME>END_PI
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
# External dependencies.
ScriptCommand = require 'goatee-script.js/lib/Command'
###
# # Commandline …
# ---------------
#
# … of the `goatee-rules` utility. Handles evaluation of
# statements or launches an interactive REPL.
###
###*
# -------------
# @class Command
# @namespace GoateeRules
###
class Command extends ScriptCommand
###*
# -------------
# @constructor
# @param {Function} [command=GoateeRules.GoateeRules] class function
###
constructor: (command = require('./GoateeRules')) ->
super(command)
module.exports = Command
|
[
{
"context": "1486033460000)\n nonce = '1048450919'\n secret = 'qwerty';\n datetime = HiveTaxi.util.date.rfc822(date)\n ",
"end": 1500,
"score": 0.9988561868667603,
"start": 1494,
"tag": "KEY",
"value": "qwerty"
},
{
"context": "(date)\n creds = null\n signature = 'N0RySpCZm7t7... | test/signers/v1.spec.coffee | HIVETAXI/hive.taxi.sdk.js | 0 | helpers = require('../helpers')
HiveTaxi = helpers.HiveTaxi
Operation = HiveTaxi.Model.Operation
svc = HiveTaxi.Protocol.RestJson
beforeEach ->
helpers.spyOn(HiveTaxi.util, 'userAgent').andReturn('hivetaxi-sdk-js/0.1')
buildRequest = ->
crt = new HiveTaxi.Contractor({region: 'region', endpoint: 'localhost', apiVersion: '1.0'})
req = crt.makeRequest('getEmployees', {contractor: 100000044547, groupId: '100000051646'})
req.build()
req.httpRequest.headers['X-Hive-User-Agent'] = 'hivetaxi-sdk-js/0.1'
req.httpRequest
buildSigner = (request, signatureCache) ->
if typeof signatureCache != 'boolean'
signatureCache = true
return new HiveTaxi.Signers.V1(request || buildRequest(), 'Contractor', signatureCache)
buildSignerFromService = (signatureCache) ->
if typeof signatureCache != 'boolean'
signatureCache = true
crt = new HiveTaxi.Contractor({region: 'region', endpoint: 'localhost', apiVersion: '1.0'})
signer = buildSigner(null, signatureCache)
signer.setServiceClientId(crt._clientId)
return signer
MockJSONRESTService = helpers.util.inherit HiveTaxi.Service,
endpointPrefix: 'mockservice'
operation = null
request = null
response = null
service = null
defop = (op) ->
helpers.util.property(service.api.operations, 'sampleOperation',
new Operation('sampleOperation', op, api: service.api))
build = -> svc.buildRequest(request); request
describe 'HiveTaxi.Signers.V1', ->
date = new Date(1486033460000)
nonce = '1048450919'
secret = 'qwerty';
datetime = HiveTaxi.util.date.rfc822(date)
creds = null
signature = 'N0RySpCZm7t72FjJJ6osHbUZrsASItIA2j3dO3/FBgE='
authorization = 'hmac admin:' + nonce + ':' + signature
signer = null
beforeEach ->
creds = accessKeyId: 'admin', secretAccessKey: secret, sessionToken: 'session'
signer = buildSigner()
signer.addAuthorization(creds, date, nonce)
describe 'constructor', ->
it 'can build a signer for a request object', ->
req = buildRequest()
signer = buildSigner(req)
expect(signer.request).to.equal(req)
describe 'addAuthorization', ->
headers = {
'Content-Type': 'application/json',
'Content-Length': 85,
# 'X-Hive-Target': 'Employees.getEmployees',
'Host': 'localhost',
'X-Hive-Date': datetime,
'x-hive-security-token' : 'session',
'Authentication' : authorization
}
for key, value of headers
func = (k) ->
it 'should add ' + k + ' header', ->
expect(signer.request.headers[k]).to.equal(headers[k])
func(key)
describe 'authorization', ->
it 'should return authorization part for signer', ->
expect(signer.authorization(creds, datetime)).to.equal(authorization)
describe 'signature', ->
it 'should generate proper signature', ->
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for text:// secret', ->
creds.secretAccessKey = 'text://qwerty'
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for plain:// secret', ->
creds.secretAccessKey = 'plain://qwerty'
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for plain-text:// secret', ->
creds.secretAccessKey = 'plain-text://qwerty'
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for sha256-encoded secret', ->
encoded_secret = HiveTaxi.util.crypto.sha256(secret)
creds.secretAccessKey = 'sha256://' + encoded_secret
expect(signer.signature(creds, datetime)).to.equal(signature);
it 'should generate proper signature for base64-encoded secret', ->
encoded_secret = HiveTaxi.util.crypto.sha256(secret)
creds.secretAccessKey = 'base64://' + HiveTaxi.util.base64.encode(encoded_secret)
expect(signer.signature(creds, datetime)).to.equal(signature);
it 'should generate proper signature for b64-encoded (alias for base64) secret', ->
encoded_secret = HiveTaxi.util.crypto.sha256(secret)
creds.secretAccessKey = 'b64://' + HiveTaxi.util.base64.encode(encoded_secret)
expect(signer.signature(creds, datetime)).to.equal(signature);
it 'should not compute HMAC more than once', ->
spy = helpers.spyOn(HiveTaxi.util.crypto, 'hmac').andCallThrough()
signer.signature(creds, datetime)
expect(spy.calls.length).to.eql(1)
describe 'caching', ->
hmacCallCount = null
hmacCalls = null
sha256CallCount = null
sha256Calls = null
beforeEach ->
helpers.spyOn(HiveTaxi.util.crypto, 'hmac')
helpers.spyOn(HiveTaxi.util.crypto, 'sha256')
signer.signature(creds, datetime)
hmacCalls = HiveTaxi.util.crypto.hmac.calls
hmacCallCount = hmacCalls.length
sha256Calls = HiveTaxi.util.crypto.sha256.calls
sha256CallCount = sha256Calls.length
it 'will cache a maximum of 50 clients', (done) ->
maxCacheEntries = 50
clientSigners = (buildSignerFromService() for i in [0..maxCacheEntries-1])
hmacCallCount = hmacCalls.length
sha256CallCount = sha256Calls.length
#Get signature for all clients to store them in cache
(clientSigners[i].signature(creds, datetime) for i in [0..clientSigners.length-1])
expect(hmacCalls.length).to.equal(hmacCallCount + maxCacheEntries)
expect(sha256Calls.length).to.equal(sha256CallCount + maxCacheEntries)
#Signer should use cache
hmacCallCount = hmacCalls.length
sha256CallCount = sha256Calls.length
clientSigners[0].signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 1)
expect(sha256Calls.length).to.equal(sha256CallCount)
#add a new signer, pushing past cache limit
newestSigner = buildSignerFromService()
newestSigner.signature(creds, datetime)
#old signer shouldn't be using cache anymore
hmacCallCount = hmacCalls.length
sha256CallCount = sha256Calls.length
clientSigners[0].signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 1)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
done()
#Calling signer.signature should call hmac 1 time when caching, and 5 times when not caching
it 'caches subsequent requests', ->
signer.signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 1)
signer.signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 2)
it 'busts cache if caching is disabled', ->
signer = buildSigner(null, false)
sha256CallCount = sha256Calls.length
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if region changes', ->
signer.request.region = 'new-region'
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if service changes', ->
signer.serviceName = 'newService'
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if access key changes', ->
creds.accessKeyId = 'NEWAKID'
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if date changes', ->
newDate = new Date(date.getTime() + 1000000000)
newDatetime = HiveTaxi.util.date.rfc822(newDate)
signer.signature(creds, newDatetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'uses a different cache if client is different', ->
signer1 = buildSignerFromService()
sha256CallCount = sha256Calls.length
signer1.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
signer2 = buildSignerFromService()
sha256CallCount = sha256Calls.length
signer2.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'works when using the same client', ->
signer1 = buildSignerFromService()
sha256CallCount = sha256Calls.length
signer1.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
signer1.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
describe 'stringToSign', ->
it 'should sign correctly generated input string', ->
expect(signer.stringToSign(datetime, nonce)).to.equal 'POST/api/contractors/100000044547' + datetime + nonce
describe 'canonicalString', ->
beforeEach ->
MockJSONRESTService.prototype.api = new HiveTaxi.Model.Api
operations:
sampleOperation:
http:
method: 'POST'
uri: '/'
input:
type: 'structure'
members: {}
output:
type: 'structure'
members:
a: type: 'string'
b: type: 'string'
shapes:
structureshape:
type: 'structure'
members:
a: type: 'string'
b: type: 'string'
HiveTaxi.Service.defineMethods(MockJSONRESTService)
operation = MockJSONRESTService.prototype.api.operations.sampleOperation
service = new MockJSONRESTService(region: 'region')
request = new HiveTaxi.Request(service, 'sampleOperation')
response = new HiveTaxi.Response(request)
it 'sorts the search string', ->
request.params = query: 'foo', cursor: 'initial', queryOptions: '{}'
defop
http: requestUri: '/path?format=sdk&pretty=true'
input:
type: 'structure'
members:
query:
location: 'querystring'
locationName: 'q'
queryOptions:
location: 'querystring'
locationName: 'q.options'
cursor:
location: 'querystring'
locationName: 'cursor'
req = build()
signer = new HiveTaxi.Signers.V1(req.httpRequest, 'mockservice')
expect(signer.canonicalString().split('\n')[2]).to.equal('cursor=initial&format=sdk&pretty=true&q=foo&q.options=%7B%7D')
it 'double URI encodes paths', ->
request.params = ClientId: '111', AddressId: 'a:b:c'
defop
http: requestUri: '/client/{ClientId}/quick-address/{AddressId}/update'
input:
type: 'structure'
members:
ClientId:
location: 'uri'
locationName: 'ClientId'
AddressId:
location: 'uri'
locationName: 'AddressId'
req = build()
signer = new HiveTaxi.Signers.V1(req.httpRequest, 'mockservice')
expect(signer.canonicalString().split('\n')[1]).to.equal('/client/111/quick-address/a%253Ab%253Ac/update')
describe 'canonicalHeaders', ->
it 'should return headers', ->
expect(signer.canonicalHeaders()).to.eql [
'host:localhost',
# 'x-hive-content-sha256:c4f6509a8266e2e6bb3442091272baa28a8ac16de0821f3ffa2036c6cbd3bfba',
'x-hive-date:' + datetime,
'x-hive-security-token:session',
# 'x-hive-target:Employees.getEmployees',
# 'x-hive-user-agent:hivetaxi-sdk-js/0.1'
].join('\n')
it 'should ignore Authentication header', ->
signer.request.headers = {'Authentication': 'foo'}
expect(signer.canonicalHeaders()).to.equal('')
it 'should lowercase all header names (not values)', ->
signer.request.headers = {'FOO': 'BAR'}
expect(signer.canonicalHeaders()).to.equal('foo:BAR')
it 'should sort headers by key', ->
signer.request.headers = {abc: 'a', bca: 'b', Qux: 'c', bar: 'd'}
expect(signer.canonicalHeaders()).to.equal('abc:a\nbar:d\nbca:b\nqux:c')
it 'should compact multiple spaces in keys/values to a single space', ->
signer.request.headers = {'Header': 'Value with Multiple \t spaces'}
expect(signer.canonicalHeaders()).to.equal('header:Value with Multiple spaces')
it 'should strip starting and end of line spaces', ->
signer.request.headers = {'Header': ' \t Value \t '}
expect(signer.canonicalHeaders()).to.equal('header:Value')
| 222652 | helpers = require('../helpers')
HiveTaxi = helpers.HiveTaxi
Operation = HiveTaxi.Model.Operation
svc = HiveTaxi.Protocol.RestJson
beforeEach ->
helpers.spyOn(HiveTaxi.util, 'userAgent').andReturn('hivetaxi-sdk-js/0.1')
buildRequest = ->
crt = new HiveTaxi.Contractor({region: 'region', endpoint: 'localhost', apiVersion: '1.0'})
req = crt.makeRequest('getEmployees', {contractor: 100000044547, groupId: '100000051646'})
req.build()
req.httpRequest.headers['X-Hive-User-Agent'] = 'hivetaxi-sdk-js/0.1'
req.httpRequest
buildSigner = (request, signatureCache) ->
if typeof signatureCache != 'boolean'
signatureCache = true
return new HiveTaxi.Signers.V1(request || buildRequest(), 'Contractor', signatureCache)
buildSignerFromService = (signatureCache) ->
if typeof signatureCache != 'boolean'
signatureCache = true
crt = new HiveTaxi.Contractor({region: 'region', endpoint: 'localhost', apiVersion: '1.0'})
signer = buildSigner(null, signatureCache)
signer.setServiceClientId(crt._clientId)
return signer
MockJSONRESTService = helpers.util.inherit HiveTaxi.Service,
endpointPrefix: 'mockservice'
operation = null
request = null
response = null
service = null
defop = (op) ->
helpers.util.property(service.api.operations, 'sampleOperation',
new Operation('sampleOperation', op, api: service.api))
build = -> svc.buildRequest(request); request
describe 'HiveTaxi.Signers.V1', ->
date = new Date(1486033460000)
nonce = '1048450919'
secret = '<KEY>';
datetime = HiveTaxi.util.date.rfc822(date)
creds = null
signature = 'N0RySpCZm7t72<KEY>
authorization = 'hmac admin:' + nonce + ':' + signature
signer = null
beforeEach ->
creds = accessKeyId: '<KEY>', secretAccessKey: <KEY>, sessionToken: '<PASSWORD>'
signer = buildSigner()
signer.addAuthorization(creds, date, nonce)
describe 'constructor', ->
it 'can build a signer for a request object', ->
req = buildRequest()
signer = buildSigner(req)
expect(signer.request).to.equal(req)
describe 'addAuthorization', ->
headers = {
'Content-Type': 'application/json',
'Content-Length': 85,
# 'X-Hive-Target': 'Employees.getEmployees',
'Host': 'localhost',
'X-Hive-Date': datetime,
'x-hive-security-token' : 'session',
'Authentication' : authorization
}
for key, value of headers
func = (k) ->
it 'should add ' + k + ' header', ->
expect(signer.request.headers[k]).to.equal(headers[k])
func(key)
describe 'authorization', ->
it 'should return authorization part for signer', ->
expect(signer.authorization(creds, datetime)).to.equal(authorization)
describe 'signature', ->
it 'should generate proper signature', ->
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for text:// secret', ->
creds.secretAccessKey = 'text://<KEY>'
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for plain:// secret', ->
creds.secretAccessKey = 'plain://<KEY>'
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for plain-text:// secret', ->
creds.secretAccessKey = 'plain-<KEY>'
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for sha256-encoded secret', ->
encoded_secret = HiveTaxi.util.crypto.sha256(secret)
creds.secretAccessKey = 'sha<KEY>://' + encoded_secret
expect(signer.signature(creds, datetime)).to.equal(signature);
it 'should generate proper signature for base64-encoded secret', ->
encoded_secret = HiveTaxi.util.crypto.sha256(secret)
creds.secretAccessKey = '<KEY>' + HiveTaxi.util.base64.encode(encoded_secret)
expect(signer.signature(creds, datetime)).to.equal(signature);
it 'should generate proper signature for b64-encoded (alias for base64) secret', ->
encoded_secret = HiveTaxi.util.crypto.sha256(secret)
creds.secretAccessKey = '<KEY>' + HiveTaxi.util.base64.encode(encoded_secret)
expect(signer.signature(creds, datetime)).to.equal(signature);
it 'should not compute HMAC more than once', ->
spy = helpers.spyOn(HiveTaxi.util.crypto, 'hmac').andCallThrough()
signer.signature(creds, datetime)
expect(spy.calls.length).to.eql(1)
describe 'caching', ->
hmacCallCount = null
hmacCalls = null
sha256CallCount = null
sha256Calls = null
beforeEach ->
helpers.spyOn(HiveTaxi.util.crypto, 'hmac')
helpers.spyOn(HiveTaxi.util.crypto, 'sha256')
signer.signature(creds, datetime)
hmacCalls = HiveTaxi.util.crypto.hmac.calls
hmacCallCount = hmacCalls.length
sha256Calls = HiveTaxi.util.crypto.sha256.calls
sha256CallCount = sha256Calls.length
it 'will cache a maximum of 50 clients', (done) ->
maxCacheEntries = 50
clientSigners = (buildSignerFromService() for i in [0..maxCacheEntries-1])
hmacCallCount = hmacCalls.length
sha256CallCount = sha256Calls.length
#Get signature for all clients to store them in cache
(clientSigners[i].signature(creds, datetime) for i in [0..clientSigners.length-1])
expect(hmacCalls.length).to.equal(hmacCallCount + maxCacheEntries)
expect(sha256Calls.length).to.equal(sha256CallCount + maxCacheEntries)
#Signer should use cache
hmacCallCount = hmacCalls.length
sha256CallCount = sha256Calls.length
clientSigners[0].signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 1)
expect(sha256Calls.length).to.equal(sha256CallCount)
#add a new signer, pushing past cache limit
newestSigner = buildSignerFromService()
newestSigner.signature(creds, datetime)
#old signer shouldn't be using cache anymore
hmacCallCount = hmacCalls.length
sha256CallCount = sha256Calls.length
clientSigners[0].signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 1)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
done()
#Calling signer.signature should call hmac 1 time when caching, and 5 times when not caching
it 'caches subsequent requests', ->
signer.signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 1)
signer.signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 2)
it 'busts cache if caching is disabled', ->
signer = buildSigner(null, false)
sha256CallCount = sha256Calls.length
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if region changes', ->
signer.request.region = 'new-region'
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if service changes', ->
signer.serviceName = 'newService'
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if access key changes', ->
creds.accessKeyId = '<KEY>'
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if date changes', ->
newDate = new Date(date.getTime() + 1000000000)
newDatetime = HiveTaxi.util.date.rfc822(newDate)
signer.signature(creds, newDatetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'uses a different cache if client is different', ->
signer1 = buildSignerFromService()
sha256CallCount = sha256Calls.length
signer1.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
signer2 = buildSignerFromService()
sha256CallCount = sha256Calls.length
signer2.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'works when using the same client', ->
signer1 = buildSignerFromService()
sha256CallCount = sha256Calls.length
signer1.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
signer1.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
describe 'stringToSign', ->
it 'should sign correctly generated input string', ->
expect(signer.stringToSign(datetime, nonce)).to.equal 'POST/api/contractors/100000044547' + datetime + nonce
describe 'canonicalString', ->
beforeEach ->
MockJSONRESTService.prototype.api = new HiveTaxi.Model.Api
operations:
sampleOperation:
http:
method: 'POST'
uri: '/'
input:
type: 'structure'
members: {}
output:
type: 'structure'
members:
a: type: 'string'
b: type: 'string'
shapes:
structureshape:
type: 'structure'
members:
a: type: 'string'
b: type: 'string'
HiveTaxi.Service.defineMethods(MockJSONRESTService)
operation = MockJSONRESTService.prototype.api.operations.sampleOperation
service = new MockJSONRESTService(region: 'region')
request = new HiveTaxi.Request(service, 'sampleOperation')
response = new HiveTaxi.Response(request)
it 'sorts the search string', ->
request.params = query: 'foo', cursor: 'initial', queryOptions: '{}'
defop
http: requestUri: '/path?format=sdk&pretty=true'
input:
type: 'structure'
members:
query:
location: 'querystring'
locationName: 'q'
queryOptions:
location: 'querystring'
locationName: 'q.options'
cursor:
location: 'querystring'
locationName: 'cursor'
req = build()
signer = new HiveTaxi.Signers.V1(req.httpRequest, 'mockservice')
expect(signer.canonicalString().split('\n')[2]).to.equal('cursor=initial&format=sdk&pretty=true&q=foo&q.options=%7B%7D')
it 'double URI encodes paths', ->
request.params = ClientId: '111', AddressId: 'a:b:c'
defop
http: requestUri: '/client/{ClientId}/quick-address/{AddressId}/update'
input:
type: 'structure'
members:
ClientId:
location: 'uri'
locationName: 'ClientId'
AddressId:
location: 'uri'
locationName: 'AddressId'
req = build()
signer = new HiveTaxi.Signers.V1(req.httpRequest, 'mockservice')
expect(signer.canonicalString().split('\n')[1]).to.equal('/client/111/quick-address/a%253Ab%253Ac/update')
describe 'canonicalHeaders', ->
it 'should return headers', ->
expect(signer.canonicalHeaders()).to.eql [
'host:localhost',
# 'x-hive-content-sha256:c4f6509a8266e2e6bb3442091272baa28a8ac16de0821f3ffa2036c6cbd3bfba',
'x-hive-date:' + datetime,
'x-hive-security-token:session',
# 'x-hive-target:Employees.getEmployees',
# 'x-hive-user-agent:hivetaxi-sdk-js/0.1'
].join('\n')
it 'should ignore Authentication header', ->
signer.request.headers = {'Authentication': 'foo'}
expect(signer.canonicalHeaders()).to.equal('')
it 'should lowercase all header names (not values)', ->
signer.request.headers = {'FOO': 'BAR'}
expect(signer.canonicalHeaders()).to.equal('foo:BAR')
it 'should sort headers by key', ->
signer.request.headers = {abc: 'a', bca: 'b', Qux: 'c', bar: 'd'}
expect(signer.canonicalHeaders()).to.equal('abc:a\nbar:d\nbca:b\nqux:c')
it 'should compact multiple spaces in keys/values to a single space', ->
signer.request.headers = {'Header': 'Value with Multiple \t spaces'}
expect(signer.canonicalHeaders()).to.equal('header:Value with Multiple spaces')
it 'should strip starting and end of line spaces', ->
signer.request.headers = {'Header': ' \t Value \t '}
expect(signer.canonicalHeaders()).to.equal('header:Value')
| true | helpers = require('../helpers')
HiveTaxi = helpers.HiveTaxi
Operation = HiveTaxi.Model.Operation
svc = HiveTaxi.Protocol.RestJson
beforeEach ->
helpers.spyOn(HiveTaxi.util, 'userAgent').andReturn('hivetaxi-sdk-js/0.1')
buildRequest = ->
crt = new HiveTaxi.Contractor({region: 'region', endpoint: 'localhost', apiVersion: '1.0'})
req = crt.makeRequest('getEmployees', {contractor: 100000044547, groupId: '100000051646'})
req.build()
req.httpRequest.headers['X-Hive-User-Agent'] = 'hivetaxi-sdk-js/0.1'
req.httpRequest
buildSigner = (request, signatureCache) ->
if typeof signatureCache != 'boolean'
signatureCache = true
return new HiveTaxi.Signers.V1(request || buildRequest(), 'Contractor', signatureCache)
buildSignerFromService = (signatureCache) ->
if typeof signatureCache != 'boolean'
signatureCache = true
crt = new HiveTaxi.Contractor({region: 'region', endpoint: 'localhost', apiVersion: '1.0'})
signer = buildSigner(null, signatureCache)
signer.setServiceClientId(crt._clientId)
return signer
MockJSONRESTService = helpers.util.inherit HiveTaxi.Service,
endpointPrefix: 'mockservice'
operation = null
request = null
response = null
service = null
defop = (op) ->
helpers.util.property(service.api.operations, 'sampleOperation',
new Operation('sampleOperation', op, api: service.api))
build = -> svc.buildRequest(request); request
describe 'HiveTaxi.Signers.V1', ->
date = new Date(1486033460000)
nonce = '1048450919'
secret = 'PI:KEY:<KEY>END_PI';
datetime = HiveTaxi.util.date.rfc822(date)
creds = null
signature = 'N0RySpCZm7t72PI:KEY:<KEY>END_PI
authorization = 'hmac admin:' + nonce + ':' + signature
signer = null
beforeEach ->
creds = accessKeyId: 'PI:KEY:<KEY>END_PI', secretAccessKey: PI:KEY:<KEY>END_PI, sessionToken: 'PI:PASSWORD:<PASSWORD>END_PI'
signer = buildSigner()
signer.addAuthorization(creds, date, nonce)
describe 'constructor', ->
it 'can build a signer for a request object', ->
req = buildRequest()
signer = buildSigner(req)
expect(signer.request).to.equal(req)
describe 'addAuthorization', ->
headers = {
'Content-Type': 'application/json',
'Content-Length': 85,
# 'X-Hive-Target': 'Employees.getEmployees',
'Host': 'localhost',
'X-Hive-Date': datetime,
'x-hive-security-token' : 'session',
'Authentication' : authorization
}
for key, value of headers
func = (k) ->
it 'should add ' + k + ' header', ->
expect(signer.request.headers[k]).to.equal(headers[k])
func(key)
describe 'authorization', ->
it 'should return authorization part for signer', ->
expect(signer.authorization(creds, datetime)).to.equal(authorization)
describe 'signature', ->
it 'should generate proper signature', ->
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for text:// secret', ->
creds.secretAccessKey = 'text://PI:KEY:<KEY>END_PI'
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for plain:// secret', ->
creds.secretAccessKey = 'plain://PI:KEY:<KEY>END_PI'
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for plain-text:// secret', ->
creds.secretAccessKey = 'plain-PI:KEY:<KEY>END_PI'
expect(signer.signature(creds, datetime)).to.equal(signature)
it 'should generate proper signature for sha256-encoded secret', ->
encoded_secret = HiveTaxi.util.crypto.sha256(secret)
creds.secretAccessKey = 'shaPI:KEY:<KEY>END_PI://' + encoded_secret
expect(signer.signature(creds, datetime)).to.equal(signature);
it 'should generate proper signature for base64-encoded secret', ->
encoded_secret = HiveTaxi.util.crypto.sha256(secret)
creds.secretAccessKey = 'PI:KEY:<KEY>END_PI' + HiveTaxi.util.base64.encode(encoded_secret)
expect(signer.signature(creds, datetime)).to.equal(signature);
it 'should generate proper signature for b64-encoded (alias for base64) secret', ->
encoded_secret = HiveTaxi.util.crypto.sha256(secret)
creds.secretAccessKey = 'PI:KEY:<KEY>END_PI' + HiveTaxi.util.base64.encode(encoded_secret)
expect(signer.signature(creds, datetime)).to.equal(signature);
it 'should not compute HMAC more than once', ->
spy = helpers.spyOn(HiveTaxi.util.crypto, 'hmac').andCallThrough()
signer.signature(creds, datetime)
expect(spy.calls.length).to.eql(1)
describe 'caching', ->
hmacCallCount = null
hmacCalls = null
sha256CallCount = null
sha256Calls = null
beforeEach ->
helpers.spyOn(HiveTaxi.util.crypto, 'hmac')
helpers.spyOn(HiveTaxi.util.crypto, 'sha256')
signer.signature(creds, datetime)
hmacCalls = HiveTaxi.util.crypto.hmac.calls
hmacCallCount = hmacCalls.length
sha256Calls = HiveTaxi.util.crypto.sha256.calls
sha256CallCount = sha256Calls.length
it 'will cache a maximum of 50 clients', (done) ->
maxCacheEntries = 50
clientSigners = (buildSignerFromService() for i in [0..maxCacheEntries-1])
hmacCallCount = hmacCalls.length
sha256CallCount = sha256Calls.length
#Get signature for all clients to store them in cache
(clientSigners[i].signature(creds, datetime) for i in [0..clientSigners.length-1])
expect(hmacCalls.length).to.equal(hmacCallCount + maxCacheEntries)
expect(sha256Calls.length).to.equal(sha256CallCount + maxCacheEntries)
#Signer should use cache
hmacCallCount = hmacCalls.length
sha256CallCount = sha256Calls.length
clientSigners[0].signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 1)
expect(sha256Calls.length).to.equal(sha256CallCount)
#add a new signer, pushing past cache limit
newestSigner = buildSignerFromService()
newestSigner.signature(creds, datetime)
#old signer shouldn't be using cache anymore
hmacCallCount = hmacCalls.length
sha256CallCount = sha256Calls.length
clientSigners[0].signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 1)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
done()
#Calling signer.signature should call hmac 1 time when caching, and 5 times when not caching
it 'caches subsequent requests', ->
signer.signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 1)
signer.signature(creds, datetime)
expect(hmacCalls.length).to.equal(hmacCallCount + 2)
it 'busts cache if caching is disabled', ->
signer = buildSigner(null, false)
sha256CallCount = sha256Calls.length
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if region changes', ->
signer.request.region = 'new-region'
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if service changes', ->
signer.serviceName = 'newService'
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if access key changes', ->
creds.accessKeyId = 'PI:KEY:<KEY>END_PI'
signer.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'busts cache if date changes', ->
newDate = new Date(date.getTime() + 1000000000)
newDatetime = HiveTaxi.util.date.rfc822(newDate)
signer.signature(creds, newDatetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'uses a different cache if client is different', ->
signer1 = buildSignerFromService()
sha256CallCount = sha256Calls.length
signer1.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
signer2 = buildSignerFromService()
sha256CallCount = sha256Calls.length
signer2.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
it 'works when using the same client', ->
signer1 = buildSignerFromService()
sha256CallCount = sha256Calls.length
signer1.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
signer1.signature(creds, datetime)
expect(sha256Calls.length).to.equal(sha256CallCount + 1)
describe 'stringToSign', ->
it 'should sign correctly generated input string', ->
expect(signer.stringToSign(datetime, nonce)).to.equal 'POST/api/contractors/100000044547' + datetime + nonce
describe 'canonicalString', ->
beforeEach ->
MockJSONRESTService.prototype.api = new HiveTaxi.Model.Api
operations:
sampleOperation:
http:
method: 'POST'
uri: '/'
input:
type: 'structure'
members: {}
output:
type: 'structure'
members:
a: type: 'string'
b: type: 'string'
shapes:
structureshape:
type: 'structure'
members:
a: type: 'string'
b: type: 'string'
HiveTaxi.Service.defineMethods(MockJSONRESTService)
operation = MockJSONRESTService.prototype.api.operations.sampleOperation
service = new MockJSONRESTService(region: 'region')
request = new HiveTaxi.Request(service, 'sampleOperation')
response = new HiveTaxi.Response(request)
it 'sorts the search string', ->
request.params = query: 'foo', cursor: 'initial', queryOptions: '{}'
defop
http: requestUri: '/path?format=sdk&pretty=true'
input:
type: 'structure'
members:
query:
location: 'querystring'
locationName: 'q'
queryOptions:
location: 'querystring'
locationName: 'q.options'
cursor:
location: 'querystring'
locationName: 'cursor'
req = build()
signer = new HiveTaxi.Signers.V1(req.httpRequest, 'mockservice')
expect(signer.canonicalString().split('\n')[2]).to.equal('cursor=initial&format=sdk&pretty=true&q=foo&q.options=%7B%7D')
it 'double URI encodes paths', ->
request.params = ClientId: '111', AddressId: 'a:b:c'
defop
http: requestUri: '/client/{ClientId}/quick-address/{AddressId}/update'
input:
type: 'structure'
members:
ClientId:
location: 'uri'
locationName: 'ClientId'
AddressId:
location: 'uri'
locationName: 'AddressId'
req = build()
signer = new HiveTaxi.Signers.V1(req.httpRequest, 'mockservice')
expect(signer.canonicalString().split('\n')[1]).to.equal('/client/111/quick-address/a%253Ab%253Ac/update')
describe 'canonicalHeaders', ->
it 'should return headers', ->
expect(signer.canonicalHeaders()).to.eql [
'host:localhost',
# 'x-hive-content-sha256:c4f6509a8266e2e6bb3442091272baa28a8ac16de0821f3ffa2036c6cbd3bfba',
'x-hive-date:' + datetime,
'x-hive-security-token:session',
# 'x-hive-target:Employees.getEmployees',
# 'x-hive-user-agent:hivetaxi-sdk-js/0.1'
].join('\n')
it 'should ignore Authentication header', ->
signer.request.headers = {'Authentication': 'foo'}
expect(signer.canonicalHeaders()).to.equal('')
it 'should lowercase all header names (not values)', ->
signer.request.headers = {'FOO': 'BAR'}
expect(signer.canonicalHeaders()).to.equal('foo:BAR')
it 'should sort headers by key', ->
signer.request.headers = {abc: 'a', bca: 'b', Qux: 'c', bar: 'd'}
expect(signer.canonicalHeaders()).to.equal('abc:a\nbar:d\nbca:b\nqux:c')
it 'should compact multiple spaces in keys/values to a single space', ->
signer.request.headers = {'Header': 'Value with Multiple \t spaces'}
expect(signer.canonicalHeaders()).to.equal('header:Value with Multiple spaces')
it 'should strip starting and end of line spaces', ->
signer.request.headers = {'Header': ' \t Value \t '}
expect(signer.canonicalHeaders()).to.equal('header:Value')
|
[
{
"context": "obj\", ->\n m = \n name: \"jim\"\n age: 8\n\n o = {}\n\n ",
"end": 382,
"score": 0.9995672702789307,
"start": 379,
"tag": "NAME",
"value": "jim"
},
{
"context": " sayHello: sayHello\n\n o = {name: \... | test/moduleSpec.coffee | zhongxingdou/mayjs | 0 | # encoding: utf-8
sinon = require 'sinon'
describe 'module.js', ->
M = require("../may.js")
describe '$module(obj)', ->
it "should return given object itself", ->
obj = {}
M.$module(obj).should.eql obj
describe '$include(obj, module, option)', ->
it "should merge module to obj", ->
m =
name: "jim"
age: 8
o = {}
M.$include(o, m)
o.should.have.property(member, m[member]) for member of m
it "should merge methodized method to obj if option.methodize be true", ->
sayHello = (man) ->
"hello " + man.name
m =
sayHello: sayHello
o = {name: "jim"}
M.$include(o, m,
methodize: true
)
#check methodized
o.sayHello().should.eql sayHello(o)
it "should call the onIncluded() after include", ->
spy = sinon.spy()
m =
onIncluded: spy
o = {}
M.$include(o, m)
spy.calledOn(o).should.be.true
spy.calledWith(o).should.be.true
it "should call the onIncluded() with passing context", ->
spy = sinon.spy()
m =
onIncluded: spy
o = {}
a = {}
M.$include(o, m,
context: a
)
spy.calledOn(o).should.be.true
spy.calledWith(a).should.be.true
it "should not merge specific member, include meta member which name starts with __, onIncluded callback", ->
m =
onIncluded: ->
__interface__: []
member1: {}
o = {}
M.$include(o, m)
o.should.not.have.property(member) for member in ["onIncluded", "__interface__"]
o.should.have.property("member1")
it "should implementation all of interfaces from module", ->
interfaces = [{}, {}]
m =
__interfaces__: interfaces
o = {}
M.$include(o, m)
o.should.have.property("__interfaces__")
o.__interfaces__.should.containEql inter for inter in interfaces
it "should not call onIncluded() if option.stopCallback be true", ->
m =
onIncluded: sinon.spy()
M.$include({}, m, {stopCallback: true})
m.onIncluded.called.should.be.false
| 154336 | # encoding: utf-8
sinon = require 'sinon'
describe 'module.js', ->
M = require("../may.js")
describe '$module(obj)', ->
it "should return given object itself", ->
obj = {}
M.$module(obj).should.eql obj
describe '$include(obj, module, option)', ->
it "should merge module to obj", ->
m =
name: "<NAME>"
age: 8
o = {}
M.$include(o, m)
o.should.have.property(member, m[member]) for member of m
it "should merge methodized method to obj if option.methodize be true", ->
sayHello = (man) ->
"hello " + man.name
m =
sayHello: sayHello
o = {name: "<NAME>"}
M.$include(o, m,
methodize: true
)
#check methodized
o.sayHello().should.eql sayHello(o)
it "should call the onIncluded() after include", ->
spy = sinon.spy()
m =
onIncluded: spy
o = {}
M.$include(o, m)
spy.calledOn(o).should.be.true
spy.calledWith(o).should.be.true
it "should call the onIncluded() with passing context", ->
spy = sinon.spy()
m =
onIncluded: spy
o = {}
a = {}
M.$include(o, m,
context: a
)
spy.calledOn(o).should.be.true
spy.calledWith(a).should.be.true
it "should not merge specific member, include meta member which name starts with __, onIncluded callback", ->
m =
onIncluded: ->
__interface__: []
member1: {}
o = {}
M.$include(o, m)
o.should.not.have.property(member) for member in ["onIncluded", "__interface__"]
o.should.have.property("member1")
it "should implementation all of interfaces from module", ->
interfaces = [{}, {}]
m =
__interfaces__: interfaces
o = {}
M.$include(o, m)
o.should.have.property("__interfaces__")
o.__interfaces__.should.containEql inter for inter in interfaces
it "should not call onIncluded() if option.stopCallback be true", ->
m =
onIncluded: sinon.spy()
M.$include({}, m, {stopCallback: true})
m.onIncluded.called.should.be.false
| true | # encoding: utf-8
sinon = require 'sinon'
describe 'module.js', ->
M = require("../may.js")
describe '$module(obj)', ->
it "should return given object itself", ->
obj = {}
M.$module(obj).should.eql obj
describe '$include(obj, module, option)', ->
it "should merge module to obj", ->
m =
name: "PI:NAME:<NAME>END_PI"
age: 8
o = {}
M.$include(o, m)
o.should.have.property(member, m[member]) for member of m
it "should merge methodized method to obj if option.methodize be true", ->
sayHello = (man) ->
"hello " + man.name
m =
sayHello: sayHello
o = {name: "PI:NAME:<NAME>END_PI"}
M.$include(o, m,
methodize: true
)
#check methodized
o.sayHello().should.eql sayHello(o)
it "should call the onIncluded() after include", ->
spy = sinon.spy()
m =
onIncluded: spy
o = {}
M.$include(o, m)
spy.calledOn(o).should.be.true
spy.calledWith(o).should.be.true
it "should call the onIncluded() with passing context", ->
spy = sinon.spy()
m =
onIncluded: spy
o = {}
a = {}
M.$include(o, m,
context: a
)
spy.calledOn(o).should.be.true
spy.calledWith(a).should.be.true
it "should not merge specific member, include meta member which name starts with __, onIncluded callback", ->
m =
onIncluded: ->
__interface__: []
member1: {}
o = {}
M.$include(o, m)
o.should.not.have.property(member) for member in ["onIncluded", "__interface__"]
o.should.have.property("member1")
it "should implementation all of interfaces from module", ->
interfaces = [{}, {}]
m =
__interfaces__: interfaces
o = {}
M.$include(o, m)
o.should.have.property("__interfaces__")
o.__interfaces__.should.containEql inter for inter in interfaces
it "should not call onIncluded() if option.stopCallback be true", ->
m =
onIncluded: sinon.spy()
M.$include({}, m, {stopCallback: true})
m.onIncluded.called.should.be.false
|
[
{
"context": "# Copyright (c) 161 SARL, https://161.io\n#'use strict'\n\n# Alternative Reti",
"end": 24,
"score": 0.9953351616859436,
"start": 20,
"tag": "NAME",
"value": "SARL"
}
] | retina-2x.coffee | basselin/retina-2x | 0 | # Copyright (c) 161 SARL, https://161.io
#'use strict'
# Alternative Retina.js : loading issues
win = window
$ = win.jQuery
# @return {Boolean}
isRetina = ->
return true if win.devicePixelRatio > 1
mediaQuery = '(-webkit-min-device-pixel-ratio: 2), (min--moz-device-pixel-ratio: 2), (-o-min-device-pixel-ratio: 2/1), (min-device-pixel-ratio: 2), (min-resolution: 192dpi), (min-resolution: 2dppx)';
return true if win.matchMedia && win.matchMedia(mediaQuery).matches
false
# Load images -2x OR with attribute data-r2x=""
load2x = ->
suffix = '-2x'
regexMatch = /\.\w+$/
data2x = 'r2x'
# @param {String} str
# @return {String}
suffixReplace = (str) ->
suffix + str
$('img:not([data-no-retina])').each ->
$img = $(@)
if $img.data data2x
src2 = $img.data data2x
else
parts = $img.attr('src').split '?'
parts[0] = parts[0].replace regexMatch, suffixReplace
src2 = parts.join '?'
$tmpImg = $('<img>')
$tmpImg.on 'load', ->
setTimeout( ->
attr = {
'src': src2
}
attr['width'] = $img.width() if $img.width()
attr['height'] = $img.height() if $img.height()
$img.attr attr
return
, 10)
return
$tmpImg.attr 'src', src2
return
# Start
if isRetina()
$(win).on 'load', load2x
| 156274 | # Copyright (c) 161 <NAME>, https://161.io
#'use strict'
# Alternative Retina.js : loading issues
win = window
$ = win.jQuery
# @return {Boolean}
isRetina = ->
return true if win.devicePixelRatio > 1
mediaQuery = '(-webkit-min-device-pixel-ratio: 2), (min--moz-device-pixel-ratio: 2), (-o-min-device-pixel-ratio: 2/1), (min-device-pixel-ratio: 2), (min-resolution: 192dpi), (min-resolution: 2dppx)';
return true if win.matchMedia && win.matchMedia(mediaQuery).matches
false
# Load images -2x OR with attribute data-r2x=""
load2x = ->
suffix = '-2x'
regexMatch = /\.\w+$/
data2x = 'r2x'
# @param {String} str
# @return {String}
suffixReplace = (str) ->
suffix + str
$('img:not([data-no-retina])').each ->
$img = $(@)
if $img.data data2x
src2 = $img.data data2x
else
parts = $img.attr('src').split '?'
parts[0] = parts[0].replace regexMatch, suffixReplace
src2 = parts.join '?'
$tmpImg = $('<img>')
$tmpImg.on 'load', ->
setTimeout( ->
attr = {
'src': src2
}
attr['width'] = $img.width() if $img.width()
attr['height'] = $img.height() if $img.height()
$img.attr attr
return
, 10)
return
$tmpImg.attr 'src', src2
return
# Start
if isRetina()
$(win).on 'load', load2x
| true | # Copyright (c) 161 PI:NAME:<NAME>END_PI, https://161.io
#'use strict'
# Alternative Retina.js : loading issues
win = window
$ = win.jQuery
# @return {Boolean}
isRetina = ->
return true if win.devicePixelRatio > 1
mediaQuery = '(-webkit-min-device-pixel-ratio: 2), (min--moz-device-pixel-ratio: 2), (-o-min-device-pixel-ratio: 2/1), (min-device-pixel-ratio: 2), (min-resolution: 192dpi), (min-resolution: 2dppx)';
return true if win.matchMedia && win.matchMedia(mediaQuery).matches
false
# Load images -2x OR with attribute data-r2x=""
load2x = ->
suffix = '-2x'
regexMatch = /\.\w+$/
data2x = 'r2x'
# @param {String} str
# @return {String}
suffixReplace = (str) ->
suffix + str
$('img:not([data-no-retina])').each ->
$img = $(@)
if $img.data data2x
src2 = $img.data data2x
else
parts = $img.attr('src').split '?'
parts[0] = parts[0].replace regexMatch, suffixReplace
src2 = parts.join '?'
$tmpImg = $('<img>')
$tmpImg.on 'load', ->
setTimeout( ->
attr = {
'src': src2
}
attr['width'] = $img.width() if $img.width()
attr['height'] = $img.height() if $img.height()
$img.attr attr
return
, 10)
return
$tmpImg.attr 'src', src2
return
# Start
if isRetina()
$(win).on 'load', load2x
|
[
{
"context": " xmppComp.channels.stanza res\n @c.describe \"class@c.domain.tld\", (err, s, res) ->\n (expect s.tree().name)",
"end": 1763,
"score": 0.999584436416626,
"start": 1745,
"tag": "EMAIL",
"value": "class@c.domain.tld"
},
{
"context": "d\", xmlns: JOAP_NS)\n ... | spec/Client.spec.coffee | flosse/node-xmpp-joap | 0 | joap = require "../src/node-xmpp-joap"
ltx = require "ltx"
xmpp = require "node-xmpp-core"
chai = require 'chai'
expect = chai.expect
{ JID } = xmpp
JOAP_NS = "jabber:iq:joap"
RPC_NS = "jabber:iq:rpc"
describe "Client", ->
compJID = "comp.exmaple.tld"
xmppComp =
channels: {}
send: (data) ->
onData: (data) ->
on: (channel, cb) ->
@channels[channel] = cb
removeListener: ->
connection: jid: new JID compJID
beforeEach -> @c = new joap.Client xmppComp
it "is a class", ->
(expect typeof joap.Client).to.equal "function"
it "takes an xmpp object as first argument", ->
(expect (new joap.Client xmppComp).xmpp).to.equal xmppComp
it "provides methods to perform JOAP actions", ->
c = new joap.Client xmppComp
(expect typeof c.describe).to.equal "function"
(expect typeof c.read).to.equal "function"
(expect typeof c.add).to.equal "function"
(expect typeof c.edit).to.equal "function"
(expect typeof c.search).to.equal "function"
describe "description method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("describe")
.c("attributedescription")
.c("name").t("foo").up()
.c("type").t("int").up()
.c("desc", "xml:lang": 'en-US').t("foo").up()
.c("desc", "xml:lang": 'de-DE').t("bar").up().up()
.c("methoddescription")
.c("name").t("myMethod").up()
.c("returnType").t("boolean").up()
.c("desc", "xml:lang": 'en-US').t("great method").up()
xmppComp.channels.stanza res
@c.describe "class@c.domain.tld", (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect s.tree().attrs.id).to.equal iq.tree().attrs.id
(expect res).to.deep.equal
desc: {}
attributes:
foo:
name: "foo"
type: "int"
desc:
'en-US': "foo"
'de-DE': "bar"
methods:
myMethod:
name: "myMethod"
returnType: 'boolean'
desc: 'en-US': "great method"
classes: []
(expect iq.tree().name).to.equal 'iq'
(expect iq.tree().children[0].toString()).to.equal '<describe ' +
'xmlns="jabber:iq:joap"/>'
done()
describe "add method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req.tree()
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("add", xmlns: JOAP_NS)
.c("newAddress").t("class@example.org/instance")
xmppComp.channels.stanza res
@c.add "class@c.domain.tld", {x:"y"}, (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect res).to.equal "class@example.org/instance"
done()
describe "read method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("read", xmlns: JOAP_NS)
.c("attribute")
.c("name").t("magic").up()
.c("value").c("i4").t(23)
xmppComp.channels.stanza res
@c.read "class@c.domain.tld/x", ["magic"], (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect res).to.eql {magic: 23}
done()
describe "read method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("edit", xmlns: JOAP_NS)
.c("newAddress").t("x@y.z/0")
xmppComp.channels.stanza res
@c.edit "class@c.domain.tld/x", { "magic":6 } , (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect res).to.equal "x@y.z/0"
done()
describe "search method", ->
it "sends a correct iq", (done) ->
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("search", xmlns: JOAP_NS)
.c("item").t("x@y.z/0")
xmppComp.channels.stanza res
@c.search "class@c.domain.tld", { "magic":6 } , (err, s, res) ->
(expect res).to.eql ["x@y.z/0"]
done()
describe "rpc method", ->
it "sends a correct iq", (done) ->
xmppComp.send = (req) ->
iq = req.tree()
mName = iq.getChild "query"
.getChild "methodCall"
.getChild "methodName"
.text()
(expect mName).to.equal "myMethod"
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("query", xmlns: RPC_NS)
.c "methodResponse"
.c "params"
.c "param"
.c("value").c("int").t(7)
xmppComp.channels.stanza res.tree()
@c.methodCall "myMethod", "class@c.domain.tld", ["avalue"] , (err, s, res) ->
(expect res).to.equal 7
done()
describe "delete method", ->
it "sends a correct iq", (done) ->
xmppComp.send = (req) ->
iq = req.tree()
(expect iq.getChild("delete")).to.exist
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("delete", xmlns: JOAP_NS)
xmppComp.channels.stanza res
@c.delete "class@c.domain.tld/inst", (err, s, res) -> done()
| 60510 | joap = require "../src/node-xmpp-joap"
ltx = require "ltx"
xmpp = require "node-xmpp-core"
chai = require 'chai'
expect = chai.expect
{ JID } = xmpp
JOAP_NS = "jabber:iq:joap"
RPC_NS = "jabber:iq:rpc"
describe "Client", ->
compJID = "comp.exmaple.tld"
xmppComp =
channels: {}
send: (data) ->
onData: (data) ->
on: (channel, cb) ->
@channels[channel] = cb
removeListener: ->
connection: jid: new JID compJID
beforeEach -> @c = new joap.Client xmppComp
it "is a class", ->
(expect typeof joap.Client).to.equal "function"
it "takes an xmpp object as first argument", ->
(expect (new joap.Client xmppComp).xmpp).to.equal xmppComp
it "provides methods to perform JOAP actions", ->
c = new joap.Client xmppComp
(expect typeof c.describe).to.equal "function"
(expect typeof c.read).to.equal "function"
(expect typeof c.add).to.equal "function"
(expect typeof c.edit).to.equal "function"
(expect typeof c.search).to.equal "function"
describe "description method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("describe")
.c("attributedescription")
.c("name").t("foo").up()
.c("type").t("int").up()
.c("desc", "xml:lang": 'en-US').t("foo").up()
.c("desc", "xml:lang": 'de-DE').t("bar").up().up()
.c("methoddescription")
.c("name").t("myMethod").up()
.c("returnType").t("boolean").up()
.c("desc", "xml:lang": 'en-US').t("great method").up()
xmppComp.channels.stanza res
@c.describe "<EMAIL>", (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect s.tree().attrs.id).to.equal iq.tree().attrs.id
(expect res).to.deep.equal
desc: {}
attributes:
foo:
name: "foo"
type: "int"
desc:
'en-US': "foo"
'de-DE': "bar"
methods:
myMethod:
name: "myMethod"
returnType: 'boolean'
desc: 'en-US': "great method"
classes: []
(expect iq.tree().name).to.equal 'iq'
(expect iq.tree().children[0].toString()).to.equal '<describe ' +
'xmlns="jabber:iq:joap"/>'
done()
describe "add method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req.tree()
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("add", xmlns: JOAP_NS)
.c("newAddress").t("<EMAIL>/instance")
xmppComp.channels.stanza res
@c.add "<EMAIL>", {x:"y"}, (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect res).to.equal "<EMAIL>/instance"
done()
describe "read method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("read", xmlns: JOAP_NS)
.c("attribute")
.c("name").t("magic").up()
.c("value").c("i4").t(23)
xmppComp.channels.stanza res
@c.read "<EMAIL>ld/x", ["magic"], (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect res).to.eql {magic: 23}
done()
describe "read method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("edit", xmlns: JOAP_NS)
.c("newAddress").t("x@y.z/0")
xmppComp.channels.stanza res
@c.edit "<EMAIL>ld/x", { "magic":6 } , (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect res).to.equal "x@y.z/0"
done()
describe "search method", ->
it "sends a correct iq", (done) ->
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("search", xmlns: JOAP_NS)
.c("item").t("x@y.z/0")
xmppComp.channels.stanza res
@c.search "<EMAIL>", { "magic":6 } , (err, s, res) ->
(expect res).to.eql ["x@y.z/0"]
done()
describe "rpc method", ->
it "sends a correct iq", (done) ->
xmppComp.send = (req) ->
iq = req.tree()
mName = iq.getChild "query"
.getChild "methodCall"
.getChild "methodName"
.text()
(expect mName).to.equal "myMethod"
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("query", xmlns: RPC_NS)
.c "methodResponse"
.c "params"
.c "param"
.c("value").c("int").t(7)
xmppComp.channels.stanza res.tree()
@c.methodCall "myMethod", "<EMAIL>", ["avalue"] , (err, s, res) ->
(expect res).to.equal 7
done()
describe "delete method", ->
it "sends a correct iq", (done) ->
xmppComp.send = (req) ->
iq = req.tree()
(expect iq.getChild("delete")).to.exist
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("delete", xmlns: JOAP_NS)
xmppComp.channels.stanza res
@c.delete "<EMAIL>/inst", (err, s, res) -> done()
| true | joap = require "../src/node-xmpp-joap"
ltx = require "ltx"
xmpp = require "node-xmpp-core"
chai = require 'chai'
expect = chai.expect
{ JID } = xmpp
JOAP_NS = "jabber:iq:joap"
RPC_NS = "jabber:iq:rpc"
describe "Client", ->
compJID = "comp.exmaple.tld"
xmppComp =
channels: {}
send: (data) ->
onData: (data) ->
on: (channel, cb) ->
@channels[channel] = cb
removeListener: ->
connection: jid: new JID compJID
beforeEach -> @c = new joap.Client xmppComp
it "is a class", ->
(expect typeof joap.Client).to.equal "function"
it "takes an xmpp object as first argument", ->
(expect (new joap.Client xmppComp).xmpp).to.equal xmppComp
it "provides methods to perform JOAP actions", ->
c = new joap.Client xmppComp
(expect typeof c.describe).to.equal "function"
(expect typeof c.read).to.equal "function"
(expect typeof c.add).to.equal "function"
(expect typeof c.edit).to.equal "function"
(expect typeof c.search).to.equal "function"
describe "description method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("describe")
.c("attributedescription")
.c("name").t("foo").up()
.c("type").t("int").up()
.c("desc", "xml:lang": 'en-US').t("foo").up()
.c("desc", "xml:lang": 'de-DE').t("bar").up().up()
.c("methoddescription")
.c("name").t("myMethod").up()
.c("returnType").t("boolean").up()
.c("desc", "xml:lang": 'en-US').t("great method").up()
xmppComp.channels.stanza res
@c.describe "PI:EMAIL:<EMAIL>END_PI", (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect s.tree().attrs.id).to.equal iq.tree().attrs.id
(expect res).to.deep.equal
desc: {}
attributes:
foo:
name: "foo"
type: "int"
desc:
'en-US': "foo"
'de-DE': "bar"
methods:
myMethod:
name: "myMethod"
returnType: 'boolean'
desc: 'en-US': "great method"
classes: []
(expect iq.tree().name).to.equal 'iq'
(expect iq.tree().children[0].toString()).to.equal '<describe ' +
'xmlns="jabber:iq:joap"/>'
done()
describe "add method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req.tree()
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("add", xmlns: JOAP_NS)
.c("newAddress").t("PI:EMAIL:<EMAIL>END_PI/instance")
xmppComp.channels.stanza res
@c.add "PI:EMAIL:<EMAIL>END_PI", {x:"y"}, (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect res).to.equal "PI:EMAIL:<EMAIL>END_PI/instance"
done()
describe "read method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("read", xmlns: JOAP_NS)
.c("attribute")
.c("name").t("magic").up()
.c("value").c("i4").t(23)
xmppComp.channels.stanza res
@c.read "PI:EMAIL:<EMAIL>END_PIld/x", ["magic"], (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect res).to.eql {magic: 23}
done()
describe "read method", ->
it "sends a correct iq", (done) ->
iq = null
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("edit", xmlns: JOAP_NS)
.c("newAddress").t("x@y.z/0")
xmppComp.channels.stanza res
@c.edit "PI:EMAIL:<EMAIL>END_PIld/x", { "magic":6 } , (err, s, res) ->
(expect s.tree().name).to.equal 'iq'
(expect res).to.equal "x@y.z/0"
done()
describe "search method", ->
it "sends a correct iq", (done) ->
xmppComp.send = (req) ->
iq = req
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("search", xmlns: JOAP_NS)
.c("item").t("x@y.z/0")
xmppComp.channels.stanza res
@c.search "PI:EMAIL:<EMAIL>END_PI", { "magic":6 } , (err, s, res) ->
(expect res).to.eql ["x@y.z/0"]
done()
describe "rpc method", ->
it "sends a correct iq", (done) ->
xmppComp.send = (req) ->
iq = req.tree()
mName = iq.getChild "query"
.getChild "methodCall"
.getChild "methodName"
.text()
(expect mName).to.equal "myMethod"
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("query", xmlns: RPC_NS)
.c "methodResponse"
.c "params"
.c "param"
.c("value").c("int").t(7)
xmppComp.channels.stanza res.tree()
@c.methodCall "myMethod", "PI:EMAIL:<EMAIL>END_PI", ["avalue"] , (err, s, res) ->
(expect res).to.equal 7
done()
describe "delete method", ->
it "sends a correct iq", (done) ->
xmppComp.send = (req) ->
iq = req.tree()
(expect iq.getChild("delete")).to.exist
res = new ltx.Element "iq", type: 'result', id: iq.tree().attrs.id
res.c("delete", xmlns: JOAP_NS)
xmppComp.channels.stanza res
@c.delete "PI:EMAIL:<EMAIL>END_PI/inst", (err, s, res) -> done()
|
[
{
"context": "orm('#form')\n .addEmail('Email')\n .addPassword('Password')\n .addSubmit('Submit')\n\nnew hx.MorphSection('#c",
"end": 66,
"score": 0.9991852045059204,
"start": 58,
"tag": "PASSWORD",
"value": "Password"
}
] | docs/content/examples/morphing-sections/index1.coffee | p-koscielniak/hexagonjs | 61 | new hx.Form('#form')
.addEmail('Email')
.addPassword('Password')
.addSubmit('Submit')
new hx.MorphSection('#container')
| 222701 | new hx.Form('#form')
.addEmail('Email')
.addPassword('<PASSWORD>')
.addSubmit('Submit')
new hx.MorphSection('#container')
| true | new hx.Form('#form')
.addEmail('Email')
.addPassword('PI:PASSWORD:<PASSWORD>END_PI')
.addSubmit('Submit')
new hx.MorphSection('#container')
|
[
{
"context": "s file is part of the Konsserto package.\n *\n * (c) Jessym Reziga <jessym@konsserto.com>\n *\n * For the full copyrig",
"end": 74,
"score": 0.9998795986175537,
"start": 61,
"tag": "NAME",
"value": "Jessym Reziga"
},
{
"context": "f the Konsserto package.\n *\n * (c) Je... | node_modules/konsserto/lib/src/Konsserto/Bundle/FrameworkBundle/Controller/Controller.coffee | konsserto/konsserto | 2 | ###
* This file is part of the Konsserto package.
*
* (c) Jessym Reziga <jessym@konsserto.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
#
# Controller
#
# @author Jessym Reziga <jessym@konsserto.com>
#
class Controller
constructor: () ->
return this
getRequest: () ->
return @container.get('Request_Stack').getCurrentRequest()
getResponse: () ->
return @response
setRequest: (@request) ->
return this
setResponse: (@response) ->
return this
setContainer: (@container) ->
return this
getContainer: () ->
return @container
getApplication: () ->
return @application
setApplication: (@app) ->
return this
get: (name)->
return @container.get(name)
setNext: (@next) ->
return this
render: (view, options)->
@container.get('Templating').renderResponse(view, options, @getResponse())
renderView: (view, options)->
@container.get('Templating').render(view, options)
generateUrl: (route, parameters, referenceType) ->
@container.get('Router').generate(route, parameters, referenceType)
redirect: (url, status = 302)->
@response.redirect(status, url)
createForm: (name, data = null, options = {}) ->
@container.get('Form_Factory').createBuilder(name, data, options)
createFormBuilder: (data = null, options = {}) ->
@container.get('Form_Factory').createBuilder('form', data, options)
has: (name)->
@container.has(name)
next: () ->
return @next
module.exports = Controller; | 175601 | ###
* This file is part of the Konsserto package.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
#
# Controller
#
# @author <NAME> <<EMAIL>>
#
class Controller
constructor: () ->
return this
getRequest: () ->
return @container.get('Request_Stack').getCurrentRequest()
getResponse: () ->
return @response
setRequest: (@request) ->
return this
setResponse: (@response) ->
return this
setContainer: (@container) ->
return this
getContainer: () ->
return @container
getApplication: () ->
return @application
setApplication: (@app) ->
return this
get: (name)->
return @container.get(name)
setNext: (@next) ->
return this
render: (view, options)->
@container.get('Templating').renderResponse(view, options, @getResponse())
renderView: (view, options)->
@container.get('Templating').render(view, options)
generateUrl: (route, parameters, referenceType) ->
@container.get('Router').generate(route, parameters, referenceType)
redirect: (url, status = 302)->
@response.redirect(status, url)
createForm: (name, data = null, options = {}) ->
@container.get('Form_Factory').createBuilder(name, data, options)
createFormBuilder: (data = null, options = {}) ->
@container.get('Form_Factory').createBuilder('form', data, options)
has: (name)->
@container.has(name)
next: () ->
return @next
module.exports = Controller; | true | ###
* This file is part of the Konsserto package.
*
* (c) PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
#
# Controller
#
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
class Controller
constructor: () ->
return this
getRequest: () ->
return @container.get('Request_Stack').getCurrentRequest()
getResponse: () ->
return @response
setRequest: (@request) ->
return this
setResponse: (@response) ->
return this
setContainer: (@container) ->
return this
getContainer: () ->
return @container
getApplication: () ->
return @application
setApplication: (@app) ->
return this
get: (name)->
return @container.get(name)
setNext: (@next) ->
return this
render: (view, options)->
@container.get('Templating').renderResponse(view, options, @getResponse())
renderView: (view, options)->
@container.get('Templating').render(view, options)
generateUrl: (route, parameters, referenceType) ->
@container.get('Router').generate(route, parameters, referenceType)
redirect: (url, status = 302)->
@response.redirect(status, url)
createForm: (name, data = null, options = {}) ->
@container.get('Form_Factory').createBuilder(name, data, options)
createFormBuilder: (data = null, options = {}) ->
@container.get('Form_Factory').createBuilder('form', data, options)
has: (name)->
@container.has(name)
next: () ->
return @next
module.exports = Controller; |
[
{
"context": "###\nCopyright (c) 2014 Ramesh Nair (hiddentao.com)\n\nPermission is hereby granted, fr",
"end": 34,
"score": 0.9998839497566223,
"start": 23,
"tag": "NAME",
"value": "Ramesh Nair"
}
] | test/insert.test.coffee | MajesticRolePlay/squel | 0 | ###
Copyright (c) 2014 Ramesh Nair (hiddentao.com)
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
###
squel = require "../dist/squel-basic"
{_, testCreator, assert, expect, should} = require './testbase'
test = testCreator()
test['INSERT builder'] =
beforeEach: ->
@func = squel.insert
@inst = @func()
'instanceof QueryBuilder': ->
assert.instanceOf @inst, squel.cls.QueryBuilder
'constructor':
'override options': ->
@inst = squel.update
usingValuePlaceholders: true
dummy: true
expectedOptions = _.extend {}, squel.cls.getDefaultQueryBuilderOptions(),
usingValuePlaceholders: true
dummy: true
for block in @inst.blocks
assert.same _.pick(block.options, _.keys(expectedOptions)), expectedOptions
'override blocks': ->
block = new squel.cls.StringBlock('SELECT')
@inst = @func {}, [block]
assert.same [block], @inst.blocks
'build query':
'need to call into() first': ->
assert.throws (=> @inst.toString()), 'into() needs to be called'
'when set() not called': ->
assert.same 'INSERT INTO table', @inst.into('table').toString()
'>> into(table).set(field, null)':
beforeEach: -> @inst.into('table').set('field', null)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES (NULL)'
toParam: ->
assert.same @inst.toParam(), { text: 'INSERT INTO table (field) VALUES (?)', values: [null] }
'>> into(table)':
beforeEach: -> @inst.into('table')
'>> set(field, 1)':
beforeEach: -> @inst.set('field', 1)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES (1)'
'>> set(field2, 1.2)':
beforeEach: -> @inst.set('field2', 1.2)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, 1.2)'
'>> set(field2, "str")':
beforeEach: -> @inst.set('field2', 'str')
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, \'str\')'
toParam: ->
assert.same @inst.toParam(), {
text: 'INSERT INTO table (field, field2) VALUES (?, ?)'
values: [ 1, 'str' ]
}
'>> set(field2, "str", { dontQuote: true } )':
beforeEach: -> @inst.set('field2', 'str', dontQuote: true)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, str)'
toParam: ->
assert.same @inst.toParam(), {
text: 'INSERT INTO table (field, field2) VALUES (?, ?)'
values: [ 1, 'str' ]
}
'>> set(field2, true)':
beforeEach: -> @inst.set('field2', true)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, TRUE)'
'>> set(field2, null)':
beforeEach: -> @inst.set('field2', null)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, NULL)'
'>> set(field, query builder)':
beforeEach: ->
@subQuery = squel.select().field('MAX(score)').from('scores')
@inst.set( 'field', @subQuery )
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES ((SELECT MAX(score) FROM scores))'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field) VALUES ((SELECT MAX(score) FROM scores))'
assert.same parameterized.values, []
'>> setFields({field2: \'value2\', field3: true })':
beforeEach: -> @inst.setFields({field2: 'value2', field3: true })
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2, field3) VALUES (1, \'value2\', TRUE)'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field, field2, field3) VALUES (?, ?, ?)'
assert.same parameterized.values, [1,'value2',true]
'>> setFields({field2: \'value2\', field: true })':
beforeEach: -> @inst.setFields({field2: 'value2', field: true })
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (TRUE, \'value2\')'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field, field2) VALUES (?, ?)'
assert.same parameterized.values, [true, 'value2']
'>> setFields(custom value type)':
beforeEach: ->
class MyClass
@inst.registerValueHandler MyClass, -> 'abcd'
@inst.setFields({ field: new MyClass() })
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES ((abcd))'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field) VALUES (?)'
assert.same parameterized.values, ['abcd']
'>> setFieldsRows([{field: \'value2\', field2: true },{field: \'value3\', field2: 13 }]])':
beforeEach: -> @inst.setFieldsRows([{field: 'value2', field2: true },{field: 'value3', field2: 13 }])
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (\'value2\', TRUE), (\'value3\', 13)'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field, field2) VALUES (?, ?), (?, ?)'
assert.same parameterized.values, ['value2',true, 'value3',13]
'Function values':
beforeEach: -> @inst.set('field', squel.str('GETDATE(?, ?)', 2014, 'feb'))
toString: ->
assert.same 'INSERT INTO table (field) VALUES ((GETDATE(2014, \'feb\')))', @inst.toString()
toParam: ->
assert.same { text: 'INSERT INTO table (field) VALUES ((GETDATE(?, ?)))', values: [2014, 'feb'] }, @inst.toParam()
'>> fromQuery([field1, field2], select query)':
beforeEach: -> @inst.fromQuery(
['field1', 'field2'],
squel.select().from('students').where('a = ?', 2)
)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field1, field2) (SELECT * FROM students WHERE (a = 2))'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field1, field2) (SELECT * FROM students WHERE (a = ?))'
assert.same parameterized.values, [ 2 ]
'>> setFieldsRows([{field1: 13, field2: \'value2\'},{field1: true, field3: \'value4\'}])': ->
assert.throws (=> @inst.setFieldsRows([{field1: 13, field2: 'value2'},{field1: true, field3: 'value4'}]).toString()), 'All fields in subsequent rows must match the fields in the first row'
'dontQuote and replaceSingleQuotes set(field2, "ISNULL(\'str\', str)", { dontQuote: true })':
beforeEach: ->
@inst = squel.insert replaceSingleQuotes: true
@inst.into('table').set('field', 1)
@inst.set('field2', "ISNULL('str', str)", dontQuote: true)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, ISNULL(\'str\', str))'
toParam: ->
assert.same @inst.toParam(), {
text: 'INSERT INTO table (field, field2) VALUES (?, ?)'
values: [1, "ISNULL('str', str)"]
}
'fix for #225 - autoquoting field names': ->
@inst = squel.insert(autoQuoteFieldNames: true)
.into('users')
.set('active', 1)
.set('regular', 0)
.set('moderator',1)
assert.same @inst.toParam(), {
text: 'INSERT INTO users (`active`, `regular`, `moderator`) VALUES (?, ?, ?)',
values: [1, 0, 1],
}
'cloning': ->
newinst = @inst.into('students').set('field', 1).clone()
newinst.set('field', 2).set('field2', true)
assert.same 'INSERT INTO students (field) VALUES (1)', @inst.toString()
assert.same 'INSERT INTO students (field, field2) VALUES (2, TRUE)', newinst.toString()
module?.exports[require('path').basename(__filename)] = test
| 60178 | ###
Copyright (c) 2014 <NAME> (hiddentao.com)
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
###
squel = require "../dist/squel-basic"
{_, testCreator, assert, expect, should} = require './testbase'
test = testCreator()
test['INSERT builder'] =
beforeEach: ->
@func = squel.insert
@inst = @func()
'instanceof QueryBuilder': ->
assert.instanceOf @inst, squel.cls.QueryBuilder
'constructor':
'override options': ->
@inst = squel.update
usingValuePlaceholders: true
dummy: true
expectedOptions = _.extend {}, squel.cls.getDefaultQueryBuilderOptions(),
usingValuePlaceholders: true
dummy: true
for block in @inst.blocks
assert.same _.pick(block.options, _.keys(expectedOptions)), expectedOptions
'override blocks': ->
block = new squel.cls.StringBlock('SELECT')
@inst = @func {}, [block]
assert.same [block], @inst.blocks
'build query':
'need to call into() first': ->
assert.throws (=> @inst.toString()), 'into() needs to be called'
'when set() not called': ->
assert.same 'INSERT INTO table', @inst.into('table').toString()
'>> into(table).set(field, null)':
beforeEach: -> @inst.into('table').set('field', null)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES (NULL)'
toParam: ->
assert.same @inst.toParam(), { text: 'INSERT INTO table (field) VALUES (?)', values: [null] }
'>> into(table)':
beforeEach: -> @inst.into('table')
'>> set(field, 1)':
beforeEach: -> @inst.set('field', 1)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES (1)'
'>> set(field2, 1.2)':
beforeEach: -> @inst.set('field2', 1.2)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, 1.2)'
'>> set(field2, "str")':
beforeEach: -> @inst.set('field2', 'str')
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, \'str\')'
toParam: ->
assert.same @inst.toParam(), {
text: 'INSERT INTO table (field, field2) VALUES (?, ?)'
values: [ 1, 'str' ]
}
'>> set(field2, "str", { dontQuote: true } )':
beforeEach: -> @inst.set('field2', 'str', dontQuote: true)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, str)'
toParam: ->
assert.same @inst.toParam(), {
text: 'INSERT INTO table (field, field2) VALUES (?, ?)'
values: [ 1, 'str' ]
}
'>> set(field2, true)':
beforeEach: -> @inst.set('field2', true)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, TRUE)'
'>> set(field2, null)':
beforeEach: -> @inst.set('field2', null)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, NULL)'
'>> set(field, query builder)':
beforeEach: ->
@subQuery = squel.select().field('MAX(score)').from('scores')
@inst.set( 'field', @subQuery )
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES ((SELECT MAX(score) FROM scores))'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field) VALUES ((SELECT MAX(score) FROM scores))'
assert.same parameterized.values, []
'>> setFields({field2: \'value2\', field3: true })':
beforeEach: -> @inst.setFields({field2: 'value2', field3: true })
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2, field3) VALUES (1, \'value2\', TRUE)'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field, field2, field3) VALUES (?, ?, ?)'
assert.same parameterized.values, [1,'value2',true]
'>> setFields({field2: \'value2\', field: true })':
beforeEach: -> @inst.setFields({field2: 'value2', field: true })
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (TRUE, \'value2\')'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field, field2) VALUES (?, ?)'
assert.same parameterized.values, [true, 'value2']
'>> setFields(custom value type)':
beforeEach: ->
class MyClass
@inst.registerValueHandler MyClass, -> 'abcd'
@inst.setFields({ field: new MyClass() })
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES ((abcd))'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field) VALUES (?)'
assert.same parameterized.values, ['abcd']
'>> setFieldsRows([{field: \'value2\', field2: true },{field: \'value3\', field2: 13 }]])':
beforeEach: -> @inst.setFieldsRows([{field: 'value2', field2: true },{field: 'value3', field2: 13 }])
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (\'value2\', TRUE), (\'value3\', 13)'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field, field2) VALUES (?, ?), (?, ?)'
assert.same parameterized.values, ['value2',true, 'value3',13]
'Function values':
beforeEach: -> @inst.set('field', squel.str('GETDATE(?, ?)', 2014, 'feb'))
toString: ->
assert.same 'INSERT INTO table (field) VALUES ((GETDATE(2014, \'feb\')))', @inst.toString()
toParam: ->
assert.same { text: 'INSERT INTO table (field) VALUES ((GETDATE(?, ?)))', values: [2014, 'feb'] }, @inst.toParam()
'>> fromQuery([field1, field2], select query)':
beforeEach: -> @inst.fromQuery(
['field1', 'field2'],
squel.select().from('students').where('a = ?', 2)
)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field1, field2) (SELECT * FROM students WHERE (a = 2))'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field1, field2) (SELECT * FROM students WHERE (a = ?))'
assert.same parameterized.values, [ 2 ]
'>> setFieldsRows([{field1: 13, field2: \'value2\'},{field1: true, field3: \'value4\'}])': ->
assert.throws (=> @inst.setFieldsRows([{field1: 13, field2: 'value2'},{field1: true, field3: 'value4'}]).toString()), 'All fields in subsequent rows must match the fields in the first row'
'dontQuote and replaceSingleQuotes set(field2, "ISNULL(\'str\', str)", { dontQuote: true })':
beforeEach: ->
@inst = squel.insert replaceSingleQuotes: true
@inst.into('table').set('field', 1)
@inst.set('field2', "ISNULL('str', str)", dontQuote: true)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, ISNULL(\'str\', str))'
toParam: ->
assert.same @inst.toParam(), {
text: 'INSERT INTO table (field, field2) VALUES (?, ?)'
values: [1, "ISNULL('str', str)"]
}
'fix for #225 - autoquoting field names': ->
@inst = squel.insert(autoQuoteFieldNames: true)
.into('users')
.set('active', 1)
.set('regular', 0)
.set('moderator',1)
assert.same @inst.toParam(), {
text: 'INSERT INTO users (`active`, `regular`, `moderator`) VALUES (?, ?, ?)',
values: [1, 0, 1],
}
'cloning': ->
newinst = @inst.into('students').set('field', 1).clone()
newinst.set('field', 2).set('field2', true)
assert.same 'INSERT INTO students (field) VALUES (1)', @inst.toString()
assert.same 'INSERT INTO students (field, field2) VALUES (2, TRUE)', newinst.toString()
module?.exports[require('path').basename(__filename)] = test
| true | ###
Copyright (c) 2014 PI:NAME:<NAME>END_PI (hiddentao.com)
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
###
squel = require "../dist/squel-basic"
{_, testCreator, assert, expect, should} = require './testbase'
test = testCreator()
test['INSERT builder'] =
beforeEach: ->
@func = squel.insert
@inst = @func()
'instanceof QueryBuilder': ->
assert.instanceOf @inst, squel.cls.QueryBuilder
'constructor':
'override options': ->
@inst = squel.update
usingValuePlaceholders: true
dummy: true
expectedOptions = _.extend {}, squel.cls.getDefaultQueryBuilderOptions(),
usingValuePlaceholders: true
dummy: true
for block in @inst.blocks
assert.same _.pick(block.options, _.keys(expectedOptions)), expectedOptions
'override blocks': ->
block = new squel.cls.StringBlock('SELECT')
@inst = @func {}, [block]
assert.same [block], @inst.blocks
'build query':
'need to call into() first': ->
assert.throws (=> @inst.toString()), 'into() needs to be called'
'when set() not called': ->
assert.same 'INSERT INTO table', @inst.into('table').toString()
'>> into(table).set(field, null)':
beforeEach: -> @inst.into('table').set('field', null)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES (NULL)'
toParam: ->
assert.same @inst.toParam(), { text: 'INSERT INTO table (field) VALUES (?)', values: [null] }
'>> into(table)':
beforeEach: -> @inst.into('table')
'>> set(field, 1)':
beforeEach: -> @inst.set('field', 1)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES (1)'
'>> set(field2, 1.2)':
beforeEach: -> @inst.set('field2', 1.2)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, 1.2)'
'>> set(field2, "str")':
beforeEach: -> @inst.set('field2', 'str')
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, \'str\')'
toParam: ->
assert.same @inst.toParam(), {
text: 'INSERT INTO table (field, field2) VALUES (?, ?)'
values: [ 1, 'str' ]
}
'>> set(field2, "str", { dontQuote: true } )':
beforeEach: -> @inst.set('field2', 'str', dontQuote: true)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, str)'
toParam: ->
assert.same @inst.toParam(), {
text: 'INSERT INTO table (field, field2) VALUES (?, ?)'
values: [ 1, 'str' ]
}
'>> set(field2, true)':
beforeEach: -> @inst.set('field2', true)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, TRUE)'
'>> set(field2, null)':
beforeEach: -> @inst.set('field2', null)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, NULL)'
'>> set(field, query builder)':
beforeEach: ->
@subQuery = squel.select().field('MAX(score)').from('scores')
@inst.set( 'field', @subQuery )
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES ((SELECT MAX(score) FROM scores))'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field) VALUES ((SELECT MAX(score) FROM scores))'
assert.same parameterized.values, []
'>> setFields({field2: \'value2\', field3: true })':
beforeEach: -> @inst.setFields({field2: 'value2', field3: true })
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2, field3) VALUES (1, \'value2\', TRUE)'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field, field2, field3) VALUES (?, ?, ?)'
assert.same parameterized.values, [1,'value2',true]
'>> setFields({field2: \'value2\', field: true })':
beforeEach: -> @inst.setFields({field2: 'value2', field: true })
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (TRUE, \'value2\')'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field, field2) VALUES (?, ?)'
assert.same parameterized.values, [true, 'value2']
'>> setFields(custom value type)':
beforeEach: ->
class MyClass
@inst.registerValueHandler MyClass, -> 'abcd'
@inst.setFields({ field: new MyClass() })
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field) VALUES ((abcd))'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field) VALUES (?)'
assert.same parameterized.values, ['abcd']
'>> setFieldsRows([{field: \'value2\', field2: true },{field: \'value3\', field2: 13 }]])':
beforeEach: -> @inst.setFieldsRows([{field: 'value2', field2: true },{field: 'value3', field2: 13 }])
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (\'value2\', TRUE), (\'value3\', 13)'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field, field2) VALUES (?, ?), (?, ?)'
assert.same parameterized.values, ['value2',true, 'value3',13]
'Function values':
beforeEach: -> @inst.set('field', squel.str('GETDATE(?, ?)', 2014, 'feb'))
toString: ->
assert.same 'INSERT INTO table (field) VALUES ((GETDATE(2014, \'feb\')))', @inst.toString()
toParam: ->
assert.same { text: 'INSERT INTO table (field) VALUES ((GETDATE(?, ?)))', values: [2014, 'feb'] }, @inst.toParam()
'>> fromQuery([field1, field2], select query)':
beforeEach: -> @inst.fromQuery(
['field1', 'field2'],
squel.select().from('students').where('a = ?', 2)
)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field1, field2) (SELECT * FROM students WHERE (a = 2))'
toParam: ->
parameterized = @inst.toParam()
assert.same parameterized.text, 'INSERT INTO table (field1, field2) (SELECT * FROM students WHERE (a = ?))'
assert.same parameterized.values, [ 2 ]
'>> setFieldsRows([{field1: 13, field2: \'value2\'},{field1: true, field3: \'value4\'}])': ->
assert.throws (=> @inst.setFieldsRows([{field1: 13, field2: 'value2'},{field1: true, field3: 'value4'}]).toString()), 'All fields in subsequent rows must match the fields in the first row'
'dontQuote and replaceSingleQuotes set(field2, "ISNULL(\'str\', str)", { dontQuote: true })':
beforeEach: ->
@inst = squel.insert replaceSingleQuotes: true
@inst.into('table').set('field', 1)
@inst.set('field2', "ISNULL('str', str)", dontQuote: true)
toString: ->
assert.same @inst.toString(), 'INSERT INTO table (field, field2) VALUES (1, ISNULL(\'str\', str))'
toParam: ->
assert.same @inst.toParam(), {
text: 'INSERT INTO table (field, field2) VALUES (?, ?)'
values: [1, "ISNULL('str', str)"]
}
'fix for #225 - autoquoting field names': ->
@inst = squel.insert(autoQuoteFieldNames: true)
.into('users')
.set('active', 1)
.set('regular', 0)
.set('moderator',1)
assert.same @inst.toParam(), {
text: 'INSERT INTO users (`active`, `regular`, `moderator`) VALUES (?, ?, ?)',
values: [1, 0, 1],
}
'cloning': ->
newinst = @inst.into('students').set('field', 1).clone()
newinst.set('field', 2).set('field2', true)
assert.same 'INSERT INTO students (field) VALUES (1)', @inst.toString()
assert.same 'INSERT INTO students (field, field2) VALUES (2, TRUE)', newinst.toString()
module?.exports[require('path').basename(__filename)] = test
|
[
{
"context": "c.freenode.net'\nnick = 'anotherunregisterednick23423j23r'\n\nbot = new Ann server, nick,\n debug: true\n\nbot.",
"end": 89,
"score": 0.52761310338974,
"start": 83,
"tag": "PASSWORD",
"value": "23j23r"
}
] | test/identify/notregistered.coffee | fent/ann | 0 | Ann = require 'ann'
server = 'irc.freenode.net'
nick = 'anotherunregisterednick23423j23r'
bot = new Ann server, nick,
debug: true
bot.ready ->
bot.identify 'something', (err) ->
throw err if err
| 211572 | Ann = require 'ann'
server = 'irc.freenode.net'
nick = 'anotherunregisterednick234<PASSWORD>'
bot = new Ann server, nick,
debug: true
bot.ready ->
bot.identify 'something', (err) ->
throw err if err
| true | Ann = require 'ann'
server = 'irc.freenode.net'
nick = 'anotherunregisterednick234PI:PASSWORD:<PASSWORD>END_PI'
bot = new Ann server, nick,
debug: true
bot.ready ->
bot.identify 'something', (err) ->
throw err if err
|
[
{
"context": "d> - send youtube video to meowziq\n#\n# Author:\n# bouzuya <m@bouzuya.net>\n#\n\nfetch = require '../fetch'\npos",
"end": 194,
"score": 0.9997125267982483,
"start": 187,
"tag": "USERNAME",
"value": "bouzuya"
},
{
"context": "youtube video to meowziq\n#\n# Author:\n# ... | src/scripts/meowziq-youtube.coffee | bouzuya/hubot-meowziq-youtube | 0 | # Description
# A Hubot script
#
# Configuration:
# HUBOT_MEOWZIQ_YOUTUBE_BASE_URL
#
# Commands:
# hubot meowziq youtube <url or id> - send youtube video to meowziq
#
# Author:
# bouzuya <m@bouzuya.net>
#
fetch = require '../fetch'
post = require '../post'
module.exports = (robot) ->
baseUrl = process.env.HUBOT_MEOWZIQ_YOUTUBE_BASE_URL
robot.respond /meowziq.youtube (.+)$/, (res) ->
u = res.match[1]
u = if u.match /^www\.youtube\.com/ then 'https://' + u else u
u = if u.match /^https?:/ then u else 'https://www.youtube.com/watch?v=' + u
fetch u, (song) ->
post baseUrl, song
.then ->
res.send 'OK'
.catch (e) ->
robot.logger.error e
res.send 'ERROR'
| 155637 | # Description
# A Hubot script
#
# Configuration:
# HUBOT_MEOWZIQ_YOUTUBE_BASE_URL
#
# Commands:
# hubot meowziq youtube <url or id> - send youtube video to meowziq
#
# Author:
# bouzuya <<EMAIL>>
#
fetch = require '../fetch'
post = require '../post'
module.exports = (robot) ->
baseUrl = process.env.HUBOT_MEOWZIQ_YOUTUBE_BASE_URL
robot.respond /meowziq.youtube (.+)$/, (res) ->
u = res.match[1]
u = if u.match /^www\.youtube\.com/ then 'https://' + u else u
u = if u.match /^https?:/ then u else 'https://www.youtube.com/watch?v=' + u
fetch u, (song) ->
post baseUrl, song
.then ->
res.send 'OK'
.catch (e) ->
robot.logger.error e
res.send 'ERROR'
| true | # Description
# A Hubot script
#
# Configuration:
# HUBOT_MEOWZIQ_YOUTUBE_BASE_URL
#
# Commands:
# hubot meowziq youtube <url or id> - send youtube video to meowziq
#
# Author:
# bouzuya <PI:EMAIL:<EMAIL>END_PI>
#
fetch = require '../fetch'
post = require '../post'
module.exports = (robot) ->
baseUrl = process.env.HUBOT_MEOWZIQ_YOUTUBE_BASE_URL
robot.respond /meowziq.youtube (.+)$/, (res) ->
u = res.match[1]
u = if u.match /^www\.youtube\.com/ then 'https://' + u else u
u = if u.match /^https?:/ then u else 'https://www.youtube.com/watch?v=' + u
fetch u, (song) ->
post baseUrl, song
.then ->
res.send 'OK'
.catch (e) ->
robot.logger.error e
res.send 'ERROR'
|
[
{
"context": "###\n author: huyinghuan\n date: 2015-07-03\n###\n_mysql = require 'mysql'\n_",
"end": 24,
"score": 0.9994889497756958,
"start": 14,
"tag": "USERNAME",
"value": "huyinghuan"
},
{
"context": "pe: _enume.gitMapType.project})\n )\n\n###\n Author: ec.huyinghuan@gmail.co... | src/biz/gitlab.coffee | kiteam/kiteam | 0 | ###
author: huyinghuan
date: 2015-07-03
###
_mysql = require 'mysql'
_async = require 'async'
_http = require('bijou').http
_GitLabInterface = require 'gitlab-few'
_entity = require '../entity'
_cache = require '../cache'
_enume = require('../common').enumerate
_config = require('../common').config
_database_config = _config.gitlab.database
connection = _mysql.createConnection(_database_config)
exports.fork = (client, cb)->
project_id = client.params.project_id
be_forked_git_id = client.params.gitlab_id
member_id = client.member.member_id
queue = []
gitlab = null
#初始化gitlab接口
queue.push((done)->
gitlab_token = _cache.member.get(member_id).gitlab_token
gitlab = new _GitLabInterface(gitlab_token, _config.gitlab.api)
done(null)
)
#fork项目
queue.push((done)->
gitlab.projects().fork(be_forked_git_id).then((data)->
done(null, data.id, data.ssh_url_to_repo)
)
.catch((err)->
console.error err
done({msg: "Fork失败,请查看项目权限或是否已存在自己的仓库"})
)
)
#保存git到项目
queue.push((git_id, git_url, done)->
project =
type: _enume.gitMapType.project
target_id: project_id
git: git_url
git_id: git_id
_entity.git_map.save(project, (err)->
return done({msg: "gits关联项目失败"}) if err
done(null, git_id)
)
)
#插入hooks
queue.push((git_id, done)->
gitlab.projects(git_id).hooks().post(_config.gitlab.hooks)
.then(()->
done(null)
)
.catch((err)->
console.error err
done({msg: "gits设置hooks失败"})
)
)
_async.waterfall(queue, (error)->
return cb() if not error
if error.msg
cb(_http.notAcceptableError(error.msg))
else if error
console.error error
cb(error)
else
cb()
_catch.git_map.load({type: _enume.gitMapType.project})
)
###
Author: ec.huyinghuan@gmail.com
Date: 2015.07.09
Describe:
给项目添加关联一个已存在的gitlab地址
###
exports.addGitToProject = (client, cb)->
project_id = client.params.project_id
gitlab_url = client.body.gitlab_url
member_id = client.member.member_id
return cb _http.notAcceptableError("gitlab的地址不能为空") if not gitlab_url
queue = []
gitlab = null
#查询这个gitlab是否已经关联过
queue.push((done)->
_entity.git_map.find({git: gitlab_url}, (err, result)->
#如果不存在
return done(null, null) if not result.length
done(null, result[0].target_id)
)
)
#已存在的项目做友好提示
queue.push((target_project_id, done)->
return done() if not target_project_id
if target_project_id is ~~project_id
return done({msg: "gitlab地址不需要重复绑定!"})
_entity.project.find({id: target_project_id}, (err, result)->
title = result[0]?.title or "未知"
done({msg: "该gitlab地址已被#{title}项目绑定!请先解除绑定或重新关联gitlab地址"})
)
)
#初始化gitlab接口
queue.push((done)->
gitlab_token = _cache.member.get(member_id).gitlab_token
gitlab = new _GitLabInterface(gitlab_token, _config.gitlab.api)
done()
)
#获取gitlab id
queue.push((done)->
namespace = (gitlab_url.split(':')[1]).split('/')
path = namespace[0]
name = namespace[1].replace(/\.git$/, "")
sql = "select p.id
from projects p left join namespaces n
on p.namespace_id = n.id
where p.name = ? and n.path= ?
limit 1"
connection.query(sql, [name, path], (err, result)->
return done({msg: "仓库不存在"}) if result.length is 0
done(err, result[0].id)
)
)
#查看hooks是否已经设置过了
queue.push((git_id, done)->
sql = "
select id from web_hooks where project_id = ? and url = ?
"
connection.query(sql, [git_id, _config.gitlab.hooks], (err, result)->
return done(err) if err
done(null, git_id, result.length)
)
)
#设置hooks
queue.push((git_id, hooksCount, done)->
#如果已经设置过了hooks,那么跳过
return done(null, git_id) if hooksCount isnt 0
gitlab.projects(git_id).hooks().post(_config.gitlab.hooks)
.then(->
done(null, git_id)
)
.catch((err)->
done({msg: "gits设置hooks失败, 请检查是否具有该仓库权限!", err: err})
)
)
#保存git到项目
queue.push((git_id, done)->
project =
type: _enume.gitMapType.project
target_id: project_id
git: gitlab_url
git_id: git_id
_entity.git_map.save(project, (err)->
return done({msg: "gits关联项目失败", err: err}) if err
done(null)
)
)
_async.waterfall(queue, (error)->
return cb() if not error
console.error error if error
if error.msg
cb _http.notAcceptableError(error.msg)
else if error
cb(error)
else
cb()
_cache.gitMap.load({type: _enume.gitMapType.project})
)
###
Author: ec.huyinghuan@gmail.com
Date: 2015.07.09 16:20 PM
Describe:
根据token和仓库名查询是否存在
@params {string} 私人的gitlab的token
@params {string} 需要查询的仓库名称
@params {function} 回调函数
接收两个参数function(err, exists){...}
@params {Error}
@params {boolean} 如果项目名称存在,则exists为true, 否则false
@return {null}
###
exports.isExistsProjectInMyAccountByName = (token, name, cb)->
sql = "
select p.*
from users u right join projects p
on u.id = p.creator_id
where u.authentication_token = ? and p.name = ?
"
connection.query(sql, [token, name], (err, result)->
return cb(err) if err
#存在
if result.length
cb(null, true)
else
cb(null, false)
)
###
Author: ec@huyinghuan@gmail.com
Date: 2015.07.16
Describe: 在指定的gitlab列表中找到属于自己的gitlab列表
###
exports.getMyGitListInGiven = (auth_token, givenGitIdList, cb)->
sql = "
select p . *
from users u right join projects p ON u.id = p.creator_id
where u.authentication_token = ? and p.id in (?)
"
connection.query(sql, [auth_token, givenGitIdList], (err, result)->
cb(err, result)
)
###
Author: ec@huyinghuan@gmail.com
Date: 2015.07.16
Describe: 根据id获取namespace
###
exports.getNamespaceById = (id, cb)->
sql = "
select * from projects where id = ?
"
connection.query(sql, [id], (err, result)->
return cb(err) if err
return cb(null, false) if not result.length
project = result[0]
cb(null, "#{project.path}/#{project.name}")
)
| 186635 | ###
author: huyinghuan
date: 2015-07-03
###
_mysql = require 'mysql'
_async = require 'async'
_http = require('bijou').http
_GitLabInterface = require 'gitlab-few'
_entity = require '../entity'
_cache = require '../cache'
_enume = require('../common').enumerate
_config = require('../common').config
_database_config = _config.gitlab.database
connection = _mysql.createConnection(_database_config)
exports.fork = (client, cb)->
project_id = client.params.project_id
be_forked_git_id = client.params.gitlab_id
member_id = client.member.member_id
queue = []
gitlab = null
#初始化gitlab接口
queue.push((done)->
gitlab_token = _cache.member.get(member_id).gitlab_token
gitlab = new _GitLabInterface(gitlab_token, _config.gitlab.api)
done(null)
)
#fork项目
queue.push((done)->
gitlab.projects().fork(be_forked_git_id).then((data)->
done(null, data.id, data.ssh_url_to_repo)
)
.catch((err)->
console.error err
done({msg: "Fork失败,请查看项目权限或是否已存在自己的仓库"})
)
)
#保存git到项目
queue.push((git_id, git_url, done)->
project =
type: _enume.gitMapType.project
target_id: project_id
git: git_url
git_id: git_id
_entity.git_map.save(project, (err)->
return done({msg: "gits关联项目失败"}) if err
done(null, git_id)
)
)
#插入hooks
queue.push((git_id, done)->
gitlab.projects(git_id).hooks().post(_config.gitlab.hooks)
.then(()->
done(null)
)
.catch((err)->
console.error err
done({msg: "gits设置hooks失败"})
)
)
_async.waterfall(queue, (error)->
return cb() if not error
if error.msg
cb(_http.notAcceptableError(error.msg))
else if error
console.error error
cb(error)
else
cb()
_catch.git_map.load({type: _enume.gitMapType.project})
)
###
Author: <EMAIL>
Date: 2015.07.09
Describe:
给项目添加关联一个已存在的gitlab地址
###
exports.addGitToProject = (client, cb)->
project_id = client.params.project_id
gitlab_url = client.body.gitlab_url
member_id = client.member.member_id
return cb _http.notAcceptableError("gitlab的地址不能为空") if not gitlab_url
queue = []
gitlab = null
#查询这个gitlab是否已经关联过
queue.push((done)->
_entity.git_map.find({git: gitlab_url}, (err, result)->
#如果不存在
return done(null, null) if not result.length
done(null, result[0].target_id)
)
)
#已存在的项目做友好提示
queue.push((target_project_id, done)->
return done() if not target_project_id
if target_project_id is ~~project_id
return done({msg: "gitlab地址不需要重复绑定!"})
_entity.project.find({id: target_project_id}, (err, result)->
title = result[0]?.title or "未知"
done({msg: "该gitlab地址已被#{title}项目绑定!请先解除绑定或重新关联gitlab地址"})
)
)
#初始化gitlab接口
queue.push((done)->
gitlab_token = _cache.member.get(member_id).gitlab_token
gitlab = new _GitLabInterface(gitlab_token, _config.gitlab.api)
done()
)
#获取gitlab id
queue.push((done)->
namespace = (gitlab_url.split(':')[1]).split('/')
path = namespace[0]
name = namespace[1].replace(/\.git$/, "")
sql = "select p.id
from projects p left join namespaces n
on p.namespace_id = n.id
where p.name = ? and n.path= ?
limit 1"
connection.query(sql, [name, path], (err, result)->
return done({msg: "仓库不存在"}) if result.length is 0
done(err, result[0].id)
)
)
#查看hooks是否已经设置过了
queue.push((git_id, done)->
sql = "
select id from web_hooks where project_id = ? and url = ?
"
connection.query(sql, [git_id, _config.gitlab.hooks], (err, result)->
return done(err) if err
done(null, git_id, result.length)
)
)
#设置hooks
queue.push((git_id, hooksCount, done)->
#如果已经设置过了hooks,那么跳过
return done(null, git_id) if hooksCount isnt 0
gitlab.projects(git_id).hooks().post(_config.gitlab.hooks)
.then(->
done(null, git_id)
)
.catch((err)->
done({msg: "gits设置hooks失败, 请检查是否具有该仓库权限!", err: err})
)
)
#保存git到项目
queue.push((git_id, done)->
project =
type: _enume.gitMapType.project
target_id: project_id
git: gitlab_url
git_id: git_id
_entity.git_map.save(project, (err)->
return done({msg: "gits关联项目失败", err: err}) if err
done(null)
)
)
_async.waterfall(queue, (error)->
return cb() if not error
console.error error if error
if error.msg
cb _http.notAcceptableError(error.msg)
else if error
cb(error)
else
cb()
_cache.gitMap.load({type: _enume.gitMapType.project})
)
###
Author: <EMAIL>
Date: 2015.07.09 16:20 PM
Describe:
根据token和仓库名查询是否存在
@params {string} 私人的gitlab的token
@params {string} 需要查询的仓库名称
@params {function} 回调函数
接收两个参数function(err, exists){...}
@params {Error}
@params {boolean} 如果项目名称存在,则exists为true, 否则false
@return {null}
###
exports.isExistsProjectInMyAccountByName = (token, name, cb)->
sql = "
select p.*
from users u right join projects p
on u.id = p.creator_id
where u.authentication_token = ? and p.name = ?
"
connection.query(sql, [token, name], (err, result)->
return cb(err) if err
#存在
if result.length
cb(null, true)
else
cb(null, false)
)
###
Author: <EMAIL>
Date: 2015.07.16
Describe: 在指定的gitlab列表中找到属于自己的gitlab列表
###
exports.getMyGitListInGiven = (auth_token, givenGitIdList, cb)->
sql = "
select p . *
from users u right join projects p ON u.id = p.creator_id
where u.authentication_token = ? and p.id in (?)
"
connection.query(sql, [auth_token, givenGitIdList], (err, result)->
cb(err, result)
)
###
Author: <EMAIL>
Date: 2015.07.16
Describe: 根据id获取namespace
###
exports.getNamespaceById = (id, cb)->
sql = "
select * from projects where id = ?
"
connection.query(sql, [id], (err, result)->
return cb(err) if err
return cb(null, false) if not result.length
project = result[0]
cb(null, "#{project.path}/#{project.name}")
)
| true | ###
author: huyinghuan
date: 2015-07-03
###
_mysql = require 'mysql'
_async = require 'async'
_http = require('bijou').http
_GitLabInterface = require 'gitlab-few'
_entity = require '../entity'
_cache = require '../cache'
_enume = require('../common').enumerate
_config = require('../common').config
_database_config = _config.gitlab.database
connection = _mysql.createConnection(_database_config)
exports.fork = (client, cb)->
project_id = client.params.project_id
be_forked_git_id = client.params.gitlab_id
member_id = client.member.member_id
queue = []
gitlab = null
#初始化gitlab接口
queue.push((done)->
gitlab_token = _cache.member.get(member_id).gitlab_token
gitlab = new _GitLabInterface(gitlab_token, _config.gitlab.api)
done(null)
)
#fork项目
queue.push((done)->
gitlab.projects().fork(be_forked_git_id).then((data)->
done(null, data.id, data.ssh_url_to_repo)
)
.catch((err)->
console.error err
done({msg: "Fork失败,请查看项目权限或是否已存在自己的仓库"})
)
)
#保存git到项目
queue.push((git_id, git_url, done)->
project =
type: _enume.gitMapType.project
target_id: project_id
git: git_url
git_id: git_id
_entity.git_map.save(project, (err)->
return done({msg: "gits关联项目失败"}) if err
done(null, git_id)
)
)
#插入hooks
queue.push((git_id, done)->
gitlab.projects(git_id).hooks().post(_config.gitlab.hooks)
.then(()->
done(null)
)
.catch((err)->
console.error err
done({msg: "gits设置hooks失败"})
)
)
_async.waterfall(queue, (error)->
return cb() if not error
if error.msg
cb(_http.notAcceptableError(error.msg))
else if error
console.error error
cb(error)
else
cb()
_catch.git_map.load({type: _enume.gitMapType.project})
)
###
Author: PI:EMAIL:<EMAIL>END_PI
Date: 2015.07.09
Describe:
给项目添加关联一个已存在的gitlab地址
###
exports.addGitToProject = (client, cb)->
project_id = client.params.project_id
gitlab_url = client.body.gitlab_url
member_id = client.member.member_id
return cb _http.notAcceptableError("gitlab的地址不能为空") if not gitlab_url
queue = []
gitlab = null
#查询这个gitlab是否已经关联过
queue.push((done)->
_entity.git_map.find({git: gitlab_url}, (err, result)->
#如果不存在
return done(null, null) if not result.length
done(null, result[0].target_id)
)
)
#已存在的项目做友好提示
queue.push((target_project_id, done)->
return done() if not target_project_id
if target_project_id is ~~project_id
return done({msg: "gitlab地址不需要重复绑定!"})
_entity.project.find({id: target_project_id}, (err, result)->
title = result[0]?.title or "未知"
done({msg: "该gitlab地址已被#{title}项目绑定!请先解除绑定或重新关联gitlab地址"})
)
)
#初始化gitlab接口
queue.push((done)->
gitlab_token = _cache.member.get(member_id).gitlab_token
gitlab = new _GitLabInterface(gitlab_token, _config.gitlab.api)
done()
)
#获取gitlab id
queue.push((done)->
namespace = (gitlab_url.split(':')[1]).split('/')
path = namespace[0]
name = namespace[1].replace(/\.git$/, "")
sql = "select p.id
from projects p left join namespaces n
on p.namespace_id = n.id
where p.name = ? and n.path= ?
limit 1"
connection.query(sql, [name, path], (err, result)->
return done({msg: "仓库不存在"}) if result.length is 0
done(err, result[0].id)
)
)
#查看hooks是否已经设置过了
queue.push((git_id, done)->
sql = "
select id from web_hooks where project_id = ? and url = ?
"
connection.query(sql, [git_id, _config.gitlab.hooks], (err, result)->
return done(err) if err
done(null, git_id, result.length)
)
)
#设置hooks
queue.push((git_id, hooksCount, done)->
#如果已经设置过了hooks,那么跳过
return done(null, git_id) if hooksCount isnt 0
gitlab.projects(git_id).hooks().post(_config.gitlab.hooks)
.then(->
done(null, git_id)
)
.catch((err)->
done({msg: "gits设置hooks失败, 请检查是否具有该仓库权限!", err: err})
)
)
#保存git到项目
queue.push((git_id, done)->
project =
type: _enume.gitMapType.project
target_id: project_id
git: gitlab_url
git_id: git_id
_entity.git_map.save(project, (err)->
return done({msg: "gits关联项目失败", err: err}) if err
done(null)
)
)
_async.waterfall(queue, (error)->
return cb() if not error
console.error error if error
if error.msg
cb _http.notAcceptableError(error.msg)
else if error
cb(error)
else
cb()
_cache.gitMap.load({type: _enume.gitMapType.project})
)
###
Author: PI:EMAIL:<EMAIL>END_PI
Date: 2015.07.09 16:20 PM
Describe:
根据token和仓库名查询是否存在
@params {string} 私人的gitlab的token
@params {string} 需要查询的仓库名称
@params {function} 回调函数
接收两个参数function(err, exists){...}
@params {Error}
@params {boolean} 如果项目名称存在,则exists为true, 否则false
@return {null}
###
exports.isExistsProjectInMyAccountByName = (token, name, cb)->
sql = "
select p.*
from users u right join projects p
on u.id = p.creator_id
where u.authentication_token = ? and p.name = ?
"
connection.query(sql, [token, name], (err, result)->
return cb(err) if err
#存在
if result.length
cb(null, true)
else
cb(null, false)
)
###
Author: PI:EMAIL:<EMAIL>END_PI
Date: 2015.07.16
Describe: 在指定的gitlab列表中找到属于自己的gitlab列表
###
exports.getMyGitListInGiven = (auth_token, givenGitIdList, cb)->
sql = "
select p . *
from users u right join projects p ON u.id = p.creator_id
where u.authentication_token = ? and p.id in (?)
"
connection.query(sql, [auth_token, givenGitIdList], (err, result)->
cb(err, result)
)
###
Author: PI:EMAIL:<EMAIL>END_PI
Date: 2015.07.16
Describe: 根据id获取namespace
###
exports.getNamespaceById = (id, cb)->
sql = "
select * from projects where id = ?
"
connection.query(sql, [id], (err, result)->
return cb(err) if err
return cb(null, false) if not result.length
project = result[0]
cb(null, "#{project.path}/#{project.name}")
)
|
[
{
"context": " , ->\n project = new Project\n name : \"testProject\"\n project.createFile\n name : \"",
"end": 485,
"score": 0.8179288506507874,
"start": 481,
"tag": "NAME",
"value": "test"
}
] | src/test/spec/library.spec.coffee | kaosat-dev/CoffeeSCad | 110 | define (require)->
Library = require "core/projects/library"
Project = require "core/projects/project"
describe "library", ->
library = null
beforeEach ->
library = new Library()
it 'can list projects from different types of storage', ->
allProjects = library.getProjectByStore("all")
expect(allProjects).toEqual([])
it 'can save a project to browser (localstorage)' , ->
project = new Project
name : "testProject"
project.createFile
name : "testFile"
content : "someContent"
| 178229 | define (require)->
Library = require "core/projects/library"
Project = require "core/projects/project"
describe "library", ->
library = null
beforeEach ->
library = new Library()
it 'can list projects from different types of storage', ->
allProjects = library.getProjectByStore("all")
expect(allProjects).toEqual([])
it 'can save a project to browser (localstorage)' , ->
project = new Project
name : "<NAME>Project"
project.createFile
name : "testFile"
content : "someContent"
| true | define (require)->
Library = require "core/projects/library"
Project = require "core/projects/project"
describe "library", ->
library = null
beforeEach ->
library = new Library()
it 'can list projects from different types of storage', ->
allProjects = library.getProjectByStore("all")
expect(allProjects).toEqual([])
it 'can save a project to browser (localstorage)' , ->
project = new Project
name : "PI:NAME:<NAME>END_PIProject"
project.createFile
name : "testFile"
content : "someContent"
|
[
{
"context": "###\n\nCopyright (c) 2013 - 2014 Bruce Davidson darkoverlordofdata@gmail.com\nCopyright (c) 2005, ",
"end": 45,
"score": 0.9998569488525391,
"start": 31,
"tag": "NAME",
"value": "Bruce Davidson"
},
{
"context": "###\n\nCopyright (c) 2013 - 2014 Bruce Davidson darkoverlo... | src/liquid.coffee | darkoverlordofdata/liquid.coffee | 0 | ###
Copyright (c) 2013 - 2014 Bruce Davidson darkoverlordofdata@gmail.com
Copyright (c) 2005, 2006 Tobias Luetke
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
module.exports = class Liquid
@Liquid = Liquid # dereference for AMD
@FilterSeparator = ///\|///
@ArgumentSeparator = ','
@FilterArgumentSeparator = ':'
@VariableAttributeSeparator = '.'
@TagStart = ///\{\%///
@TagEnd = ///\%\}///
@VariableSignature = ///\(?[\w\-\.\[\]]\)?///
@VariableSegment = ///[\w\-]///
@VariableStart = ///\{\{///
@VariableEnd = ///\}\}///
@VariableIncompleteEnd = ///\}\}?///
@QuotedString = ///"[^"]*"|'[^']*'///
@QuotedFragment = ///#{@QuotedString.source}|(?:[^\s,\|'"]|#{@QuotedString.source})+///
@StrictQuotedFragment = ///"[^"]+"|'[^']+'|[^\s|:,]+///
@FirstFilterArgument = ///#{@FilterArgumentSeparator}(?:#{@StrictQuotedFragment.source})///
@OtherFilterArgument = ///#{@ArgumentSeparator}(?:#{@StrictQuotedFragment.source})///
@SpacelessFilter = ///^(?:'[^']+'|"[^"]+"|[^'"])*#{@FilterSeparator.source}(?:#{@StrictQuotedFragment.source})(?:#{@FirstFilterArgument.source}(?:#{@OtherFilterArgument.source})*)?///
@Expression = ///(?:#{@QuotedFragment.source}(?:#{@SpacelessFilter.source})*)///
@TagAttributes = ///(\w+)\s*\:\s*(#{@QuotedFragment.source})///
@AnyStartingTag = ///\{\{|\{\%///
@PartialTemplateParser = ///#{@TagStart.source}.*?#{@TagEnd.source}|#{@VariableStart.source}.*?#{@VariableIncompleteEnd.source}///
@TemplateParser = ///(#{@PartialTemplateParser.source}|#{@AnyStartingTag.source})///
@VariableParser = ///\[[^\]]+\]|#{@VariableSegment.source}+\??///
@LiteralShorthand = ///^(?:\{\{\{\s?)(.*?)(?:\s*\}\}\})$///
#
# Setting a path enables a simple disk based file system
#
@setPath = (path) ->
#
# Templates load their own extends and includes
#
Liquid.Template.fileSystem = new Liquid.LocalFileSystem(path)
return Liquid
#
# Hapi wants a compile function
#
@compile = (template, options) ->
t = Liquid.Template.parse(template)
(context, options) ->
t.render(context)
require './liquid/version'
require './liquid/drop'
require './liquid/errors'
require './liquid/interrupts'
require './liquid/strainer'
require './liquid/context'
require './liquid/tag'
require './liquid/block'
require './liquid/document'
require './liquid/variable'
require './liquid/filesystem'
require './liquid/template'
require './liquid/standardfilters'
require './liquid/condition'
class Liquid.Tags
require './liquid/tags/assign'
require './liquid/tags/block'
require './liquid/tags/break'
require './liquid/tags/capture'
require './liquid/tags/case'
require './liquid/tags/comment'
require './liquid/tags/continue'
require './liquid/tags/cycle'
require './liquid/tags/decrement'
require './liquid/tags/extends'
require './liquid/tags/for'
require './liquid/tags/if'
require './liquid/tags/ifchanged'
require './liquid/tags/include'
require './liquid/tags/increment'
require './liquid/tags/raw'
require './liquid/tags/unless'
require './extras/liquidView' | 204904 | ###
Copyright (c) 2013 - 2014 <NAME> <EMAIL>
Copyright (c) 2005, 2006 <NAME>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
module.exports = class Liquid
@Liquid = Liquid # dereference for AMD
@FilterSeparator = ///\|///
@ArgumentSeparator = ','
@FilterArgumentSeparator = ':'
@VariableAttributeSeparator = '.'
@TagStart = ///\{\%///
@TagEnd = ///\%\}///
@VariableSignature = ///\(?[\w\-\.\[\]]\)?///
@VariableSegment = ///[\w\-]///
@VariableStart = ///\{\{///
@VariableEnd = ///\}\}///
@VariableIncompleteEnd = ///\}\}?///
@QuotedString = ///"[^"]*"|'[^']*'///
@QuotedFragment = ///#{@QuotedString.source}|(?:[^\s,\|'"]|#{@QuotedString.source})+///
@StrictQuotedFragment = ///"[^"]+"|'[^']+'|[^\s|:,]+///
@FirstFilterArgument = ///#{@FilterArgumentSeparator}(?:#{@StrictQuotedFragment.source})///
@OtherFilterArgument = ///#{@ArgumentSeparator}(?:#{@StrictQuotedFragment.source})///
@SpacelessFilter = ///^(?:'[^']+'|"[^"]+"|[^'"])*#{@FilterSeparator.source}(?:#{@StrictQuotedFragment.source})(?:#{@FirstFilterArgument.source}(?:#{@OtherFilterArgument.source})*)?///
@Expression = ///(?:#{@QuotedFragment.source}(?:#{@SpacelessFilter.source})*)///
@TagAttributes = ///(\w+)\s*\:\s*(#{@QuotedFragment.source})///
@AnyStartingTag = ///\{\{|\{\%///
@PartialTemplateParser = ///#{@TagStart.source}.*?#{@TagEnd.source}|#{@VariableStart.source}.*?#{@VariableIncompleteEnd.source}///
@TemplateParser = ///(#{@PartialTemplateParser.source}|#{@AnyStartingTag.source})///
@VariableParser = ///\[[^\]]+\]|#{@VariableSegment.source}+\??///
@LiteralShorthand = ///^(?:\{\{\{\s?)(.*?)(?:\s*\}\}\})$///
#
# Setting a path enables a simple disk based file system
#
@setPath = (path) ->
#
# Templates load their own extends and includes
#
Liquid.Template.fileSystem = new Liquid.LocalFileSystem(path)
return Liquid
#
# Hapi wants a compile function
#
@compile = (template, options) ->
t = Liquid.Template.parse(template)
(context, options) ->
t.render(context)
require './liquid/version'
require './liquid/drop'
require './liquid/errors'
require './liquid/interrupts'
require './liquid/strainer'
require './liquid/context'
require './liquid/tag'
require './liquid/block'
require './liquid/document'
require './liquid/variable'
require './liquid/filesystem'
require './liquid/template'
require './liquid/standardfilters'
require './liquid/condition'
class Liquid.Tags
require './liquid/tags/assign'
require './liquid/tags/block'
require './liquid/tags/break'
require './liquid/tags/capture'
require './liquid/tags/case'
require './liquid/tags/comment'
require './liquid/tags/continue'
require './liquid/tags/cycle'
require './liquid/tags/decrement'
require './liquid/tags/extends'
require './liquid/tags/for'
require './liquid/tags/if'
require './liquid/tags/ifchanged'
require './liquid/tags/include'
require './liquid/tags/increment'
require './liquid/tags/raw'
require './liquid/tags/unless'
require './extras/liquidView' | true | ###
Copyright (c) 2013 - 2014 PI:NAME:<NAME>END_PI PI:EMAIL:<EMAIL>END_PI
Copyright (c) 2005, 2006 PI:NAME:<NAME>END_PI
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
module.exports = class Liquid
@Liquid = Liquid # dereference for AMD
@FilterSeparator = ///\|///
@ArgumentSeparator = ','
@FilterArgumentSeparator = ':'
@VariableAttributeSeparator = '.'
@TagStart = ///\{\%///
@TagEnd = ///\%\}///
@VariableSignature = ///\(?[\w\-\.\[\]]\)?///
@VariableSegment = ///[\w\-]///
@VariableStart = ///\{\{///
@VariableEnd = ///\}\}///
@VariableIncompleteEnd = ///\}\}?///
@QuotedString = ///"[^"]*"|'[^']*'///
@QuotedFragment = ///#{@QuotedString.source}|(?:[^\s,\|'"]|#{@QuotedString.source})+///
@StrictQuotedFragment = ///"[^"]+"|'[^']+'|[^\s|:,]+///
@FirstFilterArgument = ///#{@FilterArgumentSeparator}(?:#{@StrictQuotedFragment.source})///
@OtherFilterArgument = ///#{@ArgumentSeparator}(?:#{@StrictQuotedFragment.source})///
@SpacelessFilter = ///^(?:'[^']+'|"[^"]+"|[^'"])*#{@FilterSeparator.source}(?:#{@StrictQuotedFragment.source})(?:#{@FirstFilterArgument.source}(?:#{@OtherFilterArgument.source})*)?///
@Expression = ///(?:#{@QuotedFragment.source}(?:#{@SpacelessFilter.source})*)///
@TagAttributes = ///(\w+)\s*\:\s*(#{@QuotedFragment.source})///
@AnyStartingTag = ///\{\{|\{\%///
@PartialTemplateParser = ///#{@TagStart.source}.*?#{@TagEnd.source}|#{@VariableStart.source}.*?#{@VariableIncompleteEnd.source}///
@TemplateParser = ///(#{@PartialTemplateParser.source}|#{@AnyStartingTag.source})///
@VariableParser = ///\[[^\]]+\]|#{@VariableSegment.source}+\??///
@LiteralShorthand = ///^(?:\{\{\{\s?)(.*?)(?:\s*\}\}\})$///
#
# Setting a path enables a simple disk based file system
#
@setPath = (path) ->
#
# Templates load their own extends and includes
#
Liquid.Template.fileSystem = new Liquid.LocalFileSystem(path)
return Liquid
#
# Hapi wants a compile function
#
@compile = (template, options) ->
t = Liquid.Template.parse(template)
(context, options) ->
t.render(context)
require './liquid/version'
require './liquid/drop'
require './liquid/errors'
require './liquid/interrupts'
require './liquid/strainer'
require './liquid/context'
require './liquid/tag'
require './liquid/block'
require './liquid/document'
require './liquid/variable'
require './liquid/filesystem'
require './liquid/template'
require './liquid/standardfilters'
require './liquid/condition'
class Liquid.Tags
require './liquid/tags/assign'
require './liquid/tags/block'
require './liquid/tags/break'
require './liquid/tags/capture'
require './liquid/tags/case'
require './liquid/tags/comment'
require './liquid/tags/continue'
require './liquid/tags/cycle'
require './liquid/tags/decrement'
require './liquid/tags/extends'
require './liquid/tags/for'
require './liquid/tags/if'
require './liquid/tags/ifchanged'
require './liquid/tags/include'
require './liquid/tags/increment'
require './liquid/tags/raw'
require './liquid/tags/unless'
require './extras/liquidView' |
[
{
"context": "dOptions = (namespace, name, value) ->\n keyPath = \"#{namespace}.#{name}\"\n title = getSettingTitle(keyPath, name)\n descri",
"end": 5731,
"score": 0.9590008854866028,
"start": 5709,
"tag": "KEY",
"value": "\"#{namespace}.#{name}\""
},
{
"context": "Checkbox = (nam... | lib/settings-panel.coffee | thedaniel/settings-view | 0 | {$, $$, View, TextEditorView} = require 'atom'
_ = require 'underscore-plus'
module.exports =
class SettingsPanel extends View
@content: ->
@div class: 'settings-panel'
initialize: (namespace, @options={}) ->
if @options.scopeName
namespace = 'editor'
scopedSettings = [
'autoIndent'
'autoIndentOnPaste'
'invisibles'
'nonWordCharacters'
'normalizeIndentOnPaste'
'preferredLineLength'
'scrollPastEnd'
'showIndentGuide'
'showInvisibles'
'softWrap'
'softWrapAtPreferredLineLength'
'tabLength'
]
settings = {}
for name in scopedSettings
settings[name] = atom.config.get([@options.scopeName], name)
else
settings = atom.config.getSettings()[namespace]
@appendSettings(namespace, settings)
@bindCheckboxFields()
@bindSelectFields()
@bindEditors()
appendSettings: (namespace, settings) ->
return if _.isEmpty(settings)
title = @options.title
includeTitle = @options.includeTitle ? true
if includeTitle
title ?= "#{_.undasherize(_.uncamelcase(namespace))} Settings"
else
title ?= "Settings"
icon = @options.icon ? 'gear'
sortedSettings = @sortSettings(namespace, settings)
@append $$ ->
@section class: 'config-section', =>
@div class: "block section-heading icon icon-#{icon}", title
@div class: 'section-body', =>
for name in sortedSettings
appendSetting.call(this, namespace, name, settings[name])
sortSettings: (namespace, settings) ->
_.chain(settings).keys().sortBy((name) -> name).sortBy((name) -> atom.config.getSchema("#{namespace}.#{name}")?.order).value()
bindCheckboxFields: ->
@find('input[id]').toArray().forEach (input) =>
input = $(input)
name = input.attr('id')
type = input.attr('type')
@observe name, (value) ->
if type is 'checkbox'
input.prop('checked', value)
else
input.val(value) if value
input.on 'change', =>
value = input.val()
if type == 'checkbox'
value = !!input.prop('checked')
else
value = @parseValue(type, value)
@set(name, value)
observe: (name, callback) ->
if @options.scopeName
@subscribe atom.config.observe([@options.scopeName], name, callback)
else
@subscribe atom.config.observe(name, callback)
isDefault: (name) ->
if @options.scopeName
atom.config.isDefault(@options.scopeName, name)
else
atom.config.isDefault(name)
getDefault: (name) ->
if @options.scopeName
atom.config.getDefault(@options.scopeName, name)
else
atom.config.getDefault(name)
set: (name, value) ->
if @options.scopeName
if value is undefined
atom.config.restoreDefault(@options.scopeName, name)
else
atom.config.set(@options.scopeName, name, value)
else
atom.config.set(name, value)
# Wires every <select id="..."> to the config key named by its id:
# config changes update the current selection; user changes call @set.
bindSelectFields: ->
@find('select[id]').toArray().forEach (select) =>
select = $(select)
name = select.attr('id')
@observe name, (value) ->
select.val(value)
select.change =>
@set(name, select.val())
# Wires every mini text editor (.editor[id]) to its config key: shows the
# default as placeholder text, shows '' while the setting is still at its
# default, and writes parsed buffer contents back on 'contents-modified'.
bindEditors: ->
@find('.editor[id]').views().forEach (editorView) =>
name = editorView.attr('id')
# 'number', 'array' or 'string' — set by appendEditor/appendArray and
# used by parseValue when writing edits back.
type = editorView.attr('type')
if defaultValue = @valueToString(@getDefault(name))
editorView.setPlaceholderText("Default: #{defaultValue}")
@observe name, (value) =>
# Leave the editor empty (placeholder visible) while the value is
# still the default, rather than echoing the default text.
if @isDefault(name)
stringValue = ''
else
stringValue = @valueToString(value) ? ''
# Avoid feedback loops: skip when the editor already shows this
# value, either textually or after parsing its current contents.
return if stringValue is editorView.getText()
return if _.isEqual(value, @parseValue(type, editorView.getText()))
editorView.setText(stringValue)
editorView.getEditor().getBuffer().on 'contents-modified', =>
@set(name, @parseValue(type, editorView.getText()))
# Renders a config value for display in a text editor: arrays become
# comma-separated text; anything else uses toString; null/undefined
# yields undefined.
valueToString: (value) ->
  return value.join(', ') if _.isArray(value)
  value?.toString()
# Parses the raw editor string according to the setting's declared type.
# '' always maps to undefined (meaning "restore default"); numbers that
# fail to parse are passed through unchanged as strings; arrays are
# comma-split with blank entries dropped and items trimmed.
parseValue: (type, value) ->
  return undefined if value is ''
  switch type
    when 'number'
      parsed = parseFloat(value)
      value = parsed unless isNaN(parsed)
    when 'array'
      pieces = (value or '').split(',')
      value = (piece.trim() for piece in pieces when piece)
  value
###
# Space Pen Helpers
###
# True when `array` is a real array whose every element is a string,
# i.e. it can be round-tripped through the comma-separated text editor.
# Guard against non-array input: appendSetting may pass `undefined` here
# when a schema declares type 'array' but no value is set, and the
# compiled `for ... in` loop would throw a TypeError reading `.length`
# of undefined.
isEditableArray = (array) ->
  return false unless Array.isArray(array)
  for item in array
    return false unless _.isString(item)
  true
# Renders the appropriate control for a single setting, dispatching on the
# schema type when one is declared, otherwise on the value's runtime type.
# Must be invoked with .call(builder) from inside space-pen markup.
appendSetting = (namespace, name, value) ->
if namespace is 'core'
return if name is 'themes' # Handled in the Themes panel
return if name is 'disabledPackages' # Handled in the Packages panel
@div class: 'control-group', =>
@div class: 'controls', =>
schema = atom.config.getSchema("#{namespace}.#{name}")
if schema?.enum
appendOptions.call(this, namespace, name, value)
else if _.isBoolean(value) or schema?.type is 'boolean'
appendCheckbox.call(this, namespace, name, value)
else if _.isArray(value) or schema?.type is 'array'
# Arrays are only editable as comma-separated text when every element
# is a string; otherwise no control is rendered for the setting.
appendArray.call(this, namespace, name, value) if isEditableArray(value)
else if _.isObject(value) or schema?.type is 'object'
# Objects are flattened into one control per nested key.
appendObject.call(this, namespace, name, value)
else
appendEditor.call(this, namespace, name, value)
# Human-readable title for a setting: the schema's `title` when present,
# otherwise the uncamelcased, capitalized form of the setting name.
getSettingTitle = (keyPath, name='') ->
  schemaTitle = atom.config.getSchema(keyPath)?.title
  return schemaTitle if schemaTitle
  _.uncamelcase(name).split('.').map(_.capitalize).join(' ')
# Description text for a setting from its schema; '' when none declared.
getSettingDescription = (keyPath) ->
  schema = atom.config.getSchema(keyPath)
  schema?.description or ''
# Renders a <select> for a setting whose schema declares an enum of
# allowed values. Must be invoked with .call(builder) from space-pen markup.
appendOptions = (namespace, name, value) ->
keyPath = "#{namespace}.#{name}"
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
options = atom.config.getSchema(keyPath)?.enum ? []
@label class: 'control-label', =>
@div class: 'setting-title', title
@div class: 'setting-description', description
# The element id is the key path; bindSelectFields keys off it.
@select id: keyPath, class: 'form-control', =>
for option in options
@option value: option, option
# Renders a checkbox for a boolean setting. The input's id is the config
# key path; bindCheckboxFields keys off it.
appendCheckbox = (namespace, name, value) ->
keyPath = "#{namespace}.#{name}"
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
@div class: 'checkbox', =>
@label for: keyPath, =>
@input id: keyPath, type: 'checkbox'
@div class: 'setting-title', title
@div class: 'setting-description', description
# Renders a mini text editor for a free-form setting. The editor carries
# a `type` attribute ('number' or 'string') that parseValue later uses
# when writing edits back to the config.
appendEditor = (namespace, name, value) ->
keyPath = "#{namespace}.#{name}"
if _.isNumber(value)
type = 'number'
else
type = 'string'
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
@label class: 'control-label', =>
@div class: 'setting-title', title
@div class: 'setting-description', description
@div class: 'controls', =>
@div class: 'editor-container', =>
# Subview names cannot contain dots, so they are stripped from the
# key path; the editor's id keeps the full key path for binding.
@subview keyPath.replace(/\./g, ''), new TextEditorView(mini: true, attributes: {id: keyPath, type: type})
# Renders a mini text editor for an array-of-strings setting; the
# 'array' type attribute makes parseValue comma-split edits on the way
# back into the config.
appendArray = (namespace, name, value) ->
keyPath = "#{namespace}.#{name}"
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
@label class: 'control-label', =>
@div class: 'setting-title', title
@div class: 'setting-description', description
@div class: 'controls', =>
@div class: 'editor-container', =>
# Subview names cannot contain dots, so they are stripped.
@subview keyPath.replace(/\./g, ''), new TextEditorView(mini: true, attributes: {id: keyPath, type: 'array'})
# Renders object-valued settings as one control per nested key, in
# sorted key order. Must be invoked with .call(builder) from markup.
appendObject = (namespace, name, value) ->
  sortedKeys = _.keys(value).sort()
  for key in sortedKeys
    appendSetting.call(this, namespace, "#{name}.#{key}", value[key])
| 137817 | {$, $$, View, TextEditorView} = require 'atom'
_ = require 'underscore-plus'
module.exports =
class SettingsPanel extends View
@content: ->
@div class: 'settings-panel'
initialize: (namespace, @options={}) ->
if @options.scopeName
namespace = 'editor'
scopedSettings = [
'autoIndent'
'autoIndentOnPaste'
'invisibles'
'nonWordCharacters'
'normalizeIndentOnPaste'
'preferredLineLength'
'scrollPastEnd'
'showIndentGuide'
'showInvisibles'
'softWrap'
'softWrapAtPreferredLineLength'
'tabLength'
]
settings = {}
for name in scopedSettings
settings[name] = atom.config.get([@options.scopeName], name)
else
settings = atom.config.getSettings()[namespace]
@appendSettings(namespace, settings)
@bindCheckboxFields()
@bindSelectFields()
@bindEditors()
appendSettings: (namespace, settings) ->
return if _.isEmpty(settings)
title = @options.title
includeTitle = @options.includeTitle ? true
if includeTitle
title ?= "#{_.undasherize(_.uncamelcase(namespace))} Settings"
else
title ?= "Settings"
icon = @options.icon ? 'gear'
sortedSettings = @sortSettings(namespace, settings)
@append $$ ->
@section class: 'config-section', =>
@div class: "block section-heading icon icon-#{icon}", title
@div class: 'section-body', =>
for name in sortedSettings
appendSetting.call(this, namespace, name, settings[name])
sortSettings: (namespace, settings) ->
_.chain(settings).keys().sortBy((name) -> name).sortBy((name) -> atom.config.getSchema("#{namespace}.#{name}")?.order).value()
bindCheckboxFields: ->
@find('input[id]').toArray().forEach (input) =>
input = $(input)
name = input.attr('id')
type = input.attr('type')
@observe name, (value) ->
if type is 'checkbox'
input.prop('checked', value)
else
input.val(value) if value
input.on 'change', =>
value = input.val()
if type == 'checkbox'
value = !!input.prop('checked')
else
value = @parseValue(type, value)
@set(name, value)
observe: (name, callback) ->
if @options.scopeName
@subscribe atom.config.observe([@options.scopeName], name, callback)
else
@subscribe atom.config.observe(name, callback)
isDefault: (name) ->
if @options.scopeName
atom.config.isDefault(@options.scopeName, name)
else
atom.config.isDefault(name)
getDefault: (name) ->
if @options.scopeName
atom.config.getDefault(@options.scopeName, name)
else
atom.config.getDefault(name)
set: (name, value) ->
if @options.scopeName
if value is undefined
atom.config.restoreDefault(@options.scopeName, name)
else
atom.config.set(@options.scopeName, name, value)
else
atom.config.set(name, value)
bindSelectFields: ->
@find('select[id]').toArray().forEach (select) =>
select = $(select)
name = select.attr('id')
@observe name, (value) ->
select.val(value)
select.change =>
@set(name, select.val())
bindEditors: ->
@find('.editor[id]').views().forEach (editorView) =>
name = editorView.attr('id')
type = editorView.attr('type')
if defaultValue = @valueToString(@getDefault(name))
editorView.setPlaceholderText("Default: #{defaultValue}")
@observe name, (value) =>
if @isDefault(name)
stringValue = ''
else
stringValue = @valueToString(value) ? ''
return if stringValue is editorView.getText()
return if _.isEqual(value, @parseValue(type, editorView.getText()))
editorView.setText(stringValue)
editorView.getEditor().getBuffer().on 'contents-modified', =>
@set(name, @parseValue(type, editorView.getText()))
valueToString: (value) ->
if _.isArray(value)
value.join(', ')
else
value?.toString()
parseValue: (type, value) ->
if value == ''
value = undefined
else if type == 'number'
floatValue = parseFloat(value)
value = floatValue unless isNaN(floatValue)
else if type == 'array'
arrayValue = (value or '').split(',')
value = (val.trim() for val in arrayValue when val)
value
###
# Space Pen Helpers
###
isEditableArray = (array) ->
for item in array
return false unless _.isString(item)
true
appendSetting = (namespace, name, value) ->
if namespace is 'core'
return if name is 'themes' # Handled in the Themes panel
return if name is 'disabledPackages' # Handled in the Packages panel
@div class: 'control-group', =>
@div class: 'controls', =>
schema = atom.config.getSchema("#{namespace}.#{name}")
if schema?.enum
appendOptions.call(this, namespace, name, value)
else if _.isBoolean(value) or schema?.type is 'boolean'
appendCheckbox.call(this, namespace, name, value)
else if _.isArray(value) or schema?.type is 'array'
appendArray.call(this, namespace, name, value) if isEditableArray(value)
else if _.isObject(value) or schema?.type is 'object'
appendObject.call(this, namespace, name, value)
else
appendEditor.call(this, namespace, name, value)
getSettingTitle = (keyPath, name='') ->
title = atom.config.getSchema(keyPath)?.title
title or _.uncamelcase(name).split('.').map(_.capitalize).join(' ')
getSettingDescription = (keyPath) ->
atom.config.getSchema(keyPath)?.description or ''
appendOptions = (namespace, name, value) ->
keyPath = <KEY>
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
options = atom.config.getSchema(keyPath)?.enum ? []
@label class: 'control-label', =>
@div class: 'setting-title', title
@div class: 'setting-description', description
@select id: keyPath, class: 'form-control', =>
for option in options
@option value: option, option
appendCheckbox = (namespace, name, value) ->
keyPath = <KEY>
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
@div class: 'checkbox', =>
@label for: keyPath, =>
@input id: keyPath, type: 'checkbox'
@div class: 'setting-title', title
@div class: 'setting-description', description
appendEditor = (namespace, name, value) ->
keyPath = <KEY>
if _.isNumber(value)
type = 'number'
else
type = 'string'
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
@label class: 'control-label', =>
@div class: 'setting-title', title
@div class: 'setting-description', description
@div class: 'controls', =>
@div class: 'editor-container', =>
@subview keyPath.replace(/\./g, ''), new TextEditorView(mini: true, attributes: {id: keyPath, type: type})
appendArray = (namespace, name, value) ->
keyPath = <KEY>
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
@label class: 'control-label', =>
@div class: 'setting-title', title
@div class: 'setting-description', description
@div class: 'controls', =>
@div class: 'editor-container', =>
@subview keyPath.replace(/\./g, ''), new TextEditorView(mini: true, attributes: {id: keyPath, type: 'array'})
appendObject = (namespace, name, value) ->
for key in _.keys(value).sort()
appendSetting.call(this, namespace, "#{name}.#{key}", value[key])
| true | {$, $$, View, TextEditorView} = require 'atom'
_ = require 'underscore-plus'
module.exports =
class SettingsPanel extends View
@content: ->
@div class: 'settings-panel'
initialize: (namespace, @options={}) ->
if @options.scopeName
namespace = 'editor'
scopedSettings = [
'autoIndent'
'autoIndentOnPaste'
'invisibles'
'nonWordCharacters'
'normalizeIndentOnPaste'
'preferredLineLength'
'scrollPastEnd'
'showIndentGuide'
'showInvisibles'
'softWrap'
'softWrapAtPreferredLineLength'
'tabLength'
]
settings = {}
for name in scopedSettings
settings[name] = atom.config.get([@options.scopeName], name)
else
settings = atom.config.getSettings()[namespace]
@appendSettings(namespace, settings)
@bindCheckboxFields()
@bindSelectFields()
@bindEditors()
appendSettings: (namespace, settings) ->
return if _.isEmpty(settings)
title = @options.title
includeTitle = @options.includeTitle ? true
if includeTitle
title ?= "#{_.undasherize(_.uncamelcase(namespace))} Settings"
else
title ?= "Settings"
icon = @options.icon ? 'gear'
sortedSettings = @sortSettings(namespace, settings)
@append $$ ->
@section class: 'config-section', =>
@div class: "block section-heading icon icon-#{icon}", title
@div class: 'section-body', =>
for name in sortedSettings
appendSetting.call(this, namespace, name, settings[name])
sortSettings: (namespace, settings) ->
_.chain(settings).keys().sortBy((name) -> name).sortBy((name) -> atom.config.getSchema("#{namespace}.#{name}")?.order).value()
bindCheckboxFields: ->
@find('input[id]').toArray().forEach (input) =>
input = $(input)
name = input.attr('id')
type = input.attr('type')
@observe name, (value) ->
if type is 'checkbox'
input.prop('checked', value)
else
input.val(value) if value
input.on 'change', =>
value = input.val()
if type == 'checkbox'
value = !!input.prop('checked')
else
value = @parseValue(type, value)
@set(name, value)
observe: (name, callback) ->
if @options.scopeName
@subscribe atom.config.observe([@options.scopeName], name, callback)
else
@subscribe atom.config.observe(name, callback)
isDefault: (name) ->
if @options.scopeName
atom.config.isDefault(@options.scopeName, name)
else
atom.config.isDefault(name)
getDefault: (name) ->
if @options.scopeName
atom.config.getDefault(@options.scopeName, name)
else
atom.config.getDefault(name)
set: (name, value) ->
if @options.scopeName
if value is undefined
atom.config.restoreDefault(@options.scopeName, name)
else
atom.config.set(@options.scopeName, name, value)
else
atom.config.set(name, value)
bindSelectFields: ->
@find('select[id]').toArray().forEach (select) =>
select = $(select)
name = select.attr('id')
@observe name, (value) ->
select.val(value)
select.change =>
@set(name, select.val())
bindEditors: ->
@find('.editor[id]').views().forEach (editorView) =>
name = editorView.attr('id')
type = editorView.attr('type')
if defaultValue = @valueToString(@getDefault(name))
editorView.setPlaceholderText("Default: #{defaultValue}")
@observe name, (value) =>
if @isDefault(name)
stringValue = ''
else
stringValue = @valueToString(value) ? ''
return if stringValue is editorView.getText()
return if _.isEqual(value, @parseValue(type, editorView.getText()))
editorView.setText(stringValue)
editorView.getEditor().getBuffer().on 'contents-modified', =>
@set(name, @parseValue(type, editorView.getText()))
valueToString: (value) ->
if _.isArray(value)
value.join(', ')
else
value?.toString()
parseValue: (type, value) ->
if value == ''
value = undefined
else if type == 'number'
floatValue = parseFloat(value)
value = floatValue unless isNaN(floatValue)
else if type == 'array'
arrayValue = (value or '').split(',')
value = (val.trim() for val in arrayValue when val)
value
###
# Space Pen Helpers
###
isEditableArray = (array) ->
for item in array
return false unless _.isString(item)
true
appendSetting = (namespace, name, value) ->
if namespace is 'core'
return if name is 'themes' # Handled in the Themes panel
return if name is 'disabledPackages' # Handled in the Packages panel
@div class: 'control-group', =>
@div class: 'controls', =>
schema = atom.config.getSchema("#{namespace}.#{name}")
if schema?.enum
appendOptions.call(this, namespace, name, value)
else if _.isBoolean(value) or schema?.type is 'boolean'
appendCheckbox.call(this, namespace, name, value)
else if _.isArray(value) or schema?.type is 'array'
appendArray.call(this, namespace, name, value) if isEditableArray(value)
else if _.isObject(value) or schema?.type is 'object'
appendObject.call(this, namespace, name, value)
else
appendEditor.call(this, namespace, name, value)
getSettingTitle = (keyPath, name='') ->
title = atom.config.getSchema(keyPath)?.title
title or _.uncamelcase(name).split('.').map(_.capitalize).join(' ')
getSettingDescription = (keyPath) ->
atom.config.getSchema(keyPath)?.description or ''
appendOptions = (namespace, name, value) ->
keyPath = PI:KEY:<KEY>END_PI
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
options = atom.config.getSchema(keyPath)?.enum ? []
@label class: 'control-label', =>
@div class: 'setting-title', title
@div class: 'setting-description', description
@select id: keyPath, class: 'form-control', =>
for option in options
@option value: option, option
appendCheckbox = (namespace, name, value) ->
keyPath = PI:KEY:<KEY>END_PI
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
@div class: 'checkbox', =>
@label for: keyPath, =>
@input id: keyPath, type: 'checkbox'
@div class: 'setting-title', title
@div class: 'setting-description', description
appendEditor = (namespace, name, value) ->
keyPath = PI:KEY:<KEY>END_PI
if _.isNumber(value)
type = 'number'
else
type = 'string'
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
@label class: 'control-label', =>
@div class: 'setting-title', title
@div class: 'setting-description', description
@div class: 'controls', =>
@div class: 'editor-container', =>
@subview keyPath.replace(/\./g, ''), new TextEditorView(mini: true, attributes: {id: keyPath, type: type})
appendArray = (namespace, name, value) ->
keyPath = PI:KEY:<KEY>END_PI
title = getSettingTitle(keyPath, name)
description = getSettingDescription(keyPath)
@label class: 'control-label', =>
@div class: 'setting-title', title
@div class: 'setting-description', description
@div class: 'controls', =>
@div class: 'editor-container', =>
@subview keyPath.replace(/\./g, ''), new TextEditorView(mini: true, attributes: {id: keyPath, type: 'array'})
appendObject = (namespace, name, value) ->
for key in _.keys(value).sort()
appendSetting.call(this, namespace, "#{name}.#{key}", value[key])
|
[
{
"context": "tionName}/distinct\"\n json:\n keyName: 'propTwo'\n (err, res, body) ->\n res.statusCode",
"end": 2130,
"score": 0.991424560546875,
"start": 2123,
"tag": "KEY",
"value": "propTwo"
},
{
"context": " query:\n propOne: 2\n keyName... | test/lib/collection-access-commands/distinct.test.coffee | enterstudio/business-logic-mock-proxy | 2 | #
# Copyright 2016 Kinvey, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
config = require 'config'
should = require 'should'
request = require 'request'
BSON = require('bson').BSONPure.BSON
testUtils = require '../../testUtils'
req = request.defaults {}
baseUrl = "http://#{config.server.address}:#{config.server.port}"
collectionName = "testCollection"
describe 'collectionAccess / distinct', () ->
before (done) ->
testUtils.startServer baseUrl, (forkedProcess) ->
done()
after (done) ->
testUtils.stopServer ->
done()
beforeEach (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/insert"
json:
entity: [{ propOne: 1, propTwo: 2, unique: 0 }
{ propOne: 1, propTwo: 2, unique: 1 }
{ propOne: 2, propTwo: 1, unique: 2 }
{ propOne: 2, propTwo: 1, unique: 3 }
{ propOne: 2, propTwo: 3, unique: 4 }]
(err, res, body) ->
done()
afterEach (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/remove"
json:
query: {}
(err, res, body) ->
done err
it 'fails when no keyName included in the request', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json: {}
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'MissingRequiredParameter'
done()
it 'fails when query is not specified', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
keyName: 'propTwo'
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'MissingRequiredParameter'
done()
it 'correctly performs a distinct when query is specified', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
query:
propOne: 2
keyName: 'propTwo'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 2
body[0].should.eql 1
body[1].should.eql 3
done()
it 'correctly performs a distinct by Mongo ObjectID', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/findOne"
json:
query:
unique: 0
(err, res, body) ->
return done err if err
objectId = body._id.toString()
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
query:
_id: objectId
keyName: 'unique'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 1
body[0].should.eql 0
done()
it 'correctly performs a distinct by an array of Mongo ObjectIDs', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/find"
json:
query:
$or: [{ unique: 0 }, { unique: 1 }]
(err, res, body) ->
return done err if err
object1Id = body[0]._id.toString()
object2Id = body[1]._id.toString()
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
query:
_id:
$in: [object1Id, object2Id]
keyName: 'unique'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 2
body[0].should.eql 0
body[1].should.eql 1
done()
describe 'edge cases', () ->
it "returns empty array when the collection doesn't exist", (done) ->
req.post
url: "#{baseUrl}/collectionAccess/fakeCollectionName/distinct"
json:
query: {}
keyName: 'propTwo'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 0
done()
describe 'restrictions', () ->
it 'fails when the query includes the $where operator', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/fakeCollectionName/distinct"
json:
query:
$where:
propOne: 2
keyName: 'propTwo'
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'DisallowedQuerySyntax'
done()
it 'fails when the query includes the $query operator', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/fakeCollectionName/distinct"
json:
query:
$query:
propOne: 2
keyName: 'propTwo'
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'DisallowedQuerySyntax'
done()
| 40558 | #
# Copyright 2016 Kinvey, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
config = require 'config'
should = require 'should'
request = require 'request'
BSON = require('bson').BSONPure.BSON
testUtils = require '../../testUtils'
req = request.defaults {}
baseUrl = "http://#{config.server.address}:#{config.server.port}"
collectionName = "testCollection"
describe 'collectionAccess / distinct', () ->
before (done) ->
testUtils.startServer baseUrl, (forkedProcess) ->
done()
after (done) ->
testUtils.stopServer ->
done()
beforeEach (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/insert"
json:
entity: [{ propOne: 1, propTwo: 2, unique: 0 }
{ propOne: 1, propTwo: 2, unique: 1 }
{ propOne: 2, propTwo: 1, unique: 2 }
{ propOne: 2, propTwo: 1, unique: 3 }
{ propOne: 2, propTwo: 3, unique: 4 }]
(err, res, body) ->
done()
afterEach (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/remove"
json:
query: {}
(err, res, body) ->
done err
it 'fails when no keyName included in the request', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json: {}
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'MissingRequiredParameter'
done()
it 'fails when query is not specified', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
keyName: '<KEY>'
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'MissingRequiredParameter'
done()
it 'correctly performs a distinct when query is specified', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
query:
propOne: 2
keyName: '<KEY>'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 2
body[0].should.eql 1
body[1].should.eql 3
done()
it 'correctly performs a distinct by Mongo ObjectID', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/findOne"
json:
query:
unique: 0
(err, res, body) ->
return done err if err
objectId = body._id.toString()
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
query:
_id: objectId
keyName: '<KEY>'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 1
body[0].should.eql 0
done()
it 'correctly performs a distinct by an array of Mongo ObjectIDs', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/find"
json:
query:
$or: [{ unique: 0 }, { unique: 1 }]
(err, res, body) ->
return done err if err
object1Id = body[0]._id.toString()
object2Id = body[1]._id.toString()
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
query:
_id:
$in: [object1Id, object2Id]
keyName: 'unique'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 2
body[0].should.eql 0
body[1].should.eql 1
done()
describe 'edge cases', () ->
it "returns empty array when the collection doesn't exist", (done) ->
req.post
url: "#{baseUrl}/collectionAccess/fakeCollectionName/distinct"
json:
query: {}
keyName: 'propTwo'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 0
done()
describe 'restrictions', () ->
it 'fails when the query includes the $where operator', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/fakeCollectionName/distinct"
json:
query:
$where:
propOne: 2
keyName: 'propTwo'
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'DisallowedQuerySyntax'
done()
it 'fails when the query includes the $query operator', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/fakeCollectionName/distinct"
json:
query:
$query:
propOne: 2
keyName: 'propTwo'
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'DisallowedQuerySyntax'
done()
| true | #
# Copyright 2016 Kinvey, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
config = require 'config'
should = require 'should'
request = require 'request'
BSON = require('bson').BSONPure.BSON
testUtils = require '../../testUtils'
req = request.defaults {}
baseUrl = "http://#{config.server.address}:#{config.server.port}"
collectionName = "testCollection"
describe 'collectionAccess / distinct', () ->
before (done) ->
testUtils.startServer baseUrl, (forkedProcess) ->
done()
after (done) ->
testUtils.stopServer ->
done()
beforeEach (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/insert"
json:
entity: [{ propOne: 1, propTwo: 2, unique: 0 }
{ propOne: 1, propTwo: 2, unique: 1 }
{ propOne: 2, propTwo: 1, unique: 2 }
{ propOne: 2, propTwo: 1, unique: 3 }
{ propOne: 2, propTwo: 3, unique: 4 }]
(err, res, body) ->
done()
afterEach (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/remove"
json:
query: {}
(err, res, body) ->
done err
it 'fails when no keyName included in the request', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json: {}
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'MissingRequiredParameter'
done()
it 'fails when query is not specified', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
keyName: 'PI:KEY:<KEY>END_PI'
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'MissingRequiredParameter'
done()
it 'correctly performs a distinct when query is specified', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
query:
propOne: 2
keyName: 'PI:KEY:<KEY>END_PI'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 2
body[0].should.eql 1
body[1].should.eql 3
done()
it 'correctly performs a distinct by Mongo ObjectID', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/findOne"
json:
query:
unique: 0
(err, res, body) ->
return done err if err
objectId = body._id.toString()
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
query:
_id: objectId
keyName: 'PI:KEY:<KEY>END_PI'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 1
body[0].should.eql 0
done()
it 'correctly performs a distinct by an array of Mongo ObjectIDs', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/find"
json:
query:
$or: [{ unique: 0 }, { unique: 1 }]
(err, res, body) ->
return done err if err
object1Id = body[0]._id.toString()
object2Id = body[1]._id.toString()
req.post
url: "#{baseUrl}/collectionAccess/#{collectionName}/distinct"
json:
query:
_id:
$in: [object1Id, object2Id]
keyName: 'unique'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 2
body[0].should.eql 0
body[1].should.eql 1
done()
describe 'edge cases', () ->
it "returns empty array when the collection doesn't exist", (done) ->
req.post
url: "#{baseUrl}/collectionAccess/fakeCollectionName/distinct"
json:
query: {}
keyName: 'propTwo'
(err, res, body) ->
return done err if err
res.statusCode.should.eql 200
Array.isArray(body).should.be.true
body.length.should.eql 0
done()
describe 'restrictions', () ->
it 'fails when the query includes the $where operator', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/fakeCollectionName/distinct"
json:
query:
$where:
propOne: 2
keyName: 'propTwo'
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'DisallowedQuerySyntax'
done()
it 'fails when the query includes the $query operator', (done) ->
req.post
url: "#{baseUrl}/collectionAccess/fakeCollectionName/distinct"
json:
query:
$query:
propOne: 2
keyName: 'propTwo'
(err, res, body) ->
res.statusCode.should.eql 400
body.code.should.eql 'DisallowedQuerySyntax'
done()
|
[
{
"context": " : msg.description\n key : \"#{msg.code}/#{msg.key}\"\n\n #----------\n\n # Log the PagerDuty",
"end": 9425,
"score": 0.9522616267204285,
"start": 9401,
"tag": "KEY",
"value": "\"#{msg.code}/#{msg.key}\""
}
] | src/streammachine/alerts.coffee | firebrandv2/FirebrandNetwork.ga | 342 | nconf = require "nconf"
_ = require "underscore"
nodemailer = require "nodemailer"
pagerduty = require "pagerduty"
# Known alert conditions. `wait_for` is the debounce interval in seconds:
# a condition must stay active that long before an alert fires, and must
# stay clear that long before the matching all-clear fires (see Alerts).
ALERT_TYPES =
sourceless:
description: "A monitored stream has lost its only source connection."
wait_for: 30
slave_disconnected:
description: "A slave server has lost its connection to the master server."
wait_for: 30
slave_unresponsive:
description: "A slave server has stopped responding to our status queries."
wait_for: 30
slave_unsynced:
description: "A slave server is out of sync with master."
wait_for: 30
# Alerts is responsible for understanding how long we should wait before
# saying something about alert conditions. Callers invoke update() with a
# code (one of ALERT_TYPES), a key identifying the affected stream/server,
# and the condition's current state. After a code-specific grace period an
# "alert" event is emitted; once the condition has stayed clear for the
# same period, an "alert_cleared" event follows. The nested Email and
# PagerDuty classes subscribe to those events when configured.
module.exports = class Alerts extends require("events").EventEmitter
    constructor: (@opts) ->
        @logger = @opts.logger

        # notification transports are only created when configured
        @email = new Alerts.Email @, nconf.get("alerts:email") if nconf.get("alerts:email")
        @pagerduty = new Alerts.PagerDuty @, nconf.get("alerts:pagerduty") if nconf.get("alerts:pagerduty")

        # @_states[code][key] -> bookkeeping object for each tracked condition
        @_states = {}

    #----------

    # Record the current state of a condition.
    # code   -- one of ALERT_TYPES
    # key    -- identifies the affected stream / slave
    # active -- whether the condition is currently present
    # Returns false for unknown codes; otherwise no meaningful return value.
    update: (code,key,active) ->
        # make sure we know what this is...
        if !ALERT_TYPES[code]
            console.log "Unknown alert type sent: #{code} / #{key}"
            return false

        if !@_states[ code ]
            @_states[ code ] = {}

        # are we setting or unsetting?
        if active
            if s = @_states[ code ][ key ]
                # already tracking this condition: refresh its timestamp
                s.last_seen_at = new Date

                # make sure there isn't an all-clear waiting to fire
                clearTimeout s.c_timeout if s.c_timeout
                delete s.c_timeout

            else
                # first sighting of this condition: start tracking it
                s = @_states[ code ][ key ] =
                    code:           code
                    key:            key
                    triggered_at:   new Date
                    last_seen_at:   new Date
                    alert_sent:     false
                    a_timeout:      null
                    c_timeout:      null

            # schedule the alert unless one was already sent or is pending
            if !s.alert_sent && !s.a_timeout
                s.a_timeout = setTimeout =>
                    @_fireAlert(s)
                , ALERT_TYPES[ code ].wait_for * 1000

        else
            # condition is clear; tear down tracking state if we had any
            if s = @_states[ code ][ key ]
                # cancel a pending alert that never fired
                clearTimeout s.a_timeout if s.a_timeout
                delete s.a_timeout

                if s.alert_sent && !s.c_timeout
                    # we had sent an alert, so schedule the all-clear notice
                    s.c_timeout = setTimeout =>
                        @_fireAllClear(s)
                    , ALERT_TYPES[ code ].wait_for * 1000

    #----------

    # Emit "alert" for a tracked condition and mark it as sent.
    _fireAlert: (obj) ->
        alert =
            code:           obj.code
            key:            obj.key
            triggered_at:   obj.triggered_at
            description:    ALERT_TYPES[ obj.code ].description

        @logger.alert "Alert: #{obj.key} : #{ alert.description }", alert
        @emit "alert", alert

        # mark our alert as sent
        obj.alert_sent = true

    #----------

    # Emit "alert_cleared" and drop the condition from @_states. If the
    # condition returns later, it is treated as a brand-new event.
    _fireAllClear: (obj) ->
        alert =
            code:           obj.code
            key:            obj.key
            triggered_at:   obj.triggered_at
            last_seen_at:   obj.last_seen_at
            description:    ALERT_TYPES[ obj.code ].description

        @logger.alert "Alert Cleared: #{obj.key} : #{ alert.description }", alert
        @emit "alert_cleared", alert

        delete @_states[ obj.code ][ obj.key ]

    #----------

    # Email transport: listens on the parent Alerts instance and sends
    # notification mail via nodemailer.
    class @Email
        constructor: (@alerts,@opts) ->
            # -- set up the transport -- #
            @transport = nodemailer.createTransport(@opts.mailer_type,@opts.mailer_options)

            # -- register our listeners -- #
            @alerts.on "alert",         (msg) => @_sendAlert(msg)
            @alerts.on "alert_cleared", (msg) => @_sendAllClear(msg)

        #----------

        # Mail out a newly-triggered alert.
        _sendAlert: (msg) ->
            email = _.extend {}, @opts.email_options,
                subject: "[StreamMachine/#{msg.key}] #{msg.code} Alert"
                generateTextFromHTML: true
                html: """
                    <p>StreamMachine has detected an alert condition of <b>#{msg.code}</b> for <b>#{msg.key}</b>.</p>
                    <p>#{msg.description}</p>
                    <p>Condition was first detected at <b>#{msg.triggered_at}</b>.</p>
                """

            @transport.sendMail email, (err,resp) =>
                if err
                    @alerts.logger.error "Error sending alert email: #{err}", error:err
                    return false

                @alerts.logger.debug "Alert email sent to #{email.to}.", code:msg.code, key:msg.key

        #----------

        # Mail out the all-clear for a previously-sent alert.
        _sendAllClear: (msg) ->
            email = _.extend {}, @opts.email_options,
                subject: "[StreamMachine/#{msg.key}] #{msg.code} Cleared"
                generateTextFromHTML: true
                html: """
                    <p>StreamMachine has cleared an alert condition of <b>#{msg.code}</b> for <b>#{msg.key}</b>.</p>
                    <p>#{msg.description}</p>
                    <p>Condition was first detected at <b>#{msg.triggered_at}</b>.</p>
                    <p>Condition was last seen at <b>#{msg.last_seen_at}</b>.</p>
                """

            @transport.sendMail email, (err,resp) =>
                if err
                    @alerts.logger.error "Error sending all clear email: #{err}", error:err
                    return false

                @alerts.logger.debug "All clear email sent to #{email.to}.", code:msg.code, key:msg.key

    #----------

    # PagerDuty transport: triggers an incident on "alert" and resolves it
    # on "alert_cleared", pairing the two through a stored incident key.
    class @PagerDuty
        constructor: (@alerts, @opts) ->
            @pager = new pagerduty serviceKey:@opts.serviceKey

            # maps our details.key -> PagerDuty incident_key so the later
            # resolve call can reference the original incident
            @incidentKeys = {}

            @alerts.on "alert",         (msg) => @_sendAlert(msg)
            @alerts.on "alert_cleared", (msg) => @_sendAllClear(msg)

        #----------

        # Create the initial alert in PagerDuty. If the response contains
        # an incident key, hold on to it so we can later resolve the alert
        # using the same key.
        _sendAlert: (msg) ->
            details = @_details(msg)
            @alerts.logger.debug "Sending alert to PagerDuty.", details:details
            @pager.create
                description : "[StreamMachine/#{msg.key}] #{msg.code} Alert"
                details     : details
                callback: (error, response) =>
                    # response may be null/undefined when the request
                    # errored, so guard the property access with ?.
                    if response?.incident_key
                        @incidentKeys[details.key] = response.incident_key
                    else
                        @alerts.logger.error "PagerDuty response did not include an incident key.", response:response, error:error

                    @_logResponse error, response,
                        "Alert sent to PagerDuty.", msg

        #----------

        # Mark the alert as "Resolved" in PagerDuty. Whether the call
        # errors or succeeds, the stored incident key is deleted.
        _sendAllClear: (msg) ->
            details = @_details(msg)
            @alerts.logger.debug "Sending allClear to PagerDuty.", details:details

            if @incidentKeys[details.key]
                @pager.resolve
                    incidentKey : @incidentKeys[details.key],
                    description : "[StreamMachine/#{msg.key}] #{msg.code} Cleared"
                    details     : details
                    callback: (error, response) =>
                        delete @incidentKeys[details.key]
                        @_logResponse error, response,
                            "Alert marked as Resolved in PagerDuty.", msg

            else
                @alerts.logger.error "Could not send allClear to PagerDuty. No incident key in system.", keys:@incidentKeys

        #----------

        # Details to send to PagerDuty. The properties are arbitrary:
        # * via  - Just so we know.
        # * code - The alert code ("sourceless", "disconnected").
        # * msg  - The alert description.
        # * key  - Identifies this alert so we can find the correct
        #          incidentKey when resolving. It's possible (but
        #          unlikely) that two alerts with the same key could
        #          exist at once, in which case the first would never be
        #          marked "resolved" in PagerDuty.
        _details: (msg) ->
            via         : "StreamMachine Alerts"
            code        : msg.code
            description : msg.description
            key         : "#{msg.code}/#{msg.key}"

        #----------

        # Log the PagerDuty response, whether it was a success or an error.
        _logResponse: (error, response, logText, msg) ->
            if error
                @alerts.logger.error "Error sending alert to PagerDuty: #{error}", error:error
            else
                @alerts.logger.debug logText, code:msg.code, key:msg.key
_ = require "underscore"
nodemailer = require "nodemailer"
pagerduty = require "pagerduty"
ALERT_TYPES =
sourceless:
description: "A monitored stream has lost its only source connection."
wait_for: 30
slave_disconnected:
description: "A slave server has lost its connection to the master server."
wait_for: 30
slave_unresponsive:
description: "A slave server has stopped responding to our status queries."
wait_for: 30
slave_unsynced:
description: "A slave server is out of sync with master."
wait_for: 30
# Alerts module is responsible for understanding how long we should wait
# before saying something about alert conditions. Code calls the alert
# class with a code, a key and a state.
module.exports = class Alerts extends require("events").EventEmitter
constructor: (@opts) ->
@logger = @opts.logger
@email = new Alerts.Email @, nconf.get("alerts:email") if nconf.get("alerts:email")
@pagerduty = new Alerts.PagerDuty @, nconf.get("alerts:pagerduty") if nconf.get("alerts:pagerduty")
@_states = {}
#----------
update: (code,key,active) ->
# make sure we know what this is...
if !ALERT_TYPES[code]
console.log "Unknown alert type sent: #{code} / #{key}"
return false
if !@_states[ code ]
@_states[ code ] = {}
# are we setting or unsetting?
if active
if s = @_states[ code ][ key ]
# update our timestamp
s.last_seen_at = new Date
# make sure there isn't an all-clear waiting to fire
clearTimeout s.c_timeout if s.c_timeout
delete s.c_timeout
else
# setting for the first time...
s = @_states[ code ][ key ] =
code: code
key: key
triggered_at: new Date
last_seen_at: new Date
alert_sent: false
a_timeout: null
c_timeout: null
# -- should we set a timeout for triggering an alarm? -- #
if !s.alert_sent && !s.a_timeout
s.a_timeout = setTimeout =>
@_fireAlert(s)
, ALERT_TYPES[ code ].wait_for * 1000
else
# clear an alert state if it is set
if s = @_states[ code ][ key ]
# -- is there an alert timeout set? -- #
clearTimeout s.a_timeout if s.a_timeout
delete s.a_timeout
if s.alert_sent && !s.c_timeout
# we had sent an alert, so send a note that the alert has cleared
s.c_timeout = setTimeout =>
@_fireAllClear(s)
, ALERT_TYPES[ code ].wait_for * 1000
else
# no harm, no foul
else
# they've always been good...
#----------
_fireAlert: (obj) ->
alert =
code: obj.code
key: obj.key
triggered_at: obj.triggered_at
description: ALERT_TYPES[ obj.code ].description
@logger.alert "Alert: #{obj.key} : #{ alert.description }", alert
@emit "alert", alert
# mark our alert as sent
obj.alert_sent = true
#----------
_fireAllClear: (obj) ->
alert =
code: obj.code
key: obj.key
triggered_at: obj.triggered_at
last_seen_at: obj.last_seen_at
description: ALERT_TYPES[ obj.code ].description
@logger.alert "Alert Cleared: #{obj.key} : #{ alert.description }", alert
@emit "alert_cleared", alert
# we need to delete the alert now that it has been cleared. If the
# condition returns, it will be as a new event
delete @_states[ obj.code ][ obj.key ]
#----------
class @Email
constructor: (@alerts,@opts) ->
# -- set up the transport -- #
@transport = nodemailer.createTransport(@opts.mailer_type,@opts.mailer_options)
# -- register our listener -- #
@alerts.on "alert", (msg) => @_sendAlert(msg)
@alerts.on "alert_cleared", (msg) => @_sendAllClear(msg)
#----------
_sendAlert: (msg) ->
email = _.extend {}, @opts.email_options,
subject: "[StreamMachine/#{msg.key}] #{msg.code} Alert"
generateTextFromHTML: true
html: """
<p>StreamMachine has detected an alert condition of <b>#{msg.code}</b> for <b>#{msg.key}</b>.</p>
<p>#{msg.description}</p>
<p>Condition was first detected at <b>#{msg.triggered_at}</b>.</p>
"""
@transport.sendMail email, (err,resp) =>
if err
@alerts.logger.error "Error sending alert email: #{err}", error:err
return false
@alerts.logger.debug "Alert email sent to #{email.to}.", code:msg.code, key:msg.key
#----------
_sendAllClear: (msg) ->
email = _.extend {}, @opts.email_options,
subject: "[StreamMachine/#{msg.key}] #{msg.code} Cleared"
generateTextFromHTML: true
html: """
<p>StreamMachine has cleared an alert condition of <b>#{msg.code}</b> for <b>#{msg.key}</b>.</p>
<p>#{msg.description}</p>
<p>Condition was first detected at <b>#{msg.triggered_at}</b>.</p>
<p>Condition was last seen at <b>#{msg.last_seen_at}</b>.</p>
"""
@transport.sendMail email, (err,resp) =>
if err
@alerts.logger.error "Error sending all clear email: #{err}", error:err
return false
@alerts.logger.debug "All clear email sent to #{email.to}.", code:msg.code, key:msg.key
#----------
class @PagerDuty
constructor: (@alerts, @opts) ->
@pager = new pagerduty serviceKey:@opts.serviceKey
@incidentKeys = {}
@alerts.on "alert", (msg) => @_sendAlert(msg)
@alerts.on "alert_cleared", (msg) => @_sendAllClear(msg)
#----------
# Create the initial alert in PagerDuty.
# In the callback, if the response contained an incident key,
# then we'll hold on to that so we can later resolve the alert
# using the same key.
_sendAlert: (msg) ->
details = @_details(msg)
@alerts.logger.debug "Sending alert to PagerDuty.", details:details
@pager.create
description : "[StreamMachine/#{msg.key}] #{msg.code} Alert"
details : details
callback: (error, response) =>
if response.incident_key
@incidentKeys[details.key] = response.incident_key
else
@alerts.logger.error "PagerDuty response did not include an incident key.", response:response, error:error
@_logResponse error, response,
"Alert sent to PagerDuty.", msg
#----------
# Mark the alert as "Resolved" in PagerDuty
# In the callback, whether it was an error or success, we will
# delete the incident key from the stored keys.
_sendAllClear: (msg) ->
details = @_details(msg)
@alerts.logger.debug "Sending allClear to PagerDuty.", details:details
if @incidentKeys[details.key]
@pager.resolve
incidentKey : @incidentKeys[details.key],
description : "[StreamMachine/#{msg.key}] #{msg.code} Cleared"
details : details
callback: (error, response) =>
delete @incidentKeys[details.key]
@_logResponse error, response,
"Alert marked as Resolved in PagerDuty.", msg
else
@alerts.logger.error "Could not send allClear to PagerDuty. No incident key in system.", keys:@incidentKeys
#----------
# Details to send to PagerDuty. The properties are arbitrary
# * via - Just so we know.
# * code - The alert code ("sourceless", "disconnected").
# * msg - The alert description.
# * key - A key to identify this alert. This is to help us find the
# correct incidentKey when resolving an alert. It's possible
# (but unlikely) that two alerts with the same key could
# exist at the same time, which would result in the first
# alert never being marked as "resolved" in PagerDuty.
_details: (msg) ->
via : "StreamMachine Alerts"
code : msg.code
description : msg.description
key : <KEY>
#----------
# Log the PagerDuty response, whether it was a success or an error.
_logResponse: (error, response, logText, msg) ->
if error
@alerts.logger.error "Error sending alert to PagerDuty: #{error}", error:error
else
@alerts.logger.debug logText, code:msg.code, key:msg.key | true | nconf = require "nconf"
_ = require "underscore"
nodemailer = require "nodemailer"
pagerduty = require "pagerduty"
ALERT_TYPES =
sourceless:
description: "A monitored stream has lost its only source connection."
wait_for: 30
slave_disconnected:
description: "A slave server has lost its connection to the master server."
wait_for: 30
slave_unresponsive:
description: "A slave server has stopped responding to our status queries."
wait_for: 30
slave_unsynced:
description: "A slave server is out of sync with master."
wait_for: 30
# Alerts module is responsible for understanding how long we should wait
# before saying something about alert conditions. Code calls the alert
# class with a code, a key and a state.
module.exports = class Alerts extends require("events").EventEmitter
constructor: (@opts) ->
@logger = @opts.logger
@email = new Alerts.Email @, nconf.get("alerts:email") if nconf.get("alerts:email")
@pagerduty = new Alerts.PagerDuty @, nconf.get("alerts:pagerduty") if nconf.get("alerts:pagerduty")
@_states = {}
#----------
update: (code,key,active) ->
# make sure we know what this is...
if !ALERT_TYPES[code]
console.log "Unknown alert type sent: #{code} / #{key}"
return false
if !@_states[ code ]
@_states[ code ] = {}
# are we setting or unsetting?
if active
if s = @_states[ code ][ key ]
# update our timestamp
s.last_seen_at = new Date
# make sure there isn't an all-clear waiting to fire
clearTimeout s.c_timeout if s.c_timeout
delete s.c_timeout
else
# setting for the first time...
s = @_states[ code ][ key ] =
code: code
key: key
triggered_at: new Date
last_seen_at: new Date
alert_sent: false
a_timeout: null
c_timeout: null
# -- should we set a timeout for triggering an alarm? -- #
if !s.alert_sent && !s.a_timeout
s.a_timeout = setTimeout =>
@_fireAlert(s)
, ALERT_TYPES[ code ].wait_for * 1000
else
# clear an alert state if it is set
if s = @_states[ code ][ key ]
# -- is there an alert timeout set? -- #
clearTimeout s.a_timeout if s.a_timeout
delete s.a_timeout
if s.alert_sent && !s.c_timeout
# we had sent an alert, so send a note that the alert has cleared
s.c_timeout = setTimeout =>
@_fireAllClear(s)
, ALERT_TYPES[ code ].wait_for * 1000
else
# no harm, no foul
else
# they've always been good...
#----------
_fireAlert: (obj) ->
alert =
code: obj.code
key: obj.key
triggered_at: obj.triggered_at
description: ALERT_TYPES[ obj.code ].description
@logger.alert "Alert: #{obj.key} : #{ alert.description }", alert
@emit "alert", alert
# mark our alert as sent
obj.alert_sent = true
#----------
_fireAllClear: (obj) ->
alert =
code: obj.code
key: obj.key
triggered_at: obj.triggered_at
last_seen_at: obj.last_seen_at
description: ALERT_TYPES[ obj.code ].description
@logger.alert "Alert Cleared: #{obj.key} : #{ alert.description }", alert
@emit "alert_cleared", alert
# we need to delete the alert now that it has been cleared. If the
# condition returns, it will be as a new event
delete @_states[ obj.code ][ obj.key ]
#----------
class @Email
constructor: (@alerts,@opts) ->
# -- set up the transport -- #
@transport = nodemailer.createTransport(@opts.mailer_type,@opts.mailer_options)
# -- register our listener -- #
@alerts.on "alert", (msg) => @_sendAlert(msg)
@alerts.on "alert_cleared", (msg) => @_sendAllClear(msg)
#----------
_sendAlert: (msg) ->
email = _.extend {}, @opts.email_options,
subject: "[StreamMachine/#{msg.key}] #{msg.code} Alert"
generateTextFromHTML: true
html: """
<p>StreamMachine has detected an alert condition of <b>#{msg.code}</b> for <b>#{msg.key}</b>.</p>
<p>#{msg.description}</p>
<p>Condition was first detected at <b>#{msg.triggered_at}</b>.</p>
"""
@transport.sendMail email, (err,resp) =>
if err
@alerts.logger.error "Error sending alert email: #{err}", error:err
return false
@alerts.logger.debug "Alert email sent to #{email.to}.", code:msg.code, key:msg.key
#----------
_sendAllClear: (msg) ->
email = _.extend {}, @opts.email_options,
subject: "[StreamMachine/#{msg.key}] #{msg.code} Cleared"
generateTextFromHTML: true
html: """
<p>StreamMachine has cleared an alert condition of <b>#{msg.code}</b> for <b>#{msg.key}</b>.</p>
<p>#{msg.description}</p>
<p>Condition was first detected at <b>#{msg.triggered_at}</b>.</p>
<p>Condition was last seen at <b>#{msg.last_seen_at}</b>.</p>
"""
@transport.sendMail email, (err,resp) =>
if err
@alerts.logger.error "Error sending all clear email: #{err}", error:err
return false
@alerts.logger.debug "All clear email sent to #{email.to}.", code:msg.code, key:msg.key
#----------
class @PagerDuty
constructor: (@alerts, @opts) ->
@pager = new pagerduty serviceKey:@opts.serviceKey
@incidentKeys = {}
@alerts.on "alert", (msg) => @_sendAlert(msg)
@alerts.on "alert_cleared", (msg) => @_sendAllClear(msg)
#----------
# Create the initial alert in PagerDuty.
# In the callback, if the response contained an incident key,
# then we'll hold on to that so we can later resolve the alert
# using the same key.
_sendAlert: (msg) ->
details = @_details(msg)
@alerts.logger.debug "Sending alert to PagerDuty.", details:details
@pager.create
description : "[StreamMachine/#{msg.key}] #{msg.code} Alert"
details : details
callback: (error, response) =>
if response.incident_key
@incidentKeys[details.key] = response.incident_key
else
@alerts.logger.error "PagerDuty response did not include an incident key.", response:response, error:error
@_logResponse error, response,
"Alert sent to PagerDuty.", msg
#----------
# Mark the alert as "Resolved" in PagerDuty
# In the callback, whether it was an error or success, we will
# delete the incident key from the stored keys.
_sendAllClear: (msg) ->
details = @_details(msg)
@alerts.logger.debug "Sending allClear to PagerDuty.", details:details
if @incidentKeys[details.key]
@pager.resolve
incidentKey : @incidentKeys[details.key],
description : "[StreamMachine/#{msg.key}] #{msg.code} Cleared"
details : details
callback: (error, response) =>
delete @incidentKeys[details.key]
@_logResponse error, response,
"Alert marked as Resolved in PagerDuty.", msg
else
@alerts.logger.error "Could not send allClear to PagerDuty. No incident key in system.", keys:@incidentKeys
#----------
# Details to send to PagerDuty. The properties are arbitrary
# * via - Just so we know.
# * code - The alert code ("sourceless", "disconnected").
# * msg - The alert description.
# * key - A key to identify this alert. This is to help us find the
# correct incidentKey when resolving an alert. It's possible
# (but unlikely) that two alerts with the same key could
# exist at the same time, which would result in the first
# alert never being marked as "resolved" in PagerDuty.
_details: (msg) ->
via : "StreamMachine Alerts"
code : msg.code
description : msg.description
key : PI:KEY:<KEY>END_PI
#----------
# Log the PagerDuty response, whether it was a success or an error.
_logResponse: (error, response, logText, msg) ->
if error
@alerts.logger.error "Error sending alert to PagerDuty: #{error}", error:error
else
@alerts.logger.debug logText, code:msg.code, key:msg.key |
[
{
"context": ": 'Imperium Galaktyczne'\n '#rebelTab' : 'Sojusz Rebeliancki'\n '#scumTab' : 'Szumowiny i Ni",
"end": 5359,
"score": 0.6836172938346863,
"start": 5355,
"tag": "NAME",
"value": "jusz"
},
{
"context": "rium Galaktyczne'\n '#rebelTab' : 'Sojusz Rebelia... | coffeescripts/cards-pl.coffee | michigun/xwing | 0 | exportObj = exports ? this
exportObj.codeToLanguage ?= {}
exportObj.codeToLanguage.pl = 'Polski'
exportObj.translations ?= {}
# This is here mostly as a template for other languages.
exportObj.translations['Polski'] =
action :
"Barrel Roll": "Beczka"
"Boost": "Dopalacz"
"Evade": "Unik"
"Focus": "Skupienie"
"Target Lock": "Namierzenie celu"
"Recover": "Naprawa"
"Reinforce": "Umocnienie"
"Jam": "Zakłócanie"
"Coordinate": "Koordynacja"
"Cloak": "Maskowanie"
slot:
"Astromech": "Astromech"
"Bomb": "Bomba"
"Cannon": "Działo"
"Crew": "Załoga"
"Elite": "Talent elitarny"
"Missile": "Rakiety"
"System": "System"
"Torpedo": "Torpedy"
"Turret": "Wieżyczka"
"Cargo": "Ładunek"
"Hardpoint": "Punkt konstrukcyjny"
"Team": "Drużyna"
"Illicit": "Kontrabanda"
"Salvaged Astromech": "Złomowane astromechy"
sources: # needed?
"Core": "Zestaw Podstawowy"
"A-Wing Expansion Pack": "Zestaw dodatkowy A-Wing"
"B-Wing Expansion Pack": "Zestaw dodatkowy B-Wing"
"X-Wing Expansion Pack": "Zestaw dodatkowy X-Wing"
"Y-Wing Expansion Pack": "Zestaw dodatkowy Y-Wing"
"Millennium Falcon Expansion Pack": "Zestaw dodatkowy Sokół Millennium"
"HWK-290 Expansion Pack": "Zestaw dodatkowy HWK-290"
"TIE Fighter Expansion Pack": "Zestaw dodatkowy Myśliwiec TIE"
"TIE Interceptor Expansion Pack": "Zestaw dodatkowy TIE Interceptor"
"TIE Bomber Expansion Pack": "Zestaw dodatkowy Bombowiec TIE"
"TIE Advanced Expansion Pack": "Zestaw dodatkowy TIE Advanced"
"Lambda-Class Shuttle Expansion Pack": "Zestaw dodatkowy Prom typu Lambda"
"Slave I Expansion Pack": "Zestaw dodatkowy Slave I"
"Imperial Aces Expansion Pack": "Zestaw dodatkowy Asy Imperium"
"Rebel Transport Expansion Pack": "Zestaw dodatkowy Rebeliancki transportowiec"
"Z-95 Headhunter Expansion Pack": "Zestaw dodatkowy Z-95 Łowca Głów"
"TIE Defender Expansion Pack": "Zestaw dodatkowy TIE Defender"
"E-Wing Expansion Pack": "Zestaw dodatkowy E-Wing"
"TIE Phantom Expansion Pack": "Zestaw dodatkowy TIE Phantom"
"Tantive IV Expansion Pack": "Zestaw dodatkowy Tantive IV"
"Rebel Aces Expansion Pack": "Zestaw dodatkowy Asy Rebelii"
"YT-2400 Freighter Expansion Pack": "Zestaw dodatkowy YT-2400"
"VT-49 Decimator Expansion Pack": "Zestaw dodatkowy Decimator VT-49"
"StarViper Expansion Pack": "Zestaw dodatkowy StarViper"
"M3-A Interceptor Expansion Pack": "Zestaw dodatkowy M3-A Interceptor"
"IG-2000 Expansion Pack": "Zestaw dodatkowy IG-2000"
"Most Wanted Expansion Pack": "Zestaw dodatkowy Poszukiwani"
"Imperial Raider Expansion Pack": "Zestaw dodatkowy Imperialny Patrolowiec"
ui:
shipSelectorPlaceholder: "Wybór statków"
pilotSelectorPlaceholder: "Wybór pilotów"
upgradePlaceholder: (translator, language, slot) ->
"#{translator language, 'slot', slot}"
modificationPlaceholder: "Modyfikacje"
titlePlaceholder: "Tytuł"
upgradeHeader: (translator, language, slot) ->
"Amélioration #{translator language, 'slot', slot}"
unreleased: "niewydane"
epic: "epickie"
byCSSSelector:
# Card browser
'.xwing-card-browser .translate.sort-cards-by': 'Sortuj karty po'
'.xwing-card-browser option[value="name"]': 'nazwie'
'.xwing-card-browser option[value="source"]': 'źródle'
'.xwing-card-browser option[value="type-by-points"]': 'typie (po punktach)'
'.xwing-card-browser option[value="type-by-name"]': 'typie (po nazwie)'
'.xwing-card-browser .translate.select-a-card': 'Wybierz kartę z listy po prawej'
'.xwing-card-browser .info-range td': 'Zasięg’'
# Info well
'.info-well .info-ship td.info-header': 'Statek'
'.info-well .info-skill td.info-header': 'Umiejętność pilota'
'.info-well .info-actions td.info-header': 'Akcje'
'.info-well .info-upgrades td.info-header': 'Ulepszenia'
'.info-well .info-range td.info-header': 'Zasięg'
# Squadron edit buttons
'.clear-squad' : 'Wyczyść eskadrę'
'.save-list' : 'Zapisz'
'.save-list-as' : 'Zapisz jako ...'
'.delete-list' : 'Usuń'
'.backend-list-my-squads' : 'Lista eskadr'
'.view-as-text' : '<span class="hidden-phone"><i class="icon-print"></i> Drukuj \ Wyświetl jako </span>Tekst'
'.randomize' : 'randomizuj'
'.randomize-options' : 'Opcje ...'
# Print/View modal
'.bbcode-list' : 'Skopiuj BBCode poniżej i wklej go do swojego posta.<textarea></textarea>'
'.vertical-space-checkbox' : """Dodaj miejsce na karty ulepszeń \ uszkodzeń podczas drukowania <input type="checkbox" class="toggle-vertical-space" />"""
'.color-print-checkbox' : """Wydrukuj w kolorze <input type="checkbox" class="toggle-color-print" />"""
'.print-list' : '<i class="icon-print"></i> Drukuj'
# Randomizer options
'.do-randomize' : 'Generuj'
# Top tab bar
'#empireTab' : 'Imperium Galaktyczne'
'#rebelTab' : 'Sojusz Rebeliancki'
'#scumTab' : 'Szumowiny i Nikczemnicy'
'#browserTab' : 'Przeglądarka kart'
'#aboutTab' : 'O stronie'
singular:
'pilots': 'Pilot'
'modifications': 'Modyfikacja'
'titles': 'Tytuł'
types:
'Pilot': 'Pilot'
'Modification': 'Modyfikacja'
'Title': 'Tytuł'
exportObj.cardLoaders ?= {}
exportObj.cardLoaders['Polski'] = () ->
exportObj.cardLanguage = 'Polski'
# Assumes cards-common has been loaded
basic_cards = exportObj.basicCardData()
exportObj.canonicalizeShipNames basic_cards
# English names are loaded by default, so no update is needed
exportObj.ships = basic_cards.ships
# Rename ships
exportObj.renameShip 'TIE Fighter', 'Myśliwiec TIE'
exportObj.renameShip 'TIE Bomber', 'Bombowiec TIE'
exportObj.renameShip 'Lambda-Class Shuttle', 'Prom typu Lambda'
exportObj.renameShip 'VT-49 Decimator', 'Decimator VT-49'
exportObj.renameShip 'Z-95 Headhunter', 'Z-95 Łowca głów'
exportObj.renameShip 'CR90 Corvette (Aft)', 'Korweta CR90 (rufa)'
exportObj.renameShip 'CR90 Corvette (Fore)', 'Corvette CR90 (dziób)'
exportObj.renameShip 'GR-75 Medium Transport', 'Średni transportowiec GR-75'
# Names don't need updating, but text needs to be set
pilot_translations =
# Empire
# Myśliwiec TIE
"Academy Pilot":
ship: "Myśliwiec TIE"
name: "Pilot z Akademii"
"Obsidian Squadron Pilot":
ship: "Myśliwiec TIE"
name: "Pilot Eskadry Obsydianowych"
"Black Squadron Pilot":
ship: "Myśliwiec TIE"
name: "Pilot Eskadry Czarnych"
'"Winged Gundark"':
name: "Skrzydlaty Gundark"
ship: "Myśliwiec TIE"
text: """Kiedy atakujesz w Zasięgu 1, możesz zmienić 1 ze swoich wyników %HIT% na wynik %CRIT%."""
'"Night Beast"':
name: "Nocny Potwór"
ship: "Myśliwiec TIE"
text: """Po wykonaniu zielonego manewru możesz wykonać darmową akcję skupienia."""
'"Backstabber"':
ship: "Myśliwiec TIE"
text: """Kiedy atakujesz będąc poza polem rażenia broniącego się statku, rzucasz 1 dodatkową kością ataku."""
'"Dark Curse"':
name: "Mroczna Klątwa"
ship: "Myśliwiec TIE"
text: """Kiedy się bronisz statki które cię atakują nie mogą wydawać żetonów skupienia ani przerzucać kości ataku."""
'"Mauler Mithel"':
ship: "Myśliwiec TIE"
text: """Kiedy atakujesz w Zasięgu 1, rzucasz 1 dodatkową kością ataku."""
'"Howlrunner"':
ship: "Myśliwiec TIE"
text: """Kiedy inny przyjazny statek w zasięgu 1 atakuje przy pomocy swojej podstawowej broni, może przerzucić 1 kość ataku."""
# TIE Interceptor
"Alpha Squadron Pilot":
name: "Pilot Eskadry Alfa"
"Avenger Squadron Pilot":
name: "Pilot Eskadry Mścicieli"
"Saber Squadron Pilot":
name: "Pilot Eskadry Szabel"
"Royal Guard Pilot":
name: "Pilot imperialny gwardzista"
"\"Fel's Wrath\"":
name: "\"Gniew Fel'a\""
text: """Zostajesz zniszczony dopiero pod koniec fazy walki, w której liczba kart uszkodzeń przypisanych do ciebie będzie równa, lub wyższa od twojej wartości kadłuba."""
"Lieutenant Lorrir":
name: "Porucznik Lorrir"
text: """Kiedy wykonujesz akcję "beczka", możesz otrzymać 1 żeton stresu, aby zamiast wzornika manewru (%STRAIGHT% 1) użyć (%BANKLEFT% 1) lub (%BANKRIGHT% 1)."""
"Kir Kanos":
text: """Kiedy atakujesz w Zasięgu 2-3, możesz wydać 1 żeton uników, aby dodać 1 %HIT% do swojego wyniku."""
"Tetran Cowall":
text: """Kiedy ujawnisz manewr %UTURN% możesz traktować go tak, jakby jego prędkość wynosiła "1", "3" lub "5"."""
"Turr Phennir":
text: """Po wykonaniu ataku możesz wykonać darmową akcję "dopalacz" lub "beczka"."""
"Carnor Jax":
text: """Wrogie statki w Zasięgu 1 nie mogą wykonywać akcji "skupienie" oraz "unik", ani wydawać żetonów skupienia i uników."""
"Soontir Fel":
text: """Kiedy otrzymujesz żeton stresu, możesz przypisać do swojego statku 1 żeton skupienia."""
# TIE Phantom
"Sigma Squadron Pilot":
name: "Pilot Eskadry Sigma"
"Shadow Squadron Pilot":
name: "Pilot Eskadry Cieni"
'"Echo"':
text: """Kiedy się demaskujesz musisz użyć wzornika manewru (%BANKLEFT% 2) lub (%BANKRIGHT% 2) zamiast wzornika (%STRAIGHT% 2)."""
'"Whisper"':
name: "Szept"
text: """Po tym jak wykonasz atak, który trafi cel, możesz przypisać do swojego statku 1 żeton skupienia."""
# TIE Defender
"Onyx Squadron Pilot":
name: "Pilot Eskadry Onyx"
"Delta Squadron Pilot":
name: "Pilot Eskadry Delta"
"Colonel Vessery":
name: "Pułkownik Vessery"
text: """Kiedy atakujesz, zaraz po swoim rzucie kośćmi ataku możesz namierzyć obrońcę, jeśli ma na sobie czerwony żeton namierzonego celu."""
"Rexler Brath":
text: """Po tym jak wykonasz atak, który zada obrońcy co najmniej jedną kartę uszkodzenia, możesz wydać żeton skupienia aby odkryć te karty."""
# Bombowiec TIE
"Scimitar Squadron Pilot":
ship: "Bombowiec TIE"
name: "Pilot Eskadry Sejmitarów"
"Gamma Squadron Pilot":
ship: "Bombowiec TIE"
name: "Pilot Eskadry Gamma"
"Captain Jonus":
ship: "Bombowiec TIE"
name: "Kapitan Jonus"
text: "Kiedy inny przyjazny statek w Zasięgu 1 atakuje przy pomocy dodatkowej broni, może przerzucić maksymalnie 2 kości ataku."
"Major Rhymer":
ship: "Bombowiec TIE"
text: "Kiedy atakujesz przy pomocy dodatkowej broni, możesz zwiększyć lub zmniejszyć zasięg broni o 1. Musisz przy tym zachować limit zasięgu 1-3."
# TIE Advanced
"Tempest Squadron Pilot":
name: "Pilot Eskadry Burzy"
"Storm Squadron Pilot":
name: "Pilot Eskadry Szturmu"
"Maarek Stele":
text: """Kiedy twój atak zadaje obrońcy odkrytą kartę uszkodzenia, wylosuj 3 karty uszkodzeń, wybierz 1 z nich, którą zadajesz, a pozostałe odrzuć."""
"Darth Vader":
text: """Podczas swojego kroku "Wykonywania akcji" możesz wykonać 2 akcje."""
"Commander Alozen":
name: "Komandor Alozen"
text: """Na początku fazy walki możesz namierzyć wrogi statek znajdujący się w Zasięgu 1 od ciebie."""
# Firespray
"Bounty Hunter":
name:"Łowca nagród"
"Kath Scarlet":
text: """Kiedy atakujesz, obrońca otrzymuje 1 żeton stresu, jeśli anuluje co najmniej jeden wynik %CRIT%."""
"Boba Fett":
text: """Kiedy ujawniasz manewr skrętu (%BANKLEFT% lub %BANKRIGHT%) możesz przestawić swój wskaźnik manewrów na drugi manewr skrętu o tej samej prędkości."""
"Krassis Trelix":
text: """Kiedy atakujesz przy pomocy dodatkowej broni, możesz przerzucić 1 kość ataku."""
# Prom typu Lambda
"Captain Kagi":
ship: "Prom typu Lambda"
name: "Kapitan Kagi"
text: """Kiedy wrogi statek namierza cel, musi namierzyć twój statek, jeśli to możliwe."""
"Colonel Jendon":
name: "Pułkownik Jendon"
ship: "Prom typu Lambda"
text: """Na początku fazy walki możesz przypisać 1 ze swoich niebieskich żetonów namierzonego celu do przyjaznego statku w Zasięgu 1, jeśli ten nie ma niebieskiego żetonu namierzonego celu."""
"Captain Yorr":
ship: "Prom typu Lambda"
name: "Kapitan Yorr"
text: """Kiedy inny przyjazny statek w Zasięgu 1-2 ma otrzymać żeton stresu, gdy ty masz 2 lub mniej żetonów stresu, możesz przypisać do siebie ten żeton stresu."""
"Omicron Group Pilot":
ship: "Prom typu Lambda"
name: "Pilot grupy Omicron"
# VT-49 Decimator
"Captain Oicunn":
ship: "Decimator VT-49"
name: "Kapitan Oicunn"
text: """Po wykonaniu manewru, każdy wrogi statek z którym się stykasz, otrzymuje 1 uszkodzenie."""
"Rear Admiral Chiraneau":
ship: "Decimator VT-49"
name: "Kontradmirał Chiraneau"
text: """Kiedy atakujesz w Zasięgu 1-2, możesz zmienić jeden ze swoich wyników %FOCUS% na wynik %CRIT%."""
"Patrol Leader":
ship: "Decimator VT-49"
name: "Dowódca Patrolu"
"Commander Kenkirk":
ship: "Decimator VT-49"
name: "Komandor Kenkirk"
text: """Jeśli nie masz żadnych osłon i masz przypisaną co najmniej 1 kartę uszkodzenia, wartość twojej zwrotności wzrasta o 1."""
"Juno Eclipse":
text: """When you reveal your maneuver, you may increase or decrease its speed by 1 (to a minimum of 1)."""
"Zertik Strom":
text: """Enemy ships at Range 1 cannot add their range combat bonus when attacking."""
"Lieutenant Colzet":
text: """At the start of the End phase, you may spend a target lock you have on an enemy ship to flip 1 random facedown Damage card assigned to it faceup."""
# Rebels
# X-Wing
"Red Squadron Pilot":
name: "Pilot Eskadry Czerwonych"
"Rookie Pilot":
name: "Niedoświadczony pilot"
"Wedge Antilles":
text: """Kiedy atakujesz zredukuj wartość zwrotności obrońcy o 1 (do minimum 0)."""
"Garven Dreis":
text: """Po tym jak wydasz żeton skupienia możesz umieścić dany żeton na dowolnym innym przyjaznym statku w Zasięgu 1-2 (zamiast go odrzucać)."""
"Biggs Darklighter":
text: """Inne przyjazne statki w Zasięgu 1 nie mogą być wybierane na cel ataku, jeśli atakujący może na cel wybrać ciebie."""
"Luke Skywalker":
text: """Kiedy się bronisz, możesz zmienić 1 ze swoich wyników %FOCUS% na wynik %EVADE%."""
"Wes Janson":
text: """Po wykonaniu ataku możesz usunąć z obrońcy 1 żeton skupienia, uników, lub niebieski żeton namierzonego celu."""
"Jek Porkins":
text: """Kiedy otrzymujesz żeton stresu możesz usunąć go i rzucić 1 kością ataku. Jeśli wypadnie %HIT%, ten statek otrzymuje 1 zakrytą kartę uszkodzenia."""
'"Hobbie" Klivian':
text: """Kiedy zdobywasz lub wydajesz żeton namierzonego celu, możesz usunąć ze swojego statku 1 żeton stresu."""
"Tarn Mison":
text: """Kiedy wrogi statek wybiera cię na cel ataku, możesz namierzyć ten statek."""
# Y-Wing
"Gold Squadron Pilot":
name: "Pilot Eskadry Złotych"
"Gray Squadron Pilot":
name: "Pilot Eskadry Szarych"
'"Dutch" Vander':
text: """Po namierzeniu celu wybierz przyjazny statek w Zasięgu 1-2. Wybrany statek może natychmiast namierzyć cel."""
"Horton Salm":
text: """Kiedy atakujesz w Zasięgu 2-3, możesz przerzucić dowolne ze swoich kości, na których wypadły puste ścianki."""
# A-Wing
"Green Squadron Pilot":
name: "Pilot Eskadry Zielonych"
"Prototype Pilot":
name: "Pilot prototypu"
"Tycho Celchu":
text: """Możesz wykonywać akcje nawet jeśli posiadasz żetony stresu."""
"Arvel Crynyd":
text: """Możesz wybrać na cel swojego ataku wrogi statek, z którym się stykasz, jeżeli ten znajduje się w twoim polu rażenia."""
# YT-1300
"Outer Rim Smuggler":
name: "Przemytnik z Zewnętrznych Rubieży"
"Chewbacca":
text: """Kiedy otrzymujesz odkrytą kartę uszkodzenia, natychmiast ją zakryj (bez rozpatrywania jej efektu)."""
"Lando Calrissian":
text: """Po wykonaniu zielonego manewru wybierz jeden inny przyjazny statek w Zasięgu 1. Statek ten może wykonać 1 darmową akcję przedstawioną na jego pasku akcji."""
"Han Solo":
text: """Kiedy atakujesz możesz przerzucić wszystkie swoje kości ataku. Jeśli zdecydujesz się to zrobić musisz przerzucić tyle ze swoich kości, ile możesz."""
# B-Wing
"Dagger Squadron Pilot":
name: "Pilot Eskadry Sztyletów"
"Blue Squadron Pilot":
name: "Pilot Eskadry Niebieskich"
"Ten Numb":
text: """Kiedy atakujesz, 1 z twoich wyników %CRIT% nie może być anulowany przy pomocy kości obrony."""
"Ibtisam":
text: """Kiedy atakujesz lub się bronisz mając co najmniej 1 żeton stresu, możesz przerzucić jedną ze swoich kości."""
# HWK-290
"Rebel Operative":
name: "Agent rebeliantów"
"Roark Garnet":
text: '''Na początku fazy walki wybierz 1 inny przyjazny statek w zasięgu 1-3. Do końca tej fazy traktuj wartość umiejętności tego pilota jakby wynosiła "12".'''
"Kyle Katarn":
text: """Na początku fazy walki możesz przypisać 1 ze swoich żetonów skupienia do innego przyjaznego statku w Zasięgu 1-3."""
"Jan Ors":
text: """Kiedy inny przyjazny statek w Zasięgu 1-3 atakuje, gdy nie masz żadnych żetonów stresu, możesz otrzymać 1 żeton stresu aby umożliwić mu rzut 1 dodatkową kością ataku."""
# Z-95
"Bandit Squadron Pilot":
name: "Pilot Eskadry Bandytów"
ship: "Z-95 Łowca głów"
"Tala Squadron Pilot":
name: "Pilot Eskadry Tala"
ship: "Z-95 Łowca głów"
"Lieutenant Blount":
name: "Porucznik Blount"
ship: "Z-95 Łowca głów"
text: "Kiedy atakujesz, obrońca zostaje trafiony twoim atakiem nawet jeśli nie otrzymał żadnych uszkodzeń."
"Airen Cracken":
ship: "Z-95 Łowca głów"
text: """Po wykonaniu ataku możesz wybrać inny przyjazny statek w Zasięgu 1. Dany statek może wykonać 1 darmową akcję."""
# E-Wing
"Knave Squadron Pilot":
name: "Pilot Eskadry Szelm"
"Blackmoon Squadron Pilot":
name: "Pilot Eskadry Czarnego Księżyca"
"Etahn A'baht":
text: """Kiedy wrogi statek w twoim polu rażenia, w Zasięgu 1-3 się broni, atakujący może zmienić 1 z jego wyników %HIT% na wynik %CRIT%."""
"Corran Horn":
text: """Na początku fazy końcowej możesz wykonać jeden atak. Nie możesz atakować w następnej rundzie."""
# Asy Rebelii
"Jake Farrell":
text: """Po tym jak wykonasz akcję skupienia lub zostanie ci przypisany żeton skupienia, możesz wykonać darmową akcję "dopalacz" lub "beczka"."""
"Gemmer Sojan":
text: """Dopóki znajdujesz się w Zasięgu 1 od co najmniej 1 wrogiego statku, zwiększ swoją wartość zwrotności o 1."""
"Keyan Farlander":
text: """Kiedy atakujesz możesz usunąć 1 żeton stresu aby zmienić wszystkie swoje wyniki %FOCUS% na %HIT%."""
"Nera Dantels":
text: """Możesz wykonać atak dodatkową bronią %TORPEDO%, skierowany przeciwko wrogim statkom znajdującym się poza twoim polem rażenia."""
# Transportowiec Rebelii
"GR-75 Medium Transport":
name: "Średni transportowiec GR-75"
ship: "Średni transportowiec GR-75"
# CR90
"CR90 Corvette (Fore)":
ship: "Korweta CR90 (dziób)"
name: "Korweta CR90 (dziób)"
text: """Kiedy atakujesz przy pomocy swojej głównej broni, możesz wydać 1 żeton energii aby rzucać 1 dodatkową kością ataku."""
"CR90 Corvette (Aft)":
ship: "Korweta CR90 (rufa)"
name: "Korweta CR90 (rufa)"
# YT-2400
"Dash Rendar":
text: """Podczas fazy aktywacji i w czasie wykonywania akcji możesz ignorować przeszkody."""
'"Leebo"':
text: """Kiedy otrzymujesz odkrytą kartę uszkodzenia, dobierz 1 dodatkową kartę uszkodzenia. Rozpatrz jedną z nich a drugą odrzuć."""
"Eaden Vrill":
text: """Podczas wykonywania ataku przy pomocy broni podstawowej, którego celem jest statek z żetonem stresu, rzucasz 1 dodatkową kością."""
"Wild Space Fringer":
name: "Outsider z Dzikiej Przestrzeni"
# Scum and Villainy
# Starviper
"Prince Xizor":
name: "Książe Xizor"
text: """Kiedy się bronisz, przyjazny statek w Zasięgu 1 może otrzymać 1 nieanulowany wynik %HIT% lub %CRIT% (zamiast ciebie)."""
"Guri":
text: """Na początku fazy walki, jeśli jesteś w Zasięgu 1 od wrogiego statku, możesz przypisać do swojego statku 1 żeton skupienia."""
"Black Sun Vigo":
name: "Vigo Czarnego Słońca"
"Black Sun Enforcer":
name: "Wysłannik Czarnego Słońca"
# Interceptor M3-A
"Cartel Spacer":
name: "Pilot kartelu"
"Tansarii Point Veteran":
name: "Weteran Tansarii Point"
"Serissu":
text: """Kiedy inny przyjazny statek w Zasięgu 1 się broni, może przerzucić 1 kość obrony."""
"Laetin A'shera":
text: "Po tym jak obronisz się przed atakiem, jeśli atak nie trafił, możesz przypisać do swojego statku 1 żeton uniku."
# Agressor
"IG-88A":
text: """Po tym jak wykonasz atak, który zniszczy obrońcę, możesz odzyskać 1 osłonę."""
"IG-88B":
text: """Raz na rundę, po tym jak wykonasz atak, który nie trafi w wybrany cel, możesz wykonać atak przy pomocy dodatkowej broni %CANNON%, w którą jesteś wyposażony."""
"IG-88C":
text: """Po tym jak wykonasz akcję „dopalacz” możesz przypisać do swojego statku 1 żeton uniku."""
"IG-88D":
text: """Możesz wykonać manewr (%SLOOPLEFT% 3) lub (%SLOOPRIGHT% 3) używając odpowiednio wzornika (%TURNLEFT% 3) lub (%TURNRIGHT% 3)."""
# Firespray
"Boba Fett (Scum)":
name: """Boba Fett (Szumowiny)"""
text: """Kiedy atakujesz lub się bronisz możesz przerzucić 1 ze swoich kości za każdy wrogi statek w Zasięgu 1."""
"Kath Scarlet (Scum)":
name: """Kath Scarlet (Szumowiny)"""
text: """Kiedy atakujesz statek znajdujący się w twoim pomocniczym polu rażenia, rzucasz 1 dodatkową kością ataku."""
"Emon Azzameen":
text: """Kiedy zrzucasz bombę, możesz użyć wzornika [%TURNLEFT% 3], [%STRAIGHT% 3], lub [%TURNRIGHT% 3] (zamiast wzornika [%STRAIGHT% 1])."""
"Mandalorian Mercenary":
name: "Mandaloriański najemnik"
# Y-Wing
"Kavil":
text: """Kiedy atakujesz statek znajdujący się poza twoim polem rażenia, rzucasz 1 dodatkową kością ataku."""
"Drea Renthal":
text: """Po tym jak wydasz żeton namierzonego celu, możesz otrzymać 1 żeton stresu, aby namierzyć cel."""
"Hired Gun" :
name: "Najemnik"
"Syndicate Thug":
name: "Zbir z syndykatu"
# HWK
"Dace Bonearm":
text: """Kiedy wrogi statek w Zasięgu 1-3 otrzyma co najmniej jeden żeton jonów, a ty nie masz żetonu stresu, możesz otrzymać 1 żeton stresu aby sprawić, żeby dany statek otrzymał 1 uszkodzenie."""
"Palob Godalhi":
text: """Na początku fazy walki możesz usunąć 1 żeton skupienia lub uników z wrogiego statku w Zasięgu 1-2 i przypisać go do siebie."""
"Torkil Mux":
text: """Na koniec fazy aktywacji wybierz jeden wrogi statek w Zasięgu 1-2. Do końca fazy walki wartość umiejętności pilota wybranego statku wynosi "0"."""
"Spice Runner":
name: "Przemytnik przyprawy"
# Z-95
"Black Sun Soldier":
name: "Żołnierz Czarnego Słońca"
ship: "Z-95 Łowca głów"
"Binayre Pirate":
name: "Pirat z Binayre"
ship: "Z-95 Łowca głów"
"N'Dru Suhlak":
ship: "Z-95 Łowca głów"
text: """Kiedy atakujesz rzucasz 1 dodatkową kością ataku, jeśli w Zasięgu 1-2 nie ma żadnych innych przyjaznych statków."""
"Kaa'To Leeachos":
ship: "Z-95 Łowca głów"
text: """Na początku fazy walki możesz usunąć 1 żeton skupienia lub uników z innego przyjaznego statku w Zasięgu 1-2 i przypisać go do siebie."""
"Latts Razzi":
text: """When a friendly ship declares an attack, you may spend a target lock you have on the defender to reduce its agility by 1 for that attack."""
"Graz the Hunter":
text: """When defending, if the attacker is inside your firing arc, roll 1 additional defense die."""
"Esege Tuketu":
text: """When another friendly ship at Range 1-2 is attacking, it may treat your focus tokens as its own."""
'"Redline"':
text: """You may maintain 2 target locks on the same ship. When you acquire a target lock, you may acquire a second lock on that ship."""
'"Deathrain"':
text: """When dropping a bomb, you may use the front guides of your ship. After dropping a bomb, you may perform a free barrel roll action."""
"Moralo Eval":
text: """You can perform %CANNON% secondary attacks against ships inside your auxiliary firing arc."""
'Gozanti-class Cruiser':
text: """After you execute a maneuver, you may deploy up to 2 attached ships."""
'"Scourge"':
text: """When attacking a defender that has 1 or more Damage cards, roll 1 additional attack die."""
"The Inquisitor":
text: """When attacking with your primary weapon at Range 2-3, treat the range of the attack as Range 1."""
"Zuckuss":
text: """When attacking, you may roll 1 additional attack die. If you do, the defender rolls 1 additional defense die."""
"Dengar":
text: """Once per round after defending, if the attacker is inside your firing arc, you may perform an attack against that ship."""
upgrade_translations =
#Elitarna zdolność
"Determination":
name: "Determinacja"
text: """Kiedy otrzymujesz odkrytą kartę uszkodzenia z cechą "Pilot", natychmiast ją odrzuć bez rozpatrywania jej efektu."""
"Swarm Tactics":
name: "Taktyka roju"
text: """Na początku fazy walki wybierz 1 przyjazny statek w Zasięgu 1. Do końca tej fazy traktuj wybrany statek, jakby jego wartość umiejętności pilota była równa twojej."""
"Squad Leader":
name: "Dowódca eskadry"
text: """<strong>Akcja:</strong> Wybierz 1 statek w Zasięgu 1-2, który ma niższą wartość umiejętności pilota niż ty. %LINEBREAK% Wybrany statek może natychmiast wykonać 1 darmową akcję."""
"Expert Handling":
name: "Mistrzowskie manewrowanie"
text: """<strong>Akcja:</strong> Wykonaj darmową akcję „beczka”. Jeśli nie masz symbolu akcji %BARRELROLL%, otrzymujesz 1 żeton stresu. Następnie możesz usunąć jeden wrogi żeton namierzonego celu znajdujący się na Twoim statku."""
"Marksmanship":
name: "Celność"
text: """<strong>Akcja:</strong> Kiedy atakujesz w tej rundzie, możesz zamienić 1 swój wynik %FOCUS% na %CRIT%, a wszystkie pozostałe wyniki %FOCUS% na %HIT%."""
"Daredevil":
name: "Ryzykant"
text: """<strong>Akcja:</strong> Wykonaj biały manewr (%TURNLEFT% 1) lub (%TURNRIGHT% 1)". Następnie otrzymujesz żeton stresu. %LINEBREAK% Następnie, jeśli nie masz symbolu akcji %BOOST%, rzuć 2 kośćmi ataku. Otrzymujesz wszystkie wyrzucone uszkodzenia %HIT% i uszkodzenia krytyczne %CRIT%."""
"Elusiveness":
name: "Nieuchwytność"
text: """Kiedy się bronisz możesz otrzymać 1 żeton stresu, aby wybrać jedną kość ataku. Atakujący musi przerzucić tę kość. Nie możesz skorzystać z tej zdolności, jeśli masz co najmniej 1 żeton stresu."""
"Push the Limit":
name: "Na granicy ryzyka"
text: """Raz na rundę po wykonaniu akcji, możesz wykonać 1 darmową akcję przedstawioną na twoim pasku akcji. Następnie otrzymujesz 1 żeton stresu."""
"Deadeye":
name: "Strzelec wyborowy"
text: """Możesz traktować nagłówek <strong>"Atak (namierzony cel):"</strong> jako <strong>"Atak (skupienie):"</strong>. %LINEBREAK% Kiedy atak zmusza cię do wydania żetonu namierzonego celu, możesz zamiast niego wydać żeton skupienia."""
"Expose":
name: "Odsłonięcie"
text: """<strong>Akcja:</strong> Do końca rundy zwiększ wartość swojej podstawowej broni o 1 i zmniejsz wartość zwrotności o 1."""
"Wingman":
name: "Skrzydłowy"
text: """Na początku fazy walki usuń 1 żeton stresu z innego przyjaznego statku w Zasięgu 1."""
"Decoy":
name: "Zmyłka"
text: """Na początku fazy walki możesz wybrać 1 przyjazny statek w Zasięgu 1-2. Do końca fazy zamieniasz się z danym statkiem wartościami umiejętności pilota."""
"Outmaneuver":
name: "Wymanewrowanie"
text: """Kiedy atakujesz statek w swoim polu rażenia, a nie znajdujesz się w polu rażenia danego statku, zmniejsz jego wartość zwrotności o 1 (do minimum 0)."""
"Predator":
name: "Drapieżnik"
text: """Kiedy atakujesz, możesz przerzucić 1 kość ataku. Jeśli wartość umiejętności pilota obrońcy wynosi "2" lub mniej, możesz przerzucić maksymalnie 2 kości ataku (zamiast 1)."""
"Draw Their Fire":
name: "Ściągnięcie ognia"
text: """Kiedy przyjazny statek w Zasięgu 1 zostaje trafiony podczas ataku, możesz zamiast niego otrzymać 1 z nie anulowanych %CRIT%."""
"Adrenaline Rush":
name: "Zastrzyk adrenaliny"
text: """Kiedy ujawnisz czerwony manewr, możesz odrzucić tę kartę, aby do końca fazy aktywacji traktować ten manewr jako biały."""
"Veteran Instincts":
name: "Instynkt weterana"
text: """Zwiększ swoją wartość umiejętności pilota o 2."""
"Opportunist":
name: "Oportunista"
text: """Kiedy atakujesz, jeśli obrońca nie ma żadnych żetonów skupienia ani uników, możesz otrzymać 1 żeton stresu aby rzucić 1 dodatkową kością ataku.%LINEBREAK%Nie możesz skorzystać z tej zdolności, jeśli posiadasz żetony stresu."""
"Lone Wolf":
name : "Samotny wilk"
text: """Jeśli w zasięgu 1-2 nie ma żadnych innych przyjaznych statków, kiedy się bronisz lub atakujesz, możesz przerzucić 1 wynik z pustą ścianką."""
"Stay On Target":
name: "Utrzymać cel"
text: """Kiedy ujawnisz swój manewr możesz obrócić swój wskaźnik na inny manewr o tej samej prędkości.%LINEBREAK%Traktuj ten manewr jako czerwony."""
"Ruthlessness":
name: "Bezwzględność"
text: """%PL_IMPERIALONLY%%LINEBREAK% Po tym jak przeprowadzisz atak, który trafi w cel, musisz wybrać 1 inny statek w Zasięgu 1 od obrońcy (nie siebie). Statek ten otrzymuje 1 uszkodzenie."""
"Intimidation":
name: "Zastraszenie"
text: """Dopóki stykasz się z wrogim statkiem, jego zwrotność zostaje zmniejszona o 1."""
"Calculation":
name: "Kalkulacje"
text: """Kiedy atakujesz, możesz wydać żeton skupienia, aby zmienić jeden ze swoich wyników %FOCUS% na wynik %CRIT%."""
"Bodyguard":
name : "Ochroniarz"
text: """%PL_SCUMONLY%%LINEBREAK% Na początku fazy walki możesz wydać żeton skupienia aby wybrać przyjazny statek w Zasięgu 1 o wartości umiejętności pilota wyższej od ciebie. Do końca rundy zwiększ jego wartość zwrotności o 1."""
# Astromechs
"R2 Astromech":
name: "Astromech R2"
text: """Możesz traktować wszystkie manewry o prędkości 1 i 2, jakby były to zielone manewry."""
"R2-D2":
text: """Po wykonaniu zielonego manewru możesz odzyskać 1 osłonę (nie przekraczając swojej wartości osłon)."""
"R2-F2":
text: """<strong>Akcja:</strong> Do końca tej rundy zwiększ swoją wartość zwrotności o 1."""
"R5-D8":
text: """<strong>Akcja:</strong> Rzuć jedną kością obrony.%LINEBREAK% Jeżeli wypadnie wynik %EVADE% lub %FOCUS%, odrzuć jedną ze swoich zakrytych kart uszkodzeń."""
"R5-K6":
text: """Po wydaniu swojego żetonu namierzonego celu rzuć 1 kością obrony.%LINEBREAK% Jeżeli wypadnie %EVADE% natychmiast zdobywasz żeton namierzonego celu dla tego samego statku. Nie możesz wydać nowego żetonu namierzonego celu podczas tego ataku."""
"R5 Astromech":
name: "Astromech R5"
text: """Podczas fazy końcowej możesz wybrać 1 ze swoich odkrytych kart z cechą "Statek" i ją zakryć."""
"R7 Astromech":
name: "Astromech R7"
text: """Raz na rundę kiedy się bronisz, jeśli namierzasz atakującego, możesz wydać żeton namierzonego celu aby wybrać dowolną liczbę kości ataku. Atakujący musi przerzucić wybrane kości."""
"R7-T1":
text: """<strong>Akcja:</strong> Wybierz wrogi statek w Zasięgu 1-2. Jeżeli znajdujesz się w polu rażenia wybranego statku, możesz namierzyć dany statek. Następnie możesz wykonać darmową akcję "dopalacz"."""
"R4-D6":
text: """Kiedy zostaniesz trafiony w wyniku ataku, a pośród wyników rzutu są co najmniej 3 nieanulowane wyniki %HIT% możesz wybrać i anulować wszystkie poza 2. Za każdy wynik anulowany w ten sposób otrzymujesz 1 żeton stresu."""
"R5-P9":
text: """Na koniec fazy walki możesz wydać jeden ze swoich żetonów skupienia, aby odzyskać 1 osłonę (nie przekraczając swojej wartości osłon)."""
"R3-A2":
text: """Kiedy wybierzesz cel ataku, jeżeli obrońca znajduje się w twoim polu rażenia, możesz otrzymać 1 żeton stresu, aby sprawić żeby obrońca otrzymał 1 żeton stresu."""
"R2-D6":
text: """Twój pasek rozwinięć zyskuje symbol %ELITE%.%LINEBREAK% Nie możesz przypisać tej karty rozwinięcia do swojego statku jeżeli masz już symbol rozwinięcia %ELITE% lub jeżeli wartość umiejętności pilota wynosi 2 lub mniej."""
# Torpilles
"Proton Torpedoes":
name: "Torpedy protonowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz zmienić 1 ze swoich wyników %FOCUS% na wynik %CRIT%."""
"Advanced Proton Torpedoes":
name: "Zaaw. torpedy protonowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz zmienić maksymalnie 3 swoje puste ścianki na wyniki %FOCUS%."""
"Flechette Torpedoes":
name: "Torpedy rozpryskowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Po wykonaniu tego ataku obrońca otrzymuje 1 żeton stresu jeżeli jego wartość kadłuba wynosi 4 lub mniej."""
"Ion Torpedoes":
name: "Torpedy jonowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Jeżeli ten atak trafi w wybrany cel, obrońca oraz każdy statek w Zasięgu 1 od niego otrzymuje 1 żeton jonów."""
"Bomb Loadout":
name: "Ładunek bomb"
text: """<span class="card-restriction">Tylko Y-wing. Ograniczenie.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol %BOMB%."""
# Tourelles
"Ion Cannon Turret":
name: "Wieżyczka z działem jonowym"
text: """<strong>Atak:</strong> Zaatakuj 1 statek (nawet poza twoim polem rażenia). %LINEBREAK%Jeśli atak ten trafi w wybrany statek, otrzymuje on 1 uszkodzenie oraz 1 żeton jonów. Następnie anuluj wszystkie wyniki kości."""
"Blaster Turret":
name: "Wieżyczka blasterowa"
text: """<strong>Atak (skupienie):</strong> Wydaj 1 żeton skupienia, aby zaatakować 1 statek (nawet poza twoim polem rażenia)."""
"Autoblaster Turret":
name: "Wieżyczka autoblasterowa"
text: """<strong>Atak:</strong> Zaatakuj 1 statek (nawet poza twoim polem rażenia). %LINEBREAK%Twoje wyniki %HIT% nie mogą być anulowane przy pomocy kości obrony. Obrońca może anulować wyniki %CRIT% przed %HIT%."""
# Missiles
"Concussion Missiles":
name: "Rakiety wstrząsowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz zmienić 1 ze swoich wyników z pustą ścianką na wynik %HIT%."""
"Cluster Missiles":
name: "Rakiety kasetonowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby przeprowadzić ten atak dwukrotnie."""
"Homing Missiles":
name: "Rakiety samonaprowadzające"
text: """<strong>Atak (namierzony cel):</strong> Odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Podczas tego ataku obrońca nie może wydawać żetonów uniku."""
"Assault Missiles":
name: "Rakiety szturmowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Jeśli ten atak trafi w wybrany cel, każdy inny statek w Zasięgu 1 od obrońcy otrzymuje 1 uszkodzenie."""
"Ion Pulse Missiles":
name: "Jonowe rakiety pulsacyjne"
text: """<strong>Atak (namierzony cel):</strong> Odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Jeśli ten atak trafi, obrońca otrzymuje 1 uszkodzenie oraz 2 żetony jonów."""
"Chardaan Refit":
name: "Naprawy na Chardaanie"
text: """<span class="card-restriction">Tylko A-wing.</span>%LINEBREAK%Ta karta ma ujemny koszt w punktach eskadry."""
"Proton Rockets":
name: "Rakiety protonowe"
text: """<strong>Atak (skupienie):</strong> Odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz rzucić dodatkowymi kośćmi ataku w liczbie równej twojej zwrotności (nie więcej niż 3)."""
# Bombes
"Seismic Charges":
name: "Ładunki sejsmiczne"
text: """Kiedy odkrywasz swój wskaźnik manewrów, możesz odrzucić tę kartę aby zrzucić 1 żeton ładunku sejsmicznego. %LINEBREAK%Żeton ten zostanie zdetonowany na koniec fazy aktywacji."""
"Proximity Mines":
name: "Miny zbliżeniowe"
text: """<strong>Akcja:</strong> odrzuć tę kartę aby zrzucić 1 żeton miny zbliżeniowej. Kiedy statek wykona manewr w wyniku którego podstawka statku lub wzornik manewru będzie nachodzić na ten żeton, żeton ten zostaje zdetonowany."""
"Proton Bombs":
name: "Bomby protonowe"
text: """Kiedy odkrywasz swój wskaźnik manewrów, możesz odrzucić tę kartę aby zrzucić 1 żeton bomby protonowej. %LINEBREAK%Żeton ten zostanie zdetonowany na koniec fazy aktywacji."""
# Canons
"Ion Cannon":
name: "Działo Jonowe"
text: """<strong>Atak:</strong> Zaatakuj 1 statek. %LINEBREAK%Jeżeli ten atak trafi wybrany cel, obrońca otrzymuje 1 uszkodzenie oraz 1 żeton jonów. Następnie anuluj wszystkie wyniki kości."""
"Heavy Laser Cannon":
name: "Ciężkie działo laserowe"
text: """<strong>Atak:</strong> Zaatakuj 1 statek. %LINEBREAK%Natychmiast po rzucie swoimi kośćmi ataku musisz zmienić wszystkie swoje wyniki %CRIT% na wyniki %HIT%."""
"Autoblaster":
text: """<strong>Atak:</strong> Zaatakuj 1 statek. %LINEBREAK%Twoje wyniki %HIT% nie mogą być anulowane przez kości obrony. Obrońca może anulować wyniki %CRIT% przed wynikami %HIT%."""
"Flechette Cannon":
name: "Działo rozpryskowe"
text: """<strong>Atak:</strong> Zaatakuj 1 statek. %LINEBREAK%Jeżeli ten atak trafi, obrońca otrzymuje 1 uszkodzenie i, jeśli nie jest zestresowany, otrzymuje także 1 żeton stresu. Następnie anuluj wszystkie wyniki kości."""
'"Mangler" Cannon':
name: "Działo typu Mangler"
text: """<strong>Atak:</strong> Zaatakuj 1 statek. %LINEBREAK%Kiedy atakujesz, możesz zmienić jeden ze swoich wyników %HIT% na wynik %CRIT%."""
# Systèmes
"Enhanced Scopes":
name: "Wzmocnione radary"
text: """Podczas fazy aktywacji traktuj swoją wartość umiejętności pilota jakby wynosiła "0"."""
"Fire-Control System":
name: "System kontroli ognia"
text: """Po tym jak wykonasz atak, możesz namierzyć obrońcę."""
"Advanced Sensors":
name: "Zaawansowane sensory"
text: """Zaraz przed tym jak ujawnisz swój manewr, możesz wykonać 1 darmową akcję. %LINEBREAK%Jeżeli skorzystasz z tej zdolności, musisz w tej rundzie pominąć swój krok "Wykonywania akcji"."""
"Sensor Jammer":
name: "Zakłócacz sensorów"
text: """Kiedy się bronisz możesz zmienić 1 z wyników %HIT% atakującego na wynik %FOCUS%. Atakujący nie może przerzucić kości ze zmienionym wynikiem."""
"Accuracy Corrector":
name: "Korektor celności"
text: """Kiedy atakujesz, możesz anulować wszystkie swoje wyniki kości. Następnie możesz dodać 2 wyniki %HIT%.%LINEBREAK% Podczas tego ataku nie można ponownie modyfikować twoich kości."""
"Advanced Targeting Computer":
name: "Zaawansowany komputer celowniczy"
text: """<span class="card-restriction">Tylko TIE Advanced.</span>%LINEBREAK% Kiedy atakujesz namierzonego przez siebie przeciwnika przy pomocy broni podstawowej, do wyniku rzutu kośćmi możesz dodać jeden wynik %CRIT%. Jeżeli to zrobisz, podczas tego ataku nie możesz wydać żetonu namierzonego celu."""
# Équipages
"Gunner":
name: "Artylerzysta"
text: """Po wykonaniu ataku, który nie trafił w wybrany cel, natychmiast wykonaj atak główną bronią. W tej rundzie nie możesz wykonać kolejnego ataku."""
"Mercenary Copilot":
name: "Najemny drugi pilot"
text: """Kiedy atakujesz w Zasięgu 3 możesz zmienić 1 ze swoich wyników %HIT% na wynik %CRIT%."""
"Weapons Engineer":
name: "Inżynier uzbrojenia"
text: """Możesz namierzać naraz 2 statki (każdy wrogi statek możesz namierzać tylko raz). %LINEBREAK%Kiedy namierzasz cel, możesz namierzyć 2 różne statki."""
"Luke Skywalker":
text: """%PL_REBELONLY%%LINEBREAK%Po wykonaniu ataku, który nie trafi w wybrany cel, natychmiast wykonaj atak główną bronią. Możesz zmienić 1 wynik %FOCUS% na %HIT%."""
"Nien Nunb":
text: """%PL_REBELONLY%%LINEBREAK%Możesz traktować wszystkie manewry %STRAIGHT%, jakby były to zielone manewry."""
"Chewbacca":
text: """%PL_REBELONLY%%LINEBREAK%Kiedy otrzymujesz kartę uszkodzenia, możesz natychmiast odrzucić tę kartę i odzyskać 1 żeton osłony. Następnie odrzuć tę kartę rozwinięcia."""
"Recon Specialist":
name: "Specjalista zwiadu"
text: """Kiedy wykonujesz akcję skupienia, przypisz do swojego statku 1 dodatkowy żeton skupienia."""
"Saboteur":
name: "Sabotażysta"
text: """<strong>Akcja:</strong> Wybierz 1 wrogi statek w Zasięgu 1 i rzuć 1 kością ataku. Jeśli wypadnie %HIT% lub %CRIT%, wylosuj 1 zakrytą kartę uszkodzenia przypisaną do tego statku, odkryj ją i rozpatrz."""
"Intelligence Agent":
name: "Agent wywiadu"
text: """Na początku fazy aktywacji wybierz 1 wrogi statek w zasięgu 1-2. Możesz podejrzeć manewr wybrany przez ten statek."""
"Darth Vader":
text: """%PL_IMPERIALONLY%%LINEBREAK%Po tym jak wykonasz atak skierowany przeciwko wrogiemu statkowi, możesz otrzymać 2 uszkodzenia, aby zadać temu statkowi 1 krytyczne uszkodzenie."""
"Rebel Captive":
name: "Rebeliancki jeniec"
text: """%PL_IMPERIALONLY%%LINEBREAK%Raz na rundę, pierwszy statek, który zadeklaruje ciebie jako cel ataku, natychmiast otrzymuje 1 żeton stresu."""
"Flight Instructor":
name: "Instruktor pilotażu"
text: """Kiedy się bronisz, możesz przerzucić 1 ze swoich wyników %FOCUS%. Jeśli wartość umiejętności atakującego pilota wynosi "2" lub mniej, zamiast tego przerzuć 1 ze swoich pustych ścianek."""
"Navigator":
name: "Nawigator"
text: """Kiedy ujawnisz swój manewr, możesz obrócić swój wskaźnik na inny manewr tego samego kierunku. %LINEBREAK%Nie możesz przekręcić wskaźnika na czerwony manewr, jeśli posiadasz jakieś żetony stresu."""
"Lando Calrissian":
text: """%PL_REBELONLY%%LINEBREAK%<strong>Akcja:</strong> Rzuć 2 kośćmi obrony. Za każdy uzyskany wynik %FOCUS% przypisz do swojego statku 1 żeton skupienia. Za każdy wynik %EVADE% przypisz do swojego statku 1 żeton uniku."""
"Mara Jade":
text: """%PL_IMPERIALONLY%%LINEBREAK% Na koniec fazy walki każdy wrogi statek w Zasięgu 1, który nie ma żetonu stresu, otrzymuje żeton stresu."""
"Fleet Officer":
name: "Oficer floty"
text: """%PL_IMPERIALONLY%%LINEBREAK%<strong>Akcja:</strong> Wybierz maksymalnie 2 przyjazne statki w Zasięgu 1-2 i do każdego przypisz po 1 żetonie skupienia, następnie otrzymujesz 1 żeton stresu."""
"Han Solo":
text: """%PL_REBELONLY%%LINEBREAK%Kiedy atakujesz, jeśli namierzyłeś obrońcę, możesz wydać żeton namierzonego celu aby zmienić wszystkie swoje wyniki %FOCUS% na %HIT%."""
"Leia Organa":
text: """%PL_REBELONLY%%LINEBREAK%Na początku fazy aktywacji możesz odrzucić tę kartę, aby umożliwić wszystkim przyjaznym statkom, które ujawiniają czerwony manewr, traktowanie do końca fazy tego manewru jako białego."""
"WED-15 Repair Droid":
name: "Droid naprawczy WED-15"
text: """%PL_HUGESHIPONLY%%LINEBREAK%<strong>Akcja:</strong> Wydaj 1 żeton energii aby odrzucić 1 ze swoich zakrytych kart uszkodzeń albo wydaj 3 żetony energii aby odrzucić 1 ze swoich odkrytych kart uszkodzeń."""
"Carlist Rieekan":
text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%Na początku fazy aktywacji możesz odrzucić tę kartę aby do końca fazy traktować wartość umiejętności pilota każdego przyjaznego statku jakby wynosiła "12"."""
"Jan Dodonna":
text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%Kiedy inny przyjazny statek w Zasięgu 1 wykonuje atak, możesz zmienić 1 z jego wyników %HIT% na %CRIT%."""
"Tactician":
name: "Taktyk"
text: "Po tym jak wykonasz atak przeciwko statkowi znajdującemu się w twoim polu rażenia w Zasięgu 2, statek ten otrzymuje 1 żeton stresu."
"R2-D2 (Crew)":
name: "R2-D2 (Załoga)"
text: """%PL_REBELONLY%%LINEBREAK%Na koniec fazy końcowej, jeśli nie masz żadnych osłon, możesz odzyskać 1 osłonę i rzucić 1 kością ataku. Jeśli wypadnie %HIT% odkryj 1 losową ze swoich zakrytych kart uszkodzeń i ją rozpatrz."""
"C-3PO":
text: """%PL_REBELONLY%%LINEBREAK%Raz na rundę, zanim wykonasz rzut co najmniej 1 kością obrony, możesz na głos zgadnąć liczbę wyników %EVADE%. Jeśli wypadło tyle %EVADE% (przed modyfikacjami) dodaj 1 wynik %EVADE%."""
"Kyle Katarn":
text: """%PL_REBELONLY%%LINEBREAK%Po tym jak usuniesz ze swojego statku żeton stresu, możesz przypisać do swojego statku żeton skupienia."""
"Jan Ors":
text: """%PL_REBELONLY%%LINEBREAK%Raz na rundę, kiedy przyjazny statek w Zasięgu 1-3 wykonuje akcję skupienia lub miałby otrzymać żeton skupienia, możesz danemu statkowi przypisać żeton uniku (zamiast skupienia)."""
"Toryn Farr":
text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%<strong>Akcja:</strong> Wydaj dowolną ilość żetonów energii aby wybrać taką samą liczbę wrogich statków w Zasięgu 1-2. Usuń z wybranych statków wszystkie żetony skupienia, uników i niebieskie żetony namierzonego celu."""
"Targeting Coordinator":
name: "Koordynator namierzania"
text: """<strong>Energia:</strong> Możesz wydać 1 żeton energii aby wybrać 1 przyjazny statek w Zasięgu 1-2. Namierz cel, a następnie przydziel do wybranego statku niebieski żeton namierzonego celu."""
"Raymus Antilles":
text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%Na początku fazy aktywacji wybierz 1 wrogi statek w Zasięgu 1-3. Możesz podejrzeć manewr wybrany dla tego statku. Jeżeli jest on biały, przydziel do niego 1 żeton stresu."""
'"Leebo"':
text: """%PL_REBELONLY%%LINEBREAK%<strong>Akcja:</strong> wykonaj darmową akcję "dopalacz". Następnie otrzymujesz 1 żeton jonów."""
"Dash Rendar":
text: """%PL_REBELONLY%%LINEBREAK%Możesz wykonywać ataki kiedy nachodzisz na przeszkodę. %LINEBREAK%Twoje ataki nie mogą być przyblokowane."""
"Ysanne Isard":
text: """%PL_IMPERIALONLY%%LINEBREAK%Na początku fazy walki, jeśli nie masz żadnych osłon, a do twojego statku przypisana jest co najmniej 1 karta uszkodzenia, możesz wykonać darmową akcję unik."""
"Moff Jerjerrod":
text: """%PL_IMPERIALONLY%%LINEBREAK%Kiedy otrzymujesz odkrytą kartę uszkodzenia, możesz odrzucić to rozwinięcie lub inną kartę rozwinięcia [crew] aby zakryć tę kartę uszkodzenia (bez rozpatrywania jej efektu)."""
"Greedo":
text: """%PL_SCUMONLY%%LINEBREAK%Za pierwszym razem kiedy atakujesz lub bronisz sie w każdej rundzie, pierwsza przypisana karta uszkodzenia jest odkryta."""
"Outlaw Tech":
name: "Mechanik wyjęty spod prawa"
text: """%PL_SCUMONLY%%LINEBREAK%Po wykonaniu czerwonego manewru, możesz przypisać do swojego statku 1 żeton skupienia."""
"K4 Security Droid":
name: "Droid ochroniarz K4"
text: """%PL_SCUMONLY%%LINEBREAK%Po wykonaniu zielonego manewru możesz namierzyć cel."""
# Soute
"Frequency Jammer":
name: "Zakłócacz częstotliwości"
text: """Kiedy wykonujesz akcję Zakłócanie, wybierz 1 wrogi statek, który nie ma żetonu stresu i znajduje się w Zasięgu 1 od zakłócanego statku. Wybrany statek otrzymuje 1 żeton stresu."""
"Expanded Cargo Hold":
ship: "Średni transportowiec GR-75"
name: "Powiększona ładownia"
text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Raz na rundę, kiedy masz otrzymać odkrytą kartę uszkodznia, możesz dobrać te kartę z talii uszkodzeń dziobu lub rufy."""
"Comms Booster":
name: "Wzmacniacz łączności"
text: """<strong>Energia:</strong> Wydaj 1 żeton energii aby usunąć wszystkie żetony stresu z przyjaznego statku w Zasięgu 1-3, następnie przydziel do tego statku 1 żeton skupienia."""
"Slicer Tools":
name: "Narzędzia hakera"
text: """<strong>Akcja:</strong> Wybierz co najmniej 1 wrogi statek w Zasięgu 1-3, na ktorym znajduje się żeton stresu. Za każdy wybrany statek możesz wydać 1 żeton energii aby sprawić, żeby dany statek otrzymał 1 uszkodzenie."""
"Shield Projector":
name: "Projektor osłon"
text: """Kiedy wrogi statek stanie się podczas fazy walki, możesz wydać 3 żetony energii aby do końca fazy zmusić go do zaatakowania ciebie, jeśli to możliwe."""
"Tibanna Gas Supplies":
name: "Zapasy gazu Tibanna"
text: """<strong>Energia:</strong> Możesz odrzucić tę kartę aby otrzymać 3 żetony energii."""
"Ionization Reactor":
name: "Reaktor jonizacyjny"
text: """<strong>Energia:</strong> Wydaj 5 żetonów energii z tej karty i odrzuć tą kartę aby sprawić żeby każdy statek w Zasięgu 1 otrzymał 1 uszkodzneie i 1 żeton jonów."""
"Engine Booster":
name: "Dopalacz silnika"
text: """Tuż przed tym jak odkryjesz swój wskaźnik manewrów, możesz wydać 1 żeton energii aby wykonać biały manewr (%STRAIGHT% 1). Nie możesz skorzystać z tej zdolności, jeśli w jej wyniku będziesz nachodzić na inny statek."""
"Backup Shield Generator":
name: "Zapasowy generator osłon"
text: """Na koniec każdej rudny możesz wydać 1 żeton energii aby odzyskać 1 osłonę (nie przekraczając swojej wartości osłon)."""
"EM Emitter":
name: "Emiter elektro-magnetyczny"
text: """Kiedy przyblokujesz atak, obrońca rzuca 3 dodatkowymi kośmi obrony (zamiast 1)."""
# Hardpiont
"Ion Cannon Battery":
name: "Bateria działa jonowego"
text: """<strong>Atak (energia):</strong> Aby wykonać ten atak, wydaj 2 żetony energii z tej karty. Jeżeli atak ten trafi w wybrany statek, otrzymuje on 1 krytyczne uszkodzenie oraz 1 żeton jonów. Następnie anuluj wszystkie wyniki kości."""
"Single Turbolasers":
name: "Pojedyńcze Turbolasery"
text: """<strong>Atak (energia):</strong> Wydaj 2 żetony energii z tej karty aby wykonać ten atak. Obronca podwaja swoją wartość zwrotności przeciwko temu atakowi. Możesz zmienić jeden ze swoich wyników %FOCUS% na %HIT%."""
"Quad Laser Cannons":
name: "Poczwórne działka laserowe"
text: """<strong>Atak (energia):</strong> Wydaj 1 żeton energii z tej karty aby wykonać ten atak. Jeśli ten atak nie trafi, możesz natychmiast wydać 1 żeton energii z tej karty aby ponownie przeprowadzić ten atak."""
# Équipe
"Gunnery Team":
name: "Zespół artylerzystów"
text: """Raz na rundę kiedy atakujesz przy pomocy daodatkowej broni, możesz wydać 1 żeton energii aby zmienić 1 ze swoich pustych wyników na %HIT%."""
"Sensor Team":
name: "Zespół obsługi sensorów"
text: """Kiedy namierzasz cel, możesz namierzyć wrogi statek w Zasięgu 1-5 (zamiast Zasięgu 1-3)."""
"Engineering Team":
name: "Zespół techników"
text: """Podczas fazy aktywacji, kiedy ujawnisz manewr %STRAIGHT%, otrzymujesz 1 dodatkowy żeton energii podczas kroku Otrzymywania energii."""
# Illégal
"Inertial Dampeners":
name: "Tłumiki inercyjne"
text: """Kiedy ujawniasz swój manewr, możesz odrzucić tę kartę żeby zamiast tego wykonać biały manewr [0%STOP%]. Następnie otrzymujesz 1 żeton stresu."""
"Dead Man's Switch":
name: "Włącznik samobójcy"
text: """Kiedy zostajesz zniszczony, każdy statek w Zasięgu 1 otrzymuje 1 uszkodzenie."""
"Feedback Array":
name: "System zwrotny"
text: """Podczas fazy walki, zamiast wykonywać jakiekolwiek ataki, możesz otrzymać 1 żeton jonów i 1 uszkodzenie aby wybrać wrogi statek w Zasięgu 1. Wybrany statek otrzymuje 1 uszkodzenie."""
'"Hot Shot" Blaster':
name: "Gorący strzał"
text: """<strong>Atak:</strong> Odrzuć tę kartę, aby zaatakować 1 statek (nawet poza twoim polem rażenia)."""
# Astromech récupéré
"Salvaged Astromech":
name: "Astromech z odzysku"
text: "Kiedy otrzymujesz kartę uszkodzenia z cechą Statek, natychmiast możesz ją odrzucić (przed rozpatrzeniem efektu). %LINEBREAK%Następnie odrzuć tę kartę rozwinięcia.%LINEBREAK%."
'"Genius"':
name: "Geniusz"
text: """Jeśli jesteś wyposażony w bombę, która może zostać zrzucona przed ujawnieniem twojego manewru, zamiast tego możesz ją zrzucić po tym jak wykonasz swój manewr."""
"Unhinged Astromech":
name: "Szalony astromech"
text: """Możesz traktować manewry o prędkości 3 jako zielone."""
"R4-B11":
text: """Kiedy atakujesz namierzonego przez siebie obrońcę, możesz wydać żeton namierzonego celu aby wybrać dowolne kości obrony (nawet wszystkie). Następnie obrońca musi przerzucić wybrane przez ciebie kości."""
"R4 Agromech":
name: "Agromech R4"
text: """Kiedy atakujesz, po wydaniu żetonu skupienia, możesz namierzyć obrońcę."""
"Emperor Palpatine":
text: """%IMPERIALONLY%%LINEBREAK%Once per round, you may change a friendly ship's die result to any other die result. That die result cannot be modified again."""
"Bossk":
text: """%SCUMONLY%%LINEBREAK%After you perform an attack that does not hit, if you are not stressed, you <strong>must</strong> receive 1 stress token. Then assign 1 focus token to your ship and acquire a target lock on the defender."""
"Lightning Reflexes":
text: """%SMALLSHIPONLY%%LINEBREAK%After you execute a white or green maneuver on your dial, you may discard this card to rotate your ship 180°. Then receive 1 stress token <strong>after</strong> the "Check Pilot Stress" step."""
"Twin Laser Turret":
text: """<strong>Attack:</strong> Perform this attack <strong>twice</strong> (even against a ship outside your firing arc).<br /><br />Each time this attack hits, the defender suffers 1 damage. Then cancel <strong>all</strong> dice results."""
"Plasma Torpedoes":
text: """<strong>Attack (target lock):</strong> Spend your target lock and discard this card to perform this attack.<br /><br />If this attack hits, after dealing damage, remove 1 shield token from the defender."""
"Ion Bombs":
text: """When you reveal your maneuver dial, you may discard this card to <strong>drop</strong> 1 ion bomb token.<br /><br />This token <strong>detonates</strong> at the end of the Activation phase.<br /><br /><strong>Ion Bombs Token:</strong> When this bomb token detonates, each ship at Range 1 of the token receives 2 ion tokens. Then discard this token."""
"Conner Net":
text: """<strong>Action:</strong> Discard this card to <strong>drop</strong> 1 Conner Net token.<br /><br />When a ship's base or maneuver template overlaps this token, this token <strong>detonates</strong>.<br /><br /><strong>Conner Net Token:</strong> When this bomb token detonates, the ship that moved through or overlapped this token suffers 1 damage, receives 2 ion tokens, and skips its "Perform Action" step. Then discard this token."""
"Bombardier":
text: """When dropping a bomb, you may use the (%STRAIGHT% 2) template instead of the (%STRAIGHT% 1) template."""
"Cluster Mines":
text: """<strong>Action:</strong> Discard this card to <strong>drop</strong> 3 cluster mine tokens.<br /><br />When a ship's base or maneuver template overlaps a cluster mine token, that token <strong>detonates</strong>.<br /><br /><strong>Cluster Mines Tokens:</strong> When one of these bomb tokens detonates, the ship that moved through or overlapped this token rolls 2 attack dice and suffers all damage (%HIT%) rolled. Then discard this token."""
'Crack Shot':
text: '''When attacking a ship inside your firing arc, you may discard this card to cancel 1 of the defender's %EVADE% results.'''
"Advanced Homing Missiles":
text: """<strong>Attack (target lock):</strong> Discard this card to perform this attack.%LINEBREAK%If this attack hits, deal 1 faceup Damage card to the defender. Then cancel <strong>all</strong> dice results."""
'Agent Kallus':
text: '''%IMPERIALONLY%%LINEBREAK%At the start of the first round, choose 1 enemy small or large ship. When attacking or defending against that ship, you may change 1 of your %FOCUS% results to a %HIT% or %EVADE% result.'''
'XX-23 S-Thread Tracers':
text: """<strong>Attack (focus):</strong> Discard this card to perform this attack. If this attack hits, each friendly ship at Range 1-2 of you may acquire a target lock on the defender. Then cancel <strong>all</strong> dice results."""
"Tractor Beam":
text: """<strong>Attack:</strong> Attack 1 ship.%LINEBREAK%If this attack hits, the defender receives 1 tractor beam token. Then cancel <strong>all</strong> dice results."""
"Cloaking Device":
text: """%SMALLSHIPONLY%%LINEBREAK%<strong>Action:</strong> Perform a free cloak action.%LINEBREAK%At the end of each round, if you are cloaked, roll 1 attack die. On a %FOCUS% result, discard this card, then decloak or discard your cloak token."""
# Polish translations for Modification upgrade cards.
# Keys are the canonical English card names used by cards-common;
# an optional `ship` must match the (renamed) Polish ship name.
modification_translations =
    "Shield Upgrade":
        name: "Ulepszenie osłon"
        text: """Zwiększ wartość swoich osłon o 1."""
    "Advanced Cloaking Device":
        name: "Zaawansowany system maskowania"
        text: """<span class="card-restriction">Tylko TIE Phantom.</span>%LINEBREAK%Po tym jak wykonasz atak, możesz wykonać darmową akcję maskowanie."""
        ship: "TIE Phantom"
    "Stealth Device":
        name: "Urządzenie maskujące"
        text: """Zwiększ wartość swojej zwrotności o 1. Jeśli zostaniesz trafiony podczas ataku, odrzuć tę kartę."""
    "Engine Upgrade":
        name: "Ulepszenie silnika"
        # The card grants an action icon, so it modifies the action bar
        # ("pasek akcji"), not the upgrade bar — cf. "Targeting Computer" below.
        text: """Twój pasek akcji zyskuje symbol akcji %BOOST%."""
    "Anti-Pursuit Lasers":
        name: "Lasery antypościgowe"
        text: """%PL_LARGESHIPONLY%Po tym jak wrogi statek wykona manewr, który sprawi że będzie zachodzić na ciebie, rzuć 1 kością ataku. Jeśli wypadnie %HIT% lub %CRIT%, wrogi statek otrzymuje 1 uszkodzenie."""
    "Targeting Computer":
        name: "Komputer celowniczy"
        text: """Twój pasek akcji zyskuje symbol akcji %TARGETLOCK%."""
    "Hull Upgrade":
        name: "Ulepszenie kadłuba"
        text: """Zwiększ wartość swojego kadłuba o 1."""
    "Munitions Failsafe":
        name: "Zabezpieczenie amunicji"
        text: """Kiedy atakujesz przy pomocy broni dodatkowej, która nakazuje odrzucenie karty po wykonaniu ataku, nie odrzucasz jej jeśli atak nie trafi."""
    "Stygium Particle Accelerator":
        name: "Akcelerator cząsteczek stygium"
        text: """Kiedy się demaskujesz lub wykonasz akcję maskowanie, możesz wykonać darmową akcję unik."""
    "Combat Retrofit":
        name: "Modyfikacja bojowa"
        text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Zwiększ wartość swojego kadłuba o 2 i wartość swoich osłon o 1."""
        # was the French "Transport moyen GR-75" — must match the Polish ship name
        ship: "Średni transportowiec GR-75"
    "B-Wing/E2":
        text: """<span class="card-restriction">Tylko B-wing.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol rozwinięcia %CREW%."""
    "Countermeasures":
        name: "Środki profilaktyczne"
        text: """%PL_LARGESHIPONLY%%LINEBREAK%Na początku fazy walki możesz odrzucić tę kartę, aby do końca rundy zwiększyć swoją zwrotność o 1. Następnie możesz usunąć ze swojego statku 1 wrogi żeton namierzonego celu."""
    "Experimental Interface":
        name: "Eksperymentalny interfejs"
        text: """Raz na rundę, po tym jak wykonasz akcję, możesz wykonać 1 darmową akcję z karty rozwinięcia z nagłówkiem <strong>Akcja:</strong>, w którą jesteś wyposażony. Następnie otrzymujesz 1 żeton stresu."""
    "Tactical Jammer":
        name: "Zakłócacz taktyczny"
        text: """%PL_LARGESHIPONLY%%LINEBREAK%Twój statek może przyblokowywać wrogie ataki."""
    "Autothrusters":
        name: "Autodopalacze"
        text: """Kiedy się bronisz, jeśli jesteś poza Zasięgiem 2 albo znajdujesz się poza polem rażenia atakującego, możesz zmienić 1 ze swoich pustych wyników na %EVADE%. Możesz wyposażyć swój statek w tę kartę tylko jeśli masz symbol akcji %BOOST%."""
    # NOTE(review): the entries below have not been translated to Polish yet.
    "Twin Ion Engine Mk. II":
        text: """You may treat all bank maneuvers (%BANKLEFT% and %BANKRIGHT%) as green maneuvers."""
    "Maneuvering Fins":
        text: """When you reveal a turn maneuver (%TURNLEFT% or %TURNRIGHT%), you may rotate your dial to the corresponding bank maneuver (%BANKLEFT% or %BANKRIGHT%) of the same speed."""
    "Ion Projector":
        text: """%LARGESHIPONLY%%LINEBREAK%After an enemy ship executes a maneuver that causes it to overlap your ship, roll 1 attack die. On a %HIT% or %CRIT% result, the enemy ship receives 1 ion token."""
# Polish translations for Title upgrade cards.
# Keys are the canonical English card names used by cards-common;
# an optional `ship` must match the (renamed) Polish ship name.
title_translations =
    "Slave I":
        text: """<span class="card-restriction">Tylko Firespray-31.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol %TORPEDO%."""
    "Millennium Falcon":
        # spelling aligned with the expansion pack name ("Sokół Millennium")
        name: "Sokół Millennium"
        text: """<span class="card-restriction">Tylko YT-1300.</span>%LINEBREAK% Twój pasek akcji zyskuje symbol akcji %EVADE%."""
    "Moldy Crow":
        text: """<span class="card-restriction">Tylko HWK-290.</span>%LINEBREAK%Podczas fazy końcowej nie usuwaj ze swojego statku niewykorzystanych żetonów skupienia."""
    "ST-321":
        # was still in French ("Navette de classe Lambda") — translated to Polish
        ship: "Prom typu Lambda"
        text: """<span class="card-restriction">Tylko Prom typu Lambda.</span>%LINEBREAK%Kiedy namierzasz cel, możesz namierzyć dowolny wrogi statek znajdujący się w obszarze gry."""
    "Royal Guard TIE":
        ship: "TIE Interceptor"
        name: "TIE Imperialnego Gwardzisty"
        text: """<span class="card-restriction">Tylko TIE Interceptor.</span>%LINEBREAK%Możesz dołączyć do swojego statku maksymalnie 2 różne karty Modyfikacji (zamiast 1). Nie możesz dołączyć tej karty do swojego statku, jeśli wartość umiejętności pilota wynosi "4" lub mniej."""
    "Dodonna's Pride":
        name: "Duma Dodonny"
        ship: "Korweta CR90 (dziób)"
        text: """<span class="card-restriction">Tylko sekcja dziobowa CR90.</span>%LINEBREAK%Kiedy wykonujesz akcję "Koordynacja", możesz wybrać 2 przyjazne statki (zamiast 1). Statki te mogą wykonać po 1 darmowej akcji."""
    "A-Wing Test Pilot":
        name: "Pilot testowy A-winga"
        text: """<span class="card-restriction">Tylko A-wing.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol rozwinięcia %ELITE%. Nie możesz wyposażyć się w 2 takie same karty rozwinięcia [elite talent]. Nie możesz wyposażyć się w tę kartę, jeśli twoja wartość umiejętności pilota wynosi "1" lub mniej."""
    "Tantive IV":
        ship: "Korweta CR90 (dziób)"
        text: """<span class="card-restriction">Tylko sekcja dziobowa CR90.</span>%LINEBREAK%Twój pasek rozwinięć sekcji dziobowej zyskuje po 1 symbolu rozwinięcia %CREW% i %TEAM%."""
    "Bright Hope":
        ship: "Średni transportowiec GR-75"
        text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Żetony wsparcia przypisane do twojej sekcji dziobowej dostają 2 wyniki %EVADE% (zamiast 1)."""
    "Quantum Storm":
        ship: "Średni transportowiec GR-75"
        text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Na początku fazy końcowej, jeśli masz nie więcej niż 1 żeton energii, otrzymujesz 1 żeton energii."""
    "Dutyfree":
        ship: "Średni transportowiec GR-75"
        text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Kiedy wykonujesz akcję Zakłócenie, możesz wybrać wrogi statek w Zasięgu 1-3 (zamiast Zasięgu 1-2)."""
    "Jaina's Light":
        ship: "Korweta CR90 (dziób)"
        text: """<span class="card-restriction">Tylko sekcja dziobowa CR90.</span>%LINEBREAK%Kiedy się bronisz, raz na atak, jeśli otrzymujesz odkrytą kartę uszkodzenia, możesz ją odrzucić i dobrać nową odkrytą kartę uszkodzenia."""
    "Outrider":
        text: """<span class="card-restriction">Tylko YT-2400.</span>%LINEBREAK%Dopóki jesteś wyposażony w kartę rozwinięcia [cannon], nie możesz wykonywać ataków bronią podstawową. Przy pomocy dodatkowej broni [cannon] możesz wykonywać ataki skierowane przeciwko statkom znajdującym się poza twoim polem rażenia. """
    "Dauntless":
        # was the French "Décimateur VT-49" — must match the Polish ship name
        ship: "Decimator VT-49"
        text: """<span class="card-restriction">Tylko Decimator VT-49.</span>%LINEBREAK%Po tym jak wykonasz manewr, który sprawi że będziesz nachodzić na inny statek, możesz wykonać 1 darmową akcję. Następnie otrzymujesz 1 żeton stresu."""
    "Virago":
        text: """<span class="card-restriction">Tylko StarViper.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbole rozwinięć %SYSTEM% i %ILLICIT%. Nie możesz wyposażyć swojego statku w tę kartę jeśli wartość umiejętności twojego pilota wynosi „3” lub mniej."""
    '"Heavy Scyk" Interceptor (Cannon)':
        name: 'Interceptor typu Heavy Scyk (Działo)'
        ship: "Interceptor M3-A"
        text: """<span class="card-restriction">Tylko Interceptor M3-A.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje jeden z symboli rozwinięć: %CANNON%, %TORPEDO% lub %MISSILE%."""
    '"Heavy Scyk" Interceptor (Torpedo)':
        name: 'Interceptor typu Heavy Scyk (Torpeda)'
        ship: "Interceptor M3-A"
        text: """<span class="card-restriction">Tylko Interceptor M3-A.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje jeden z symboli rozwinięć: %CANNON%, %TORPEDO% lub %MISSILE%."""
    '"Heavy Scyk" Interceptor (Missile)':
        # was the half-French 'Intercepteur "Scyk Lourd" (Rakieta)' — now matches siblings
        name: 'Interceptor typu Heavy Scyk (Rakieta)'
        ship: "Interceptor M3-A"
        text: """<span class="card-restriction">Tylko Interceptor M3-A.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje jeden z symboli rozwinięć: %CANNON%, %TORPEDO% lub %MISSILE%."""
    "IG-2000":
        text: """<span class="card-restriction">Tylko Aggressor.</span>%LINEBREAK%Masz zdolność pilota każdego innego przyjaznego statku z kartą ulepszenia IG-2000 (jako dodatek do swojej zdolności pilota)."""
    "BTL-A4 Y-Wing":
        text: """<span class="card-restriction">Tylko Y-wing.</span>%LINEBREAK%Nie możesz atakować statków znajdujących się poza twoim polem rażenia. Po wykonaniu ataku przy pomocy broni podstawowej, możesz natychmiast wykonać atak przy pomocy dodatkowej broni %TURRET%."""
    "Andrasta":
        text: """<span class="card-restriction">Tylko Firespray-31.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje dwa symbole %BOMB%."""
    "TIE/x1":
        text: """<span class="card-restriction">Tylko TIE Advanced.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol rozwinięcia %SYSTEM%. %LINEBREAK%Koszt przypisanej do tego statku karty rozwinięcia %SYSTEM% jest obniżony o 4 punkty (do minimum 0)."""
    # NOTE(review): the entries below have not been translated to Polish yet.
    "Ghost":
        text: """<span class="card-restriction">VCX-100 only.</span>%LINEBREAK%Equip the <em>Phantom</em> title card to a friendly Attack Shuttle and dock it to this ship.%LINEBREAK%After you execute a maneuver, you may deploy it from your rear guides."""
    "Phantom":
        text: """While you are docked, the <em>Ghost</em> can perform primary weapon attacks from its special firing arc, and, at the end of the Combat phase, it may perform an additional attack with an equipped %TURRET%. If it performs this attack, it cannot attack again this round."""
    "TIE/v1":
        text: """<span class="card-restriction">TIE Advanced Prototype only.</span>%LINEBREAK%After you acquire a target lock, you may perform a free evade action."""
    "Mist Hunter":
        text: """<span class="card-restriction">G-1A starfighter only.</span>%LINEBREAK%Your upgrade bar gains the %BARRELROLL% Upgrade icon.%LINEBREAK%You <strong>must</strong> equip 1 "Tractor Beam" Upgrade card (paying its squad point cost as normal)."""
    "Punishing One":
        text: """<span class="card-restriction">JumpMaster 5000 only.</span>%LINEBREAK%Increase your primary weapon value by 1."""
# Merge the per-category translation dicts into the basic card data and
# install the result as the active card set.
exportObj.setupCardData basic_cards, pilot_translations, upgrade_translations, modification_translations, title_translations
| 192824 | exportObj = exports ? this
# Register the "pl" language code with its display name.
exportObj.codeToLanguage ?= {}
exportObj.codeToLanguage.pl = 'Polski'
# Ensure the shared translation registry exists before this file adds to it.
exportObj.translations ?= {}
# This is here mostly as a template for other languages.
# Polish strings for the builder UI itself (actions, slots, sources,
# widget placeholders, and CSS-selector-addressed static text).
exportObj.translations['Polski'] =
    action:
        "Barrel Roll": "Beczka"
        "Boost": "Dopalacz"
        "Evade": "Unik"
        "Focus": "Skupienie"
        "Target Lock": "Namierzenie celu"
        "Recover": "Naprawa"
        "Reinforce": "Umocnienie"
        "Jam": "Zakłócanie"
        "Coordinate": "Koordynacja"
        "Cloak": "Maskowanie"
    slot:
        "Astromech": "Astromech"
        "Bomb": "Bomba"
        "Cannon": "Działo"
        "Crew": "Załoga"
        "Elite": "Talent elitarny"
        "Missile": "Rakiety"
        "System": "System"
        "Torpedo": "Torpedy"
        "Turret": "Wieżyczka"
        "Cargo": "Ładunek"
        "Hardpoint": "Punkt konstrukcyjny"
        "Team": "Drużyna"
        "Illicit": "Kontrabanda"
        "Salvaged Astromech": "Złomowane astromechy"
    sources: # needed?
        "Core": "Zestaw Podstawowy"
        "A-Wing Expansion Pack": "Zestaw dodatkowy A-Wing"
        "B-Wing Expansion Pack": "Zestaw dodatkowy B-Wing"
        "X-Wing Expansion Pack": "Zestaw dodatkowy X-Wing"
        "Y-Wing Expansion Pack": "Zestaw dodatkowy Y-Wing"
        "Millennium Falcon Expansion Pack": "Zestaw dodatkowy Sokół Millennium"
        "HWK-290 Expansion Pack": "Zestaw dodatkowy HWK-290"
        "TIE Fighter Expansion Pack": "Zestaw dodatkowy Myśliwiec TIE"
        "TIE Interceptor Expansion Pack": "Zestaw dodatkowy TIE Interceptor"
        "TIE Bomber Expansion Pack": "Zestaw dodatkowy Bombowiec TIE"
        "TIE Advanced Expansion Pack": "Zestaw dodatkowy TIE Advanced"
        "Lambda-Class Shuttle Expansion Pack": "Zestaw dodatkowy Prom typu Lambda"
        "Slave I Expansion Pack": "Zestaw dodatkowy Slave I"
        "Imperial Aces Expansion Pack": "Zestaw dodatkowy Asy Imperium"
        "Rebel Transport Expansion Pack": "Zestaw dodatkowy Rebeliancki transportowiec"
        "Z-95 Headhunter Expansion Pack": "Zestaw dodatkowy Z-95 Łowca Głów"
        "TIE Defender Expansion Pack": "Zestaw dodatkowy TIE Defender"
        "E-Wing Expansion Pack": "Zestaw dodatkowy E-Wing"
        "TIE Phantom Expansion Pack": "Zestaw dodatkowy TIE Phantom"
        "Tantive IV Expansion Pack": "Zestaw dodatkowy Tantive IV"
        "Rebel Aces Expansion Pack": "Zestaw dodatkowy Asy Rebelii"
        "YT-2400 Freighter Expansion Pack": "Zestaw dodatkowy YT-2400"
        "VT-49 Decimator Expansion Pack": "Zestaw dodatkowy Decimator VT-49"
        "StarViper Expansion Pack": "Zestaw dodatkowy StarViper"
        "M3-A Interceptor Expansion Pack": "Zestaw dodatkowy M3-A Interceptor"
        "IG-2000 Expansion Pack": "Zestaw dodatkowy IG-2000"
        "Most Wanted Expansion Pack": "Zestaw dodatkowy Poszukiwani"
        "Imperial Raider Expansion Pack": "Zestaw dodatkowy Imperialny Patrolowiec"
    ui:
        shipSelectorPlaceholder: "Wybór statków"
        pilotSelectorPlaceholder: "Wybór pilotów"
        upgradePlaceholder: (translator, language, slot) ->
            "#{translator language, 'slot', slot}"
        modificationPlaceholder: "Modyfikacje"
        titlePlaceholder: "Tytuł"
        upgradeHeader: (translator, language, slot) ->
            # was French "Amélioration …" — a leftover from the file this was based on
            "Rozwinięcie #{translator language, 'slot', slot}"
        unreleased: "niewydane"
        epic: "epickie"
    byCSSSelector:
        # Card browser
        '.xwing-card-browser .translate.sort-cards-by': 'Sortuj karty po'
        '.xwing-card-browser option[value="name"]': 'nazwie'
        '.xwing-card-browser option[value="source"]': 'źródle'
        '.xwing-card-browser option[value="type-by-points"]': 'typie (po punktach)'
        '.xwing-card-browser option[value="type-by-name"]': 'typie (po nazwie)'
        '.xwing-card-browser .translate.select-a-card': 'Wybierz kartę z listy po prawej'
        # dropped the stray trailing ’ that was rendered in the UI
        '.xwing-card-browser .info-range td': 'Zasięg'
        # Info well
        '.info-well .info-ship td.info-header': 'Statek'
        '.info-well .info-skill td.info-header': 'Umiejętność pilota'
        '.info-well .info-actions td.info-header': 'Akcje'
        '.info-well .info-upgrades td.info-header': 'Ulepszenia'
        '.info-well .info-range td.info-header': 'Zasięg'
        # Squadron edit buttons
        '.clear-squad' : 'Wyczyść eskadrę'
        '.save-list' : 'Zapisz'
        '.save-list-as' : 'Zapisz jako ...'
        '.delete-list' : 'Usuń'
        '.backend-list-my-squads' : 'Lista eskadr'
        '.view-as-text' : '<span class="hidden-phone"><i class="icon-print"></i> Drukuj / Wyświetl jako </span>Tekst'
        '.randomize' : 'Randomizuj'
        '.randomize-options' : 'Opcje ...'
        # Print/View modal
        '.bbcode-list' : 'Skopiuj BBCode poniżej i wklej go do swojego posta.<textarea></textarea>'
        '.vertical-space-checkbox' : """Dodaj miejsce na karty ulepszeń / uszkodzeń podczas drukowania <input type="checkbox" class="toggle-vertical-space" />"""
        '.color-print-checkbox' : """Wydrukuj w kolorze <input type="checkbox" class="toggle-color-print" />"""
        '.print-list' : '<i class="icon-print"></i> Drukuj'
        # Randomizer options
        '.do-randomize' : 'Generuj'
        # Top tab bar
        '#empireTab' : 'Imperium Galaktyczne'
        # repaired corrupted placeholder text ("So<NAME> Re<NAME>iancki")
        '#rebelTab' : 'Sojusz Rebeliancki'
        '#scumTab' : 'Szumowiny i Nikczemnicy'
        '#browserTab' : 'Przeglądarka kart'
        '#aboutTab' : 'O stronie'
    singular:
        'pilots': 'Pilot'
        'modifications': 'Modyfikacja'
        'titles': 'Tytuł'
    types:
        'Pilot': 'Pilot'
        'Modification': 'Modyfikacja'
        'Title': 'Tytuł'
# Registry of per-language card data loaders (created on first use).
exportObj.cardLoaders ?= {}
exportObj.cardLoaders['Polski'] = () ->
exportObj.cardLanguage = 'Polski'
# Assumes cards-common has been loaded
basic_cards = exportObj.basicCardData()
exportObj.canonicalizeShipNames basic_cards
# English names are loaded by default, so no update is needed
exportObj.ships = basic_cards.ships
# Rename ships
exportObj.renameShip 'TIE Fighter', 'Myśliwiec TIE'
exportObj.renameShip 'TIE Bomber', 'Bombowiec TIE'
exportObj.renameShip 'Lambda-Class Shuttle', 'Prom typu Lambda'
exportObj.renameShip 'VT-49 Decimator', 'Decimator VT-49'
exportObj.renameShip 'Z-95 Headhunter', 'Z-95 Łowca głów'
exportObj.renameShip 'CR90 Corvette (Aft)', 'Korweta CR90 (rufa)'
exportObj.renameShip 'CR90 Corvette (Fore)', 'Corvette CR90 (dziób)'
exportObj.renameShip 'GR-75 Medium Transport', 'Średni transportowiec GR-75'
# Names don't need updating, but text needs to be set
pilot_translations =
# Empire
# Myśliwiec TIE
"Academy Pilot":
ship: "Myśliwiec TIE"
name: "Pilot z Akademii"
"Obsidian Squadron Pilot":
ship: "Myśliwiec TIE"
name: "Pilot <NAME>"
"Black Squadron Pilot":
ship: "Myśliwiec TIE"
name: "Pilot <NAME> C<NAME>"
'"<NAME>"':
name: "<NAME>"
ship: "Myśliwiec TIE"
text: """Kiedy atakujesz w Zasięgu 1, możesz zmienić 1 ze swoich wyników %HIT% na wynik %CRIT%."""
'"Night Beast"':
name: "<NAME>"
ship: "Myśliwiec TIE"
text: """Po wykonaniu zielonego manewru możesz wykonać darmową akcję skupienia."""
'"Backstabber"':
ship: "Myśliwiec TIE"
text: """Kiedy atakujesz będąc poza polem rażenia broniącego się statku, rzucasz 1 dodatkową kością ataku."""
'"Dark Curse"':
name: "<NAME>"
ship: "Myśliwiec TIE"
text: """Kiedy się bronisz statki które cię atakują nie mogą wydawać żetonów skupienia ani przerzucać kości ataku."""
'"<NAME>"':
ship: "Myśliwiec TIE"
text: """Kiedy atakujesz w Zasięgu 1, rzucasz 1 dodatkową kością ataku."""
'"<NAME>"':
ship: "Myśliwiec TIE"
text: """Kiedy inny przyjazny statek w zasięgu 1 atakuje przy pomocy swojej podstawowej broni, może przerzucić 1 kość ataku."""
# TIE Interceptor
"Alpha Squadron Pilot":
name: "Pilot <NAME>"
"Avenger Squadron Pilot":
name: "Pilot <NAME>"
"Saber Squadron Pilot":
name: "Pilot <NAME>"
"Royal Guard Pilot":
name: "Pilot imperialny gwardzista"
"\"<NAME>\"":
name: "\"<NAME>\""
text: """Zostajesz zniszczony dopiero pod koniec fazy walki, w której liczba kart uszkodzeń przypisanych do ciebie będzie równa, lub wyższa od twojej wartości kadłuba."""
"<NAME>":
name: "<NAME>"
text: """Kiedy wykonujesz akcję "beczka", możesz otrzymać 1 żeton stresu, aby zamiast wzornika manewru (%STRAIGHT% 1) użyć (%BANKLEFT% 1) lub (%BANKRIGHT% 1)."""
"<NAME>":
text: """Kiedy atakujesz w Zasięgu 2-3, możesz wydać 1 żeton uników, aby dodać 1 %HIT% do swojego wyniku."""
"<NAME>":
text: """Kiedy ujawnisz manewr %UTURN% możesz traktować go tak, jakby jego prędkość wynosiła "1", "3" lub "5"."""
"<NAME>":
text: """Po wykonaniu ataku możesz wykonać darmową akcję "dopalacz" lub "beczka"."""
"<NAME>":
text: """Wrogie statki w Zasięgu 1 nie mogą wykonywać akcji "skupienie" oraz "unik", ani wydawać żetonów skupienia i uników."""
"<NAME>":
text: """Kiedy otrzymujesz żeton stresu, możesz przypisać do swojego statku 1 żeton skupienia."""
# TIE Phantom
"Sigma Squadron Pilot":
name: "Pilot Eskadry Sigma"
"Shadow Squadron Pilot":
name: "Pilot Eskadry Cieni"
'"Echo"':
text: """Kiedy się demaskujesz musisz użyć wzornika manewru (%BANKLEFT% 2) lub (%BANKRIGHT% 2) zamiast wzornika (%STRAIGHT% 2)."""
'"Whisper"':
name: "<NAME>"
text: """Po tym jak wykonasz atak, który trafi cel, możesz przypisać do swojego statku 1 żeton skupienia."""
# TIE Defender
"Onyx Squadron Pilot":
name: "Pilot Eskadry Onyx"
"Delta Squadron Pilot":
name: "Pilot Eskadry Delta"
"<NAME>":
name: "<NAME>"
text: """Kiedy atakujesz, zaraz po swoim rzucie kośćmi ataku możesz namierzyć obrońcę, jeśli ma na sobie czerwony żeton namierzonego celu."""
"<NAME>":
text: """Po tym jak wykonasz atak, który zada obrońcy co najmniej jedną kartę uszkodzenia, możesz wydać żeton skupienia aby odkryć te karty."""
# Bombowiec TIE
"Scimitar Squadron Pilot":
ship: "Bombowiec TIE"
name: "Pilot Eskadry Sejmitarów"
"Gamma Squadron Pilot":
ship: "Bombowiec TIE"
name: "Pilot Eskadry Gamma"
"<NAME>":
ship: "Bombowiec TIE"
name: "<NAME>"
text: "Kiedy inny przyjazny statek w Zasięgu 1 atakuje przy pomocy dodatkowej broni, może przerzucić maksymalnie 2 kości ataku."
"Major Rhymer":
ship: "Bombowiec TIE"
text: "Kiedy atakujesz przy pomocy dodatkowej broni, możesz zwiększyć lub zmniejszyć zasięg broni o 1. Musisz przy tym zachować limit zasięgu 1-3."
# TIE Advanced
"Tempest Squadron Pilot":
name: "Pilot <NAME>"
"Storm Squadron Pilot":
name: "Pilot Eskadry Szturmu"
"<NAME>":
text: """Kiedy twój atak zadaje obrońcy odkrytą kartę uszkodzenia, wylosuj 3 karty uszkodzeń, wybierz 1 z nich, którą zadajesz, a pozostałe odrzuć."""
"<NAME>":
text: """Podczas swojego kroku "Wykonywania akcji" możesz wykonać 2 akcje."""
"<NAME>":
name: "<NAME>"
text: """Na początku fazy walki możesz namierzyć wrogi statek znajdujący się w Zasięgu 1 od ciebie."""
# Firespray
"<NAME>":
name:"<NAME>"
"<NAME>":
text: """Kiedy atakujesz, obrońca otrzymuje 1 żeton stresu, jeśli anuluje co najmniej jeden wynik %CRIT%."""
"<NAME>":
text: """Kiedy ujawniasz manewr skrętu (%BANKLEFT% lub %BANKRIGHT%) możesz przestawić swój wskaźnik manewrów na drugi manewr skrętu o tej samej prędkości."""
"<NAME>":
text: """Kiedy atakujesz przy pomocy dodatkowej broni, możesz przerzucić 1 kość ataku."""
# Prom typu Lambda
"<NAME>":
ship: "Prom typu Lambda"
name: "<NAME>"
text: """Kiedy wrogi statek namierza cel, musi namierzyć twój statek, jeśli to możliwe."""
"<NAME>":
name: "<NAME>"
ship: "Prom typu Lambda"
text: """Na początku fazy walki możesz przypisać 1 ze swoich niebieskich żetonów namierzonego celu do przyjaznego statku w Zasięgu 1, jeśli ten nie ma niebieskiego żetonu namierzonego celu."""
"<NAME>":
ship: "Prom typu Lambda"
name: "<NAME>"
text: """Kiedy inny przyjazny statek w Zasięgu 1-2 ma otrzymać żeton stresu, gdy ty masz 2 lub mniej żetonów stresu, możesz przypisać do siebie ten żeton stresu."""
"Omicron Group Pilot":
ship: "Prom typu Lambda"
name: "Pilot grupy Omicron"
# VT-49 Decimator
"<NAME>":
ship: "Decimator VT-49"
name: "<NAME>"
text: """Po wykonaniu manewru, każdy wrogi statek z którym się stykasz, otrzymuje 1 uszkodzenie."""
"<NAME>":
ship: "Decimator VT-49"
name: "<NAME>"
text: """Kiedy atakujesz w Zasięgu 1-2, możesz zmienić jeden ze swoich wyników %FOCUS% na wynik %CRIT%."""
"<NAME>":
ship: "Decimator VT-49"
name: "<NAME>"
"<NAME>":
ship: "Decimator VT-49"
name: "<NAME>"
text: """Jeśli nie masz żadnych osłon i masz przypisaną co najmniej 1 kartę uszkodzenia, wartość twojej zwrotności wzrasta o 1."""
"<NAME>":
text: """When you reveal your maneuver, you may increase or decrease its speed by 1 (to a minimum of 1)."""
"<NAME>":
text: """Enemy ships at Range 1 cannot add their range combat bonus when attacking."""
"<NAME>":
text: """At the start of the End phase, you may spend a target lock you have on an enemy ship to flip 1 random facedown Damage card assigned to it faceup."""
# Rebels
# X-Wing
"Red Squadron Pilot":
name: "Pilot <NAME>"
"Rookie Pilot":
name: "Niedoświadczony pilot"
"<NAME>":
text: """Kiedy atakujesz zredukuj wartość zwrotności obrońcy o 1 (do minimum 0)."""
"<NAME>":
text: """Po tym jak wydasz żeton skupienia możesz umieścić dany żeton na dowolnym innym przyjaznym statku w Zasięgu 1-2 (zamiast go odrzucać)."""
"<NAME>":
text: """Inne przyjazne statki w Zasięgu 1 nie mogą być wybierane na cel ataku, jeśli atakujący może na cel wybrać ciebie."""
"<NAME>":
text: """Kiedy się bronisz, możesz zmienić 1 ze swoich wyników %FOCUS% na wynik %EVADE%."""
"<NAME>":
text: """Po wykonaniu ataku możesz usunąć z obrońcy 1 żeton skupienia, uników, lub niebieski żeton namierzonego celu."""
"<NAME>":
text: """Kiedy otrzymujesz żeton stresu możesz usunąć go i rzucić 1 kością ataku. Jeśli wypadnie %HIT%, ten statek otrzymuje 1 zakrytą kartę uszkodzenia."""
'"<NAME>" <NAME>':
text: """Kiedy zdobywasz lub wydajesz żeton namierzonego celu, możesz usunąć ze swojego statku 1 żeton stresu."""
"<NAME>":
text: """Kiedy wrogi statek wybiera cię na cel ataku, możesz namierzyć ten statek."""
# Y-Wing
"Gold Squadron Pilot":
name: "Pilot Eskadry Złotych"
"Gray Squadron Pilot":
name: "Pilot Eskadry Szarych"
'"<NAME>" <NAME>':
text: """Po namierzeniu celu wybierz przyjazny statek w Zasięgu 1-2. Wybrany statek może natychmiast namierzyć cel."""
"<NAME>":
text: """Kiedy atakujesz w Zasięgu 2-3, możesz przerzucić dowolne ze swoich kości, na których wypadły puste ścianki."""
# A-Wing
"Green Squadron Pilot":
name: "Pilot <NAME>"
"Prototype Pilot":
name: "Pilot prototypu"
"<NAME>":
text: """Możesz wykonywać akcje nawet jeśli posiadasz żetony stresu."""
"<NAME>":
text: """Możesz wybrać na cel swojego ataku wrogi statek, z którym się stykasz, jeżeli ten znajduje się w twoim polu rażenia."""
# YT-1300
"<NAME>":
name: "<NAME>"
"<NAME>":
text: """Kiedy otrzymujesz odkrytą kartę uszkodzenia, natychmiast ją zakryj (bez rozpatrywania jej efektu)."""
"<NAME>":
text: """Po wykonaniu zielonego manewru wybierz jeden inny przyjazny statek w Zasięgu 1. Statek ten może wykonać 1 darmową akcję przedstawioną na jego pasku akcji."""
"Han <NAME>":
text: """Kiedy atakujesz możesz przerzucić wszystkie swoje kości ataku. Jeśli zdecydujesz się to zrobić musisz przerzucić tyle ze swoich kości, ile możesz."""
# B-Wing
"Dagger Squadron Pilot":
name: "Pilot Esk<NAME>"
"Blue Squadron Pilot":
name: "Pilot <NAME>"
"<NAME>":
text: """Kiedy atakujesz, 1 z twoich wyników [crt-hit] nie może być anulowany przy pomocy kości obrony."""
"<NAME>":
text: """Kiedy atakujesz lub się bronisz mając co najmniej 1 żeton stresu, możesz przerzucić jedną ze swoich kości."""
# HWK-290
"Rebel Operative":
name: "Agent rebeliantów"
"<NAME>":
text: '''Na początku fazy walki wybierz 1 inny przyjazny statek w zasięgu 1-3. Do końca tej fazy traktuj wartość umiejętności tego pilota jakby wynosiła "12".'''
"<NAME>":
text: """Na początku fazy walki możesz przypisać 1 ze swoich żetonów skupienia do innego przyjaznego statku w Zasięgu 1-3."""
"<NAME>":
text: """Kiedy inny przyjazny statek w Zasięgu 1-3 atakuje, gdy nie masz żadnych żetonów stresu, możesz otrzymać 1 żeton stresu aby umożliwić mu rzut 1 dodatkową kością ataku."""
# Z-95
"Bandit Squadron Pilot":
name: "Pilot <NAME>sk<NAME> Band<NAME>"
ship: "Z-95 Łowca głów"
"Tala Squadron Pilot":
name: "Pilot Eskad<NAME> T<NAME>"
ship: "Z-95 Łowca głów"
"<NAME>":
name: "<NAME>"
ship: "Z-95 Łowca gł<NAME>"
text: "Kiedy atakujesz, obrońca zostaje trafiony twoim atakiem nawet jeśli nie otrzymał żadnych uszkodzeń."
"<NAME>":
ship: "Z-95 Łowca głów"
text: """Po wykonaniu ataku możesz wybrać inny przyjazny statek w Zasięgu 1. Dany statek może wykonać 1 darmową akcję."""
# E-Wing
"Knave Squadron Pilot":
name: "Pilot <NAME>"
"Blackmoon Squadron Pilot":
name: "Pilot <NAME>"
"<NAME>":
text: """Kiedy wrogi statek w twoim polu rażenia, w Zasięgu 1-3 się broni, atakujący może zmienić 1 z jego wyników %HIT% na wynik %CRIT%."""
"<NAME>":
text: """Na początku fazy końcowej możesz wykonać jeden atak. Nie możesz atakować w następnej rundzie."""
# Asy Rebelii
"<NAME>":
text: """Po tym jak wykonasz akcję skupienia lub zostanie ci przypisany żeton skupienia, możesz wykonać darmową akcję "dopalacz" lub "beczka"."""
"<NAME>":
text: """Dopóki znajdujesz się w Zasięgu 1 od co najmniej 1 wrogiego statku, zwiększ swoją wartość zwrotności o 1."""
"<NAME>":
text: """Kiedy atakujesz możesz usunąć 1 żeton stresu aby zmienić wszystkie swoje wyniki %FOCUS% na %HIT%."""
"<NAME>":
text: """Możesz wykonać atak dodatkową bronią %TORPEDO%, skierowany przeciwko wrogim statkom znajdującym się poza twoim polem rażenia."""
# Transportowiec Rebelii
"GR-75 Medium Transport":
name: "Średni transportowiec GR-75"
ship: "Średni transportowiec GR-75"
# CR90
"CR90 Corvette (Fore)":
ship: "Korweta CR90 (dziób)"
name: "Korweta CR90 (dziób)"
text: """Kiedy atakujesz przy pomocy swojej głównej broni, możesz wydać 1 żeton energii aby rzucać 1 dodatkową kością ataku."""
"CR90 Corvette (Aft)":
ship: "Korweta CR90 (rufa)"
name: "Korweta CR90 (rufa)"
# YT-2400
"Dash Rendar":
text: """Podczas fazy aktywacji i w czasie wykonywania akcji możesz ignorować przeszkody."""
'"Leebo"':
text: """Kiedy otrzymujesz odkrytą kartę uszkodzenia, dobierz 1 dodatkową kartę uszkodzenia. Rozpatrz jedną z nich a drugą odrzuć."""
"Eaden Vrill":
text: """Podczas wykonywania ataku przy pomocy broni podstawowej, którego celem jest statek z żetonem stresu, rzucasz 1 dodatkową kością."""
"Wild Space Fringer":
name: "Outsider z D<NAME>i"
# Scum and Villainy
# Starviper
"<NAME>":
name: "<NAME>"
text: """Kiedy się bronisz, przyjazny statek w Zasięgu 1 może otrzymać 1 nieanulowany wynik %HIT% lub %CRIT% (zamiast ciebie)."""
"<NAME>":
text: """Na początku fazy walki, jeśli jesteś w Zasięgu 1 od wrogiego statku, możesz przypisać do swojego statku 1 żeton skupienia."""
"Black Sun Vigo":
name: "<NAME>"
"Black Sun Enforcer":
name: "<NAME>"
# Interceptor M3-A
"<NAME> Sp<NAME>":
name: "Pilot k<NAME>u"
"<NAME>arii Point Veteran":
name: "<NAME>"
"<NAME>":
text: """Kiedy inny przyjazny statek w Zasięgu 1 się broni, może przerzucić 1 kość obrony."""
"<NAME>":
text: "Po tym jak obronisz się przed atakiem, jeśli atak nie trafił, możesz przypisać do swojego statku 1 żeton uniku."
# Aggressor
"IG-88A":
text: """Po tym jak wykonasz atak, który zniszczy obrońcę, możesz odzyskać 1 osłonę."""
"IG-88B":
text: """Raz na rundę, po tym jak wykonasz atak, który nie trafi w wybrany cel, możesz wykonać atak przy pomocy dodatkowej broni %CANON%, w którą jesteś wyposażony."""
"IG-88C":
text: """Po tym jak wykonasz akcję „dopalacz” możesz przypisać do swojego statku 1 żeton uniku."""
"IG-88D":
text: """Możesz wykonać manewr (%SLOOPLEFT% 3) lub (%SLOOPRIGHT% 3) używając odpowiednio wzornika (%TURNLEFT% 3) lub (%TURNRIGHT% 3)."""
# Firespray
"<NAME> (Scum)":
name: """<NAME> (Szumowiny)"""
text: """Kiedy atakujesz lub się bronisz możesz przerzucić 1 ze swoich kości za każdy wrogi statek w Zasięgu 1."""
"<NAME> (Scum)":
name: """<NAME> (Szumowiny)"""
text: """Kiedy atakujesz statek znajdujący się w twoim pomocniczym polu rażenia, rzucasz 1 dodatkową kością ataku."""
"<NAME>":
text: """Kiedy zrzucasz bombę, możesz użyć wzornika [%TURNLEFT% 3], [%STRAIGHT% 3], lub [%TURNRIGHT% 3] (zamiast wzornika [%STRAIGHT% 1])."""
"<NAME>":
name: "<NAME>"
# Y-Wing
"<NAME>":
text: """Kiedy atakujesz statek znajdujący się poza twoim polem rażenia, rzucasz 1 dodatkową kością ataku."""
"<NAME>":
text: """Po tym jak wydasz żeton namierzonego celu, możesz otrzymać 1 żeton stresu, aby namierzyć cel."""
"<NAME>" :
name: "<NAME>"
"<NAME>dicate Thug":
name: "Zbir z syndykatu"
# HWK
"<NAME>":
text: """Kiedy wrogi statek w Zasięgu 1-3 otrzyma co najmniej jeden żeton jonów, a ty nie masz żetonu stresu, możesz otrzymać 1 żeton stresu aby sprawić, żeby dany statek otrzymał 1 uszkodzenie."""
"<NAME>":
text: """Na początku fazy walki możesz usunąć 1 żeton skupienia lub uników z wrogiego statku w Zasięgu 1-2 i przypisać go do siebie."""
"<NAME>":
text: """Na koniec fazy aktywacji wybierz jeden wrogi statek w Zasięgu 1-2. Do końca fazy walki wartość umiejętności pilota wybranego statku wynosi "0"."""
"<NAME>":
name: "<NAME>"
# Z-95
"<NAME>":
name: "<NAME>"
ship: "Z-95 Łowca głów"
"<NAME>":
name: "<NAME>"
ship: "Z-95 Łowca głów"
"<NAME>":
ship: "Z-95 Łowca głów"
text: """Kiedy atakujesz rzucasz 1 dodatkową kością ataku, jeśli w Zasięgu 1-2 nie ma żadnych innych przyjaznych statków."""
"<NAME>":
ship: "Z-95 Łowca głów"
text: """Na początku fazy walki możesz usunąć 1 żeton skupienia lub uników z innego przyjaznego statku w Zasięgu 1-2 i przypisać go do siebie."""
"<NAME>":
text: """When a friendly ship declares an attack, you may spend a target lock you have on the defender to reduce its agility by 1 for that attack."""
"Gr<NAME>":
text: """When defending, if the attacker is inside your firing arc, roll 1 additional defense die."""
"Esege Tuketu":
text: """When another friendly ship at Range 1-2 is attacking, it may treat your focus tokens as its own."""
'"Redline"':
text: """You may maintain 2 target locks on the same ship. When you acquire a target lock, you may acquire a second lock on that ship."""
'"Deathrain"':
text: """When dropping a bomb, you may use the front guides of your ship. After dropping a bomb, you may perform a free barrel roll action."""
"Moralo Eval":
text: """You can perform %CANNON% secondary attacks against ships inside your auxiliary firing arc."""
'Gozanti-class Cruiser':
text: """After you execute a maneuver, you may deploy up to 2 attached ships."""
'"Scourge"':
text: """When attacking a defender that has 1 or more Damage cards, roll 1 additional attack die."""
"The Inquisitor":
text: """When attacking with your primary weapon at Range 2-3, treat the range of the attack as Range 1."""
"Zuckuss":
text: """When attacking, you may roll 1 additional attack die. If you do, the defender rolls 1 additional defense die."""
"Dengar":
text: """Once per round after defending, if the attacker is inside your firing arc, you may perform an attack against the that ship."""
upgrade_translations =
# Elitarna zdolność
"Determination":
name: "Determinacja"
text: """Kiedy otrzymujesz odkrytą kartę uszkodzenia z cechą "Pilot", natychmiast ją odrzuć bez rozpatrywania jej efektu."""
"Swarm Tactics":
name: "Taktyka roju"
text: """Na początku fazy walki wybierz 1 przyjazny statek w Zasięgu 1. Do końca tej fazy traktuj wybrany statek, jakby jego wartość umiejętności pilota była równa twojej."""
"Squad Leader":
name: "<NAME>"
text: """<strong>Akcja:</strong> Wybierz 1 statek w Zasięgu 1-2, który ma niższą wartość umiejętności pilota niż ty. %LINEBREAK% Wybrany statek może natychmiast wykonać 1 darmową akcję."""
"Expert Handling":
name: "<NAME>"
text: """„<strong>Akcja:</strong> Wykonaj darmową akcję „beczka”. Jeśli nie masz symbolu akcji %BARRELROLL%, otrzymujesz 1 żeton stresu. Następnie możesz usunąć jeden wrogi żeton namierzonego celu znajdujący się na Twoim statku."""
"Marksmanship":
name: "<NAME>"
text: """<strong>Akcja:</strong> Kiedy atakujesz w tej rundzie, możesz zamienić 1 swój wynik %FOCUS% na %CRIT%, a wszystkie pozostałe wyniki %FOCUS% na %HIT%."""
"<NAME>":
name: "<NAME>"
text: """<strong>Akcja:</strong> Wykonaj biały manewr (%TURNLEFT% 1) lub (%TURNRIGHT% 1)". Następnie otrzymujesz żeton stresu. %LINEBREAK% Następnie, jeśli nie masz symbolu akcji %BOOST%, rzuć 2 kośćmi ataku. Otrzymujesz wszystkie wyrzucone uszkodzenia %HIT% i uszkodzenia krytyczne %CRIT%."""
"Elusiveness":
name: "<NAME>"
text: """Kiedy się bronisz możesz otrzymać 1 żeton stresu, aby wybrać jedną kość ataku. Atakujący musi przerzucić tą kość. Nie możesz skorzystać z tej zdolności, jeśli jeśli masz co najmniej 1 żeton stresu."""
"Push the Limit":
name: "Na granicy ryzyka"
text: """Raz na rundę po wykonaniu akcji, możesz wykonać 1 darmową akcję przedstawioną na twoim pasku akcji. Następnie otrzymujesz 1 żeton stresu."""
"De<NAME>":
name: "<NAME>"
text: """Możesz traktować nagłówek <strong>"Atak (namierzony cel):"</strong> jako <strong>"Atak (skupienie):"</strong>. %LINEBREAK% Kiedy atak zmusza cię do wydania żetonu namierzonego celu, możesz zamiast niego wydać żeton skupienia."""
"Expose":
name: "<NAME>"
text: """<strong>Akcja:</strong> Do końca rundy zwiększ wartość swojej podstawowej broni o 1 i zmniejsz wartość zwrotności o 1."""
"<NAME>":
name: "<NAME>"
text: """Na początku fazy walki usuń 1 żeton stresu z innego przyjaznego statku w Zasięgu 1."""
"<NAME>":
name: "<NAME>"
text: """Na początku fazy walki możesz wybrać 1 przyjazny statek w Zasięgu 1-2. Do końca fazy zamieniasz się z danym statkiem wartościami umiejętności pilota."""
"<NAME>":
name: "<NAME>"
text: """Kiedy atakujesz statek w swoim polu rażenia, a nie znajdujesz się w polu rażenia danego statku, zmniejsz jego wartość zwrotności o 1 (do minimum 0)."""
"Predator":
name: "<NAME>"
text: """Kiedy atakujesz, możesz przerzucić 1 kość ataku. Jeśli wartość umiejętnosci pilota obrońcy wynosi "2" lub mniej, możesz przerzucić maksymalnie 2 kości ataku (zamiast 1)."""
"Draw Their Fire":
name: "<NAME>"
text: """Kiedy przyjazny statek w Zasięgu 1 zostaje trafiony podczas ataku, możesz zamiast niego otrzymać 1 z nie anulowanych %CRIT%."""
"<NAME>":
name: "<NAME>"
text: """Kiedy ujawnisz czerwony manewr, możesz odrzucić tę kartę, aby do końca fazy aktywacji traktować ten manewr jako biały."""
"Veteran Instincts":
name: "<NAME>"
text: """Zwiększ swoją wartość umiejętności pilota o 2."""
"Opportunist":
name: "<NAME>"
text: """Kiedy atakujesz, jeśli obrońca nie ma żadnych żetonów skupienia ani uników, możesz otrzymać 1 żeton stresu aby rzucić 1 dodatkową kością ataku.%LINEBREAK%Nie możesz skorzystać z tej zdolności, jeśli posiadasz żetony stresu."""
"Lone Wolf":
name : "<NAME>"
text: """Jeśli w zasięgu 1-2 nie ma żadnych innych przyjaznych statków, kiedy się bronisz lub atakujesz, możesz przerzucić 1 wynik z pustą ścianką."""
"Stay On Target":
name: "Utrzymać cel"
text: """Kiedy ujawnisz swój manewr możesz obrócić swój wskaźnik na inny manewr o tej samej prędkości.%LINEBREAK%Traktuj ten manewr jako czerwony."""
"Ruthlessness":
name: "Bezwzględność"
text: """%PL_IMPERIALONLY%%LINEBREAK% Po tym jak przeprowadzisz atak, który trafi w cel, musisz wybrać 1 inny statek w Zasięgu 1 od obrońcy (nie siebie). Statek ten otrzymuje 1 uszkodzenie."""
"Intimidation":
name: "Zastraszenie"
text: """Dopóki stykasz się z wrogim statkiem, jego zwrotność zostaje zmniejszona o 1."""
"Calculation":
name: "Kalkulacje"
text: """Kiedy atakujesz, możesz wydać żeton skupienia, aby zmienić jeden ze swoich wyników %FOCUS% na wynik %CRIT%."""
"Bodyguard":
name : "<NAME>"
text: """%PL_SCUMONLY%%LINEBREAK% Na początku fazy walki możesz wydać żeton skupienia aby wybrać przyjazny statek w Zasięgu 1 o wartości umiejętności pilota wyższej od ciebie. Do końca rundy zwiększ jego wartość zwrotności o 1."""
# Astromechs
"R2 Astromech":
name: "Astromech R2"
text: """Możesz traktować wszystkie manewry o prędkości 1 i 2, jakby były to zielone manewry."""
"R2-D2":
text: """Po wykonaniu zielonego manewru możesz odzyskać 1 osłonę (nie przekraczając swojej wartości osłon)."""
"R2-F2":
text: """<strong>Akcja:</strong> Do końca tej rundy zwiększ swoją wartość zwrotności o 1."""
"R5-D8":
text: """<strong>Akcja:</strong> Rzuć jedną kością obrony.%LINEBREAK% Jeżeli wypadnie wynik %EVADE% lub %FOCUS%, odrzuć jedną ze swoich zakrytych kart uszkodzeń."""
"R5-K6":
text: """Po wydaniu swojego rzetonu namierzonego celu rzuć 1 kością obrony.%LINEBREAK% Jeżeli wypadnie %EVADE% natychmiast zdobywasz żeton namierzonego celu dla tego samego statku. Nie możesz wydać nowego żetonu namierzonego celu podczas tego ataku."""
"R5 Astromech":
name: "Astromech R5"
text: """Podczas fazy końcowej możesz wybrać 1 ze swoich odkrytych kart z cechą "Statek" i ją zakryć."""
"R7 Astromech":
name: "Astromech R7"
text: """Raz na rundę kiedy się bronisz, jeśli namierzasz atakującego, możesz wydać żeton namierzonego celu aby wybrać dowolną liczbę kości ataku. Atakujący musi przerzucić wybrane kości."""
"R7-T1":
text: """<strong>Akcja:</strong> Wybierz wrogi statek w Zasięgu 1-2. Jeżeli znajdujesz się w polu rażenia wybranego statku, możesz namierzyć dany statek. Następnie możesz wykonać darmową akcję "dopalacz"."""
"R4-D6":
text: """Kiedy zostaniesz trafiony w wyniku ataku, a pośród wyników rzutu są co najmniej 3 nieaulowalne wyniki %HIT% możesz wybrać i anulować wszystkie poza 2. Za każdy wynik anulowany w ten sposób otrzymujesz 1 żeton stresu."""
"R5-P9":
text: """Na koniec fazy walki możesz wydać jeden ze swoich żetonów skupienia, aby odzyskać 1 osłonę (nie przekraczając swojej wartości osłon)."""
"R3-A2":
text: """Kiedy wybierzesz cel ataku, jeżeli obrońca znajduje się w twoim polu rażenia, możesz otrzymać 1 żeton stresu, aby sprawić żeby obrońca otrzymał 1 żeton stresu."""
"R2-D6":
text: """Twój pasek rowinięć zyskuje symbol %ELITE%.%LINEBREAK% Nie możesz przypisać tej karty rozwinięcia do swojego statku jeżeli masz już symbol rozwinięcia %ELITE% lub jeżeli wartość umiejętności pilota wynosi 2 lub mniej."""
# Torpilles
"<NAME>":
name: "<NAME>"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz zmienić 1 ze swoich wyników %FOCUS% na wynik %CRIT%."""
"Advanced Proton Torpedoes":
name: "Zaaw. torpedy protonowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz zmienić maksymalnie 3 swoje puste ścianki na wyniki %FOCUS%."""
"Flechette Torpedoes":
name: "Torpedy rozpryskowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Po wykonaniu tego ataku obrońca otrzymuje 1 żeton stresu jeżeli jego wartość kadłuba wynosi 4 lub mniej."""
"Ion Torpedoes":
name: "<NAME> jon<NAME>"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Jeżeli ten atak trafi w wybrany cel, obrońca oraz każdy statek w Zasięgu 1 od niego otrzymuje 1 żeton jonów."""
"Bomb Loadout":
name: "Ładunek bomb"
text: """<span class="card-restriction">Tylko Y-wing. Ograniczenie.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol %BOMB%."""
# Tourelles
"Ion Cannon Turret":
name: "Wieżyczka z działem jonowym"
text: """<strong>Atak:</strong> Zaatakuj 1 statek (nawet poza twoim polem rażenia). %LINEBREAK%Jeśli atak ten trafi w wybrany statek, otrzymuje on 1 uszkodzenie oraz 1 żeton jonów. Następnie anuluj wszystkie wyniki kości."""
"Blaster Turret":
name: "Wieżyczka blasterowa"
text: """<strong>Atak (skupienie):</strong> Wydaj 1 żeton skupienia, aby zaatakować 1 statek (nawet poza twoim polem rażenia)."""
"Autoblaster Turret":
name: "Wieżyczka autoblasterowa"
text: """<strong>Atak: Zaatakuj 1 statek (nawet poza twoim polem rażenia). %LINEBREAK%Twoje wyniki %HIT% nie mogą być anulowane przy pomocy kości obrony. Obrońca może anulować wyniki %CRIT% przed %HIT%."""
# Missiles
"Concussion Missiles":
name: "Rakiety wstrząsowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz zmienić 1 ze swoich wyników z pustą ścianką na wynik %HIT%."""
"Cluster Missiles":
name: "Rakiety kasetonowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu o odrzuć tę kartę, aby przeprowadzić ten atak dwukrotnie."""
"Homing Missiles":
name: "Rakiety samonaprowadzające"
text: """<strong>Atak (namierzony cel):</strong> Odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Podczas tego ataku obrońca nie może wydawać żetonów uniku."""
"Assault M<NAME>":
name: "<NAME>iety sz<NAME>owe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Jeśli ten atak trafi w wybrany cel, każdy inny statek w Zasięgu 1 od obrońcy otrzymuje 1 uszkodzenie."""
"Ion Pulse Missiles":
name: "Jon<NAME> rakiety pulsacyjne"
text: """<strong>Atak (namierzony cel):</strong> Odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Jeśli ten atak trafi, obrońca otrzymuje 1 uszkodzenie oraz 2 żetony jonów."""
"Ch<NAME>":
name: "<NAME>"
text: """<span class="card-restriction">Tylko A-wing.</span>%LINEBREAK%Ta karta ma ujemny koszt w punktach eskadry."""
"Proton Rockets":
name: "Rakiety protonowe"
text: """<strong>Atak (skupienie):</strong> Odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz rzucić dodatkowymi kośćmi ataku w liczbie równej twojej zwrotności (nie wiecej niż 3)."""
# Bombes
"Seismic Charges":
name: "<NAME>"
text: """Kiedy odkrywasz swój wskaźnik manewrów, możesz odrzucić tą kartę aby zrzucić 1 żeton ładunku sejsmicznego. %LINEBREAK%Żeton ten zostanie zdetonowany na koniec fazy aktywacji."""
"Proximity Mines":
name: "<NAME>"
text: """<strong>Akcja:</strong> odrzuć tę kartę aby zrzucić 1 żeton miny zbliżeniowej. Kiedy statek wykona manewr w wyniku którego podstawka statku lub wzornik manewru będzie nachodzić na ten żeton, żeton ten zostaje zdetonowany."""
"Proton Bombs":
name: "<NAME>"
text: """Kiedy odkrywasz swój wskaźnik manewrów, możesz odrzucić tą kartę aby zrzucić 1 żeton bomby protonowej. %LINEBREAK%Żeton ten zostanie zdetonowany na koniec fazy aktywacji."""
# Canons
"Ion Cannon":
name: "<NAME>"
text: """<strong>Atak: Zaatakuj 1 statek. %LINEBREAK%Jeżeli ten atak trafi wybrany cel, obrońca otrzymuje 1 uszkodzenie oraz 1 żeton jonów. Następnie anuluj wszystie wyniki kości."""
"Heavy Laser Cannon":
name: "<NAME>"
text: """<strong>Atak: Zaatakuj 1 statek. %LINEBREAK%Natychmiast po rzucie swoimi kośćmi ataku musisz zmienić wszystkie swoje wyniki %CRIT% na wyniki %HIT%."""
"Autoblaster":
text: """<strong>Atak: Zaatakuj 1 statek. %LINEBREAK%Twoje wyniki %HIT% nie mogą być anulowane przez kości obrony. Obrońca może anulować wyniki %CRIT% przed wynikami %HIT%."""
"<NAME>":
name: "<NAME> roz<NAME>"
text: """<strong>Atak: Zaatakuj 1 statek. %LINEBREAK%Jeżeli ten atak trafi, obrońca otrzymuje 1 uszkodzenie i, jeśli nie jest zestresowany, otrzymuje także 1 żeton stresu. Następnie anuluj wszystkie wyniki kości."""
'"Mangler" Cannon':
name: "Działo typu Mangler"
text: """<strong>Atak: Zaatakuj 1 statek. %LINEBREAK%Kiedy atakujesz, możesz zmienić jeden ze swoich wyników %HIT% na wynik %CRIT%."""
# Systèmes
"Enhanced Scopes":
name: "Wzmocnione radary"
text: """Podczas fazy aktywacji traktuj swoją wartość umiejętności pilota jakby wynosiła "0"."""
"Fire-Control System":
name: "System kontroli ognia"
text: """Po tym jak wykonasz atak, możesz namierzyć obroncę."""
"Advanced Sensors":
name: "Zaawanswowane sensory"
text: """Zaraz przed tym jak ujawnisz swój manewr, możesz wykonać 1 darmową akcję. %LINEBREAK%Jeżeli skorzystawsz z tej zdolności, musisz w tej rundzie pominąć swój krok "Wykonywania akcji"."""
"Sensor Jammer":
name: "Zakłócacz sensorów"
text: """Kiedy się bronisz możesz zmienić 1 z wyników %HIT% atakującego na wynik %FOCUS%. Atakujący nie może przerzucić kości ze zmienionym wynikiem."""
"Accuracy Corrector":
name: "Korektor celności"
text: """Kiedy atakujesz, możesz anulować wszystkie swoje wyniki kości. Następnie możesz dodać 2 wyniki %HIT%.%LINEBREAK% Podczas tego ataku nie można ponownie modyfikować twoich kości."""
"Advanced Targeting Computer":
name: "Zaawansowany komputer celowniczy"
text: """<span class="card-restriction">Tylko TIE Advanced.</span>%LINEBREAK% Kiedy atakujesz namierzonego przez siebie przeciwnika przy pomocy broni podstawowej, do wyniku rzutu kośćmi możesz dodać jeden wynik %CRIT%. Jeżeli to zrobisz, podczas tego ataku nie możesz wydać żetonu namierzonego celu."""
# Équipages
"<NAME>":
name: "<NAME>"
text: """Po wykonaniu ataku, który nie trafił w wybrany cel, natychmiast wykonaj atak główną bronią. W tej rundzie nie możesz wykonać kolejnego ataku."""
"<NAME>":
name: "<NAME>"
text: """Kiedy atakujesz w Zasiegu 3 możesz zmienić 1 ze swoich wyników %HIT% na wynik %CRIT%."""
"Weapons Engineer":
name: "<NAME>"
text: """Możesz namierzać naraz 2 statki (każdy wrogi statek możesz namierzać tylko raz). %LINEBREAK%Kiedy namierzasz cel, możesz namierzyć 2 różne statki."""
"<NAME>":
text: """%PL_REBELONLY%%LINEBREAK%Po wykonaniu ataku, który nie trafi w wybrany cel, natychmiast wykonaj atak główną b<NAME>ią. Możesz zmienić 1 wynik %FOCUS% na %HIT%."""
"<NAME>":
text: """%PL_REBELONLY%%LINEBREAK%Możesz traktować wszystkie manewry %STRAIGHT%, jakby były to zielone manewry."""
"Chewbacca":
text: """%PL_REBELONLY%%LINEBREAK%Kiedy otrzymujesz kartę uszkodzenia, możesz natychmiast odrzucić tę kartę i odzyskać 1 żeton osłony. Następnie odrzuć tę kartę rozwinięcia."""
"Recon Specialist":
name: "<NAME>"
text: """Kiedy wykonujesz akcję skupienia, przypisz do swojego statku 1 dodatkowy żeton skupienia."""
"S<NAME>oteur":
name: "<NAME>"
text: """<strong>Akcja:</strong> Wybierz 1 wrogi statek w Zasięgu 1 i rzuć 1 koscią ataku. Jeśli wypadnie %HIT% lub %CRIT%, wylosuj 1 zakrytą kartę uszkodzenia przypisaną do tego statku, odkryj ją i rozpatrz."""
"Intelligence Agent":
name: "Agent <NAME>"
text: """Na początku fazy aktywacji wybierz 1 wrogi statek w zasięgu 1-2. Możesz podejrzeć manewr wybrany przez ten statek."""
"D<NAME>":
text: """%PL_IMPERIALONLY%%LINEBREAK%Tylko Imperium. Po tym jak wykonasz atak skierowany przeciwko wrogiemu statkowi, możesz otrzymać 2 uszkodzenia, aby zadać temu statkowi 1 krytyczne uszkodzenie."""
"Rebel Captive":
name: "<NAME>"
text: """%PL_IMPERIALONLY%%LINEBREAK%Raz na rundę, pierwszy statek, który zadeklaruje ciebie jako cel ataku, natychmiast otrzymuje 1 żeton stresu."""
"Flight Instructor":
name: "<NAME>"
text: """Kiedy się bronisz, możesz przerzucić 1 ze swoich wyników %FOCUS%. Jeśli wartość umiejętności atakującego pilota wynosi "2" lub mniej, zamiast tego przerzuć 1 ze swoich pustych scianek.%FOCUS%."""
"Navigator":
name: "<NAME>"
text: """Kiedy ujawnisz swój manewr, możesz obrócić swój wskaźnik na inny manewr tego samego kierunku. %LINEBREAK%Nie możesz przekręcić wskaźnika na czerwony manewr, jeśli posiadasz jakieś żetony stresu."""
"<NAME>":
text: """%PL_REBELONLY%%LINEBREAK%<strong>Akcja:</strong> Rzuć 2 koścmi obrony. Za kazdy uzyskany wynik %FOCUS% przypisz do swojego statku 1 żeton skupienia. Za każdy wynik %EVADE% przypisz do swojego statku 1 żeton uniku.%FOCUS%"""
"<NAME>":
text: """%PL_IMPERIALONLY%%LINEBREAK% Na koniec fazy walki kazdy wrogi statek w Zasięgu 1, który nie ma żetonu stresu, otrzymuje żeton stresu."""
"Fleet Officer":
name: "<NAME>"
text: """%PL_IMPERIALONLY%%LINEBREAK%<strong>Akcja:</strong> Wybierz maksymalnie 2 przyjazne statki w Zasięgu 1-2 i do każdego przypisz po 1 żetonie skupienia, następnie otrzymujesz 1 żeton stresu."""
"Han Solo":
text: """%PL_REBELONLY%%LINEBREAK%Tylko rebelianci. Kiedy atakujesz, jeśli namierzyłeś obrońcę, możesz wydać żeton namierzonego celu aby zmienić wszystkie swoje wyniki %FOCUS% na %HIT%."""
"Leia Organa":
text: """%PL_REBELONLY%%LINEBREAK%Na początku fazy aktywacji możesz odrzucić tę kartę, aby umożliwić wszystkim przyjaznym statkom, które ujawiniają czerwony manewr, traktowanie do końca fazy tego manewru jako białego."""
"WED-15 Repair Droid":
name: "Droid naprawczy WED-15"
text: """%PL_HUGESHIPONLY%%LINEBREAK%<strong>Akcja:</strong> Wydaj 1 żeton energii aby odrzucić 1 ze swoich zakrytych kart uszkodzeń albo wydaj 3 żetony energii aby odrzucić 1 ze swoich odkrytych kart uszkodzeń."""
"<NAME>":
text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%Na początku fazy aktywacji możesz odrzucić tę kartę aby do końca fazy traktować wartość umiejętności pilota każdego przyjaznego statku jakby wynosiła "12"."""
"<NAME>":
text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%Kiedy inny przyjazny statek w Zasięgu 1 wykonuje atak, możesz zmienić 1 z jego wyników %HIT% na %CRIT%."""
"<NAME>":
name: "<NAME>"
text: "Po tym jak wykonasz atak przeciwko statkowi znajdującemu się w twoim polu rażenia w Zasiegu 2, statek ten otrzymuje 1 żeton stresu."
"R2-D2 (Crew)":
name: "R2-D2 (Załoga)"
text: """%PL_REBELONLY%%LINEBREAK%Na koniec fazy końcowej, jeśli nie masz żadnych osłon, możesz odzyskać 1 osłonę i rzucić 1 kością ataku. Jeśli wypadnie %HIT% odkryj 1 losową ze swoich zakrytych kart uszkodzeń i ją rozpatrz."""
"C-3PO":
text: """%PL_REBELONLY%%LINEBREAK%Raz na rundę, zanim wykonasz rzut co najmniej 1 koscią obrony, możesz na głos zgadnąć liczbę wyników %EVADE%. Jeśli wypadło tyle %EVADE% (przed modyfikacjami) dodaj 1 wynik %EVADE%."""
"<NAME>":
text: """%PL_REBELONLY%%LINEBREAK%Po tym jak usuniesz ze swojego statku żeton stresu, możesz przypisać do swojego statku żeton skupienia."""
"<NAME>":
text: """%PL_REBELONLY%%LINEBREAK%Raz na rundę, kiedy przyjazny statek w Zasięgu 1-3 wykonuje akcję skupienia lub miałby otrzymać żeton skupienia, możesz danemu statkowi przypisać żeton uniku (zamiast skupienia)."""
"<NAME>":
text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%<strong>Akcja:</strong> Wydaj dowolną ilość żetonów energii aby wybrać taką samą liczbę wrogich statków w Zasiegu 1-2. Usuń z wybranych statków wszystkie żetony skupienia, uników i niebieskie żetony namierzonego celu."""
"Targeting Coordinator":
name: "<NAME>"
text: """<strong>Energia:</strong> Możesz wydać 1 żeton energii aby wybrać 1 przyjazny statek w Zasięgu 1-2. Namierz cel, a następnie przydziel do wybranego statku niebieski żeton namierzonego celu."""
"<NAME>":
text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%Na początku fazy aktywacji wybierz 1 wrogi statek w Zasięgu 1-3. Możesz podejrzeć manewr wybrany dla tego statku. Jeżeli jest on biały, przydziel do niego 1 żeton stresu."""
'"L<NAME>bo"':
text: """%PL_REBELONLY%%LINEBREAK%<strong>Akcja:</strong> wykonaj darmową akcję "dopalacz". Następnie otrzymujesz 1 żeton jonów."""
"Dash Rendar":
text: """%PL_REBELONLY%%LINEBREAK%Możesz wykonywać ataki kiedy nachodzisz na przeszkodę. %LINEBREAK%Twoje ataki nie mogą być przyblokowane."""
"<NAME>":
text: """%PL_IMPERIALONLY%%LINEBREAK%Na początku fazy walki, jeśli nie masz żadnych osłon, a do twojego statku przypisana jest co najmniej 1 karta uszkodzenia, możesz wykonać darmową akcję unik."""
"<NAME>":
text: """%PL_IMPERIALONLY%%LINEBREAK%Kiedy otrzymujesz odkrytą kartę uszkodzenia, możesz odrzucić to rozwinięcie lub inną kartę rozwinięcia [crew] aby zakryć tę kartę uszkodzenia (bez rozpatrywania jej efektu)."""
"<NAME>":
text: """%PL_SCUMONLY%%LINEBREAK%Za pierwszym razem kiedy atakujesz lub bronisz sie w każdej rundzie, pierwsza przypisana karta uszkodzenia jest odkryta."""
"Outlaw Tech":
name: "Mechanik wyjęty spod prawa"
text: """%PL_SCUMONLY%%LINEBREAK%Po wykonaniu czerwonego manewru, możesz przypisać do swojego statku 1 żeton skupienia."""
"K4 Security Droid":
name: "Droid ochroniarz K4"
text: """%PL_SCUMONLY%%LINEBREAK%Po wykonaniu zielonego manewru możesz namierzyć cel."""
# Soute
"Frequency Jammer":
name: "Zakłócacz częstotliwości"
text: """Kiedy wykonujesz akcję Zakłócanie, wybierz 1 wrogi statek, który nie ma żetonu stresu i znajduje się w Zasięgu 1 od zakłócanego statku. Wybrany statek otrzymuje 1 żeton stresu."""
"Expanded Cargo Hold":
ship: "Średni transportowiec GR-75"
name: "<NAME>ięks<NAME> ład<NAME>"
text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Raz na rundę, kiedy masz otrzymać odkrytą kartę uszkodznia, możesz dobrać te kartę z talii uszkodzeń dziobu lub rufy."""
"Comms Booster":
name: "<NAME>"
text: """<strong>Energia:</strong> Wydaj 1 żeton energii aby usunąć wszystkie żetony stresu z przyjaznego statku w Zasięgu 1-3, następnie przydziel do tego statku 1 żeton skupienia."""
"Slicer Tools":
name: "<NAME>"
text: """<strong>Akcja:</strong> Wybierz co najmniej 1 wrogi statek w Zasięgu 1-3, na ktorym znajduje się żeton stresu. Za każdy wybrany statek możesz wydać 1 żeton energii aby sprawić, żeby dany statek otrzymał 1 uszkodzenie."""
"Shield Projector":
name: "<NAME>"
text: """Kiedy wrogi statek stanie się podczas fazy walki, możesz wydać 3 żetony energii aby do końca fazy zmusić go do zaatakowania ciebie, jeśli to możliwe."""
"Tibanna Gas Supplies":
name: "<NAME>"
text: """<strong>Energia:</strong> Możesz odrzucić tę kartę aby otrzymać 3 żetony energii."""
"Ionization Reactor":
name: "<NAME> jon<NAME>"
text: """<strong>Energia:</strong> Wydaj 5 żetonów energii z tej karty i odrzuć tą kartę aby sprawić żeby każdy statek w Zasięgu 1 otrzymał 1 uszkodzneie i 1 żeton jonów."""
"Engine Booster":
name: "<NAME>"
text: """Tuż przed tym jak odkryjesz swój wskaźnik manewrów, możesz wydać 1 żeton energii aby wykonać biały manewr (%STRAIGHT% 1). Nie możesz skorzystać z tej zdolności, jeśli w jej wyniku będziesz nachodzić na inny statek."""
"Backup Shield Generator":
name: "Zapasowy generator osłon"
text: """Na koniec każdej rudny możesz wydać 1 żeton energii aby odzyskać 1 osłonę (nie przekraczając swojej wartości osłon)."""
"EM Emitter":
name: "Emiter elektro-magnetyczny"
text: """Kiedy przyblokujesz atak, obrońca rzuca 3 dodatkowymi kośmi obrony (zamiast 1)."""
        # Hardpoint
"Ion Cannon Battery":
name: "Bateria działa jonowego"
text: """<strong>Atak (energia):</strong> Aby wykonać ten atak, wydaj 2 żetony energii z tej karty. Jeżeli atak ten trafi w wybrany statek, otrzymuje on 1 krytyczne uszkodzenie oraz 1 żeton jonów. Następnie anuluj wszystkie wyniki kości."""
"Single Turbolasers":
name: "<NAME>ń<NAME> T<NAME>"
text: """<strong>Atak (energia):</strong> Wydaj 2 żetony energii z tej karty aby wykonać ten atak. Obronca podwaja swoją wartość zwrotności przeciwko temu atakowi. Możesz zmienić jeden ze swoich wyników %FOCUS% na %HIT%."""
"Quad Laser Cannons":
name: "Poczwórne działka laserowe"
text: """<strong>Atak (energia):</strong> Wydaj 1 żeton energii z tej karty aby wykonać ten atak. Jeśli ten atak nie trafi, możesz natychmiast wydać 1 żeton energii z tej karty aby ponownie przeprowadzić ten atak."""
        # Team
"Gunnery Team":
name: "<NAME>"
text: """Raz na rundę kiedy atakujesz przy pomocy daodatkowej broni, możesz wydać 1 żeton energii aby zmienić 1 ze swoich pustych wyników na %HIT%."""
"Sensor Team":
name: "<NAME> sensor<NAME>"
text: """Kiedy namierzasz cel, możesz namierzyć wrogi statek w Zasięgu 1-5 (zamiast Zasięgu 1-3)."""
"Engineering Team":
name: "<NAME>"
text: """Podczas fazy aktywacji, kiedy ujawnisz manewr %STRAIGHT%, otrzymujesz 1 dodatkowy żeton energii podczas kroku Otrzymywania energii."""
        # Illicit
"Inertial Dampeners":
name: "<NAME>"
text: """Kiedy ujawniasz swój manewr, możesz odrzucić tę kartę żeby zamiast tego wykonać biały manewr [0%STOP%]. Następnie otrzymujesz 1 żeton stresu."""
"Dead Man's Switch":
name: "Włączn<NAME> sam<NAME>ó<NAME>"
text: """Kiedy zostajesz zniszczony, każdy statek w Zasięgu 1 otrzymuje 1 uszkodzenie."""
"Feedback Array":
name: "System zwrotny"
text: """Podczas fazy walki, zamiast wykonywać jakiekolwiek ataki, możesz otrzymać 1 żeton jonów i 1 uszkodzenie aby wybrać wrogi statek w Zasięgu 1. Wybrany statek otrzymuje 1 uszkodzenie."""
'"Hot Shot" Blaster':
name: "<NAME>"
text: """<strong>Atak:</strong> Odrzuć tę kartę, aby zaatakować 1 statek (nawet poza twoim polem rażenia)."""
        # Salvaged Astromech
"Salvaged Astromech":
name: "<NAME>"
text: "Kiedy otrzymujesz kartę uszkodzenia z cechą Statek, natychmiast możesz ją odrzucić (przed rozpatrzeniem efektu). %LINEBREAK%Następnie odrzuć tę kartę rozwinięcia.%LINEBREAK%."
'"<NAME>"':
name: "<NAME>"
text: """Jeśli jesteś wyposażony w bombę, która może zostać zrzucona przed ujawnieniem twojego manewru, zamiast tego możesz ją zrzucić po tym jak wykonasz swój manewr."""
"Unhinged Astromech":
name: "<NAME>"
text: """Możesz traktować manewry o prędkości 3 jako zielone."""
"R4-B11":
text: """Kiedy atakujesz namierzonego przez siebie obrońcę, możesz wydać żeton namierzonego celu aby wybrać dowolne kości obrony (nawet wszystkie). Następnie obrońca musi przerzucić wybrane przez ciebie kości."""
"<NAME>":
name: "<NAME>"
text: """Kiedy atakujesz, po wydaniu żetonu skupienia, możesz namierzyć obrońcę."""
"<NAME>":
text: """%IMPERIALONLY%%LINEBREAK%Once per round, you may change a friendly ship's die result to any other die result. That die result cannot be modified again."""
"Boss<NAME>":
text: """%SCUMONLY%%LINEBREAK%After you perform an attack that does not hit, if you are not stressed, you <strong>must</strong> receive 1 stress token. Then assign 1 focus token to your ship and acquire a target lock on the defender."""
"Lightning Reflexes":
text: """%SMALLSHIPONLY%%LINEBREAK%After you execute a white or green maneuver on your dial, you may discard this card to rotate your ship 180°. Then receive 1 stress token <strong>after</strong> the "Check Pilot Stress" step."""
"Twin Laser Turret":
text: """<strong>Attack:</strong> Perform this attack <strong>twice</strong> (even against a ship outside your firing arc).<br /><br />Each time this attack hits, the defender suffers 1 damage. Then cancel <strong>all</strong> dice results."""
"Pl<NAME>":
text: """<strong>Attack (target lock):</strong> Spend your target lock and discard this card to perform this attack.<br /><br />If this attack hits, after dealing damage, remove 1 shield token from the defender."""
"Ion Bombs":
text: """When you reveal your maneuver dial, you may discard this card to <strong>drop</strong> 1 ion bomb token.<br /><br />This token <strong>detonates</strong> at the end of the Activation phase.<br /><br /><strong>Ion Bombs Token:</strong> When this bomb token detonates, each ship at Range 1 of the token receives 2 ion tokens. Then discard this token."""
"Conner Net":
text: """<strong>Action:</strong> Discard this card to <strong>drop</strong> 1 Conner Net token.<br /><br />When a ship's base or maneuver template overlaps this token, this token <strong>detonates</strong>.<br /><br /><strong>Conner Net Token:</strong> When this bomb token detonates, the ship that moved through or overlapped this token suffers 1 damage, receives 2 ion tokens, and skips its "Perform Action" step. Then discard this token."""
"Bombardier":
text: """When dropping a bomb, you may use the (%STRAIGHT% 2) template instead of the (%STRAIGHT% 1) template."""
"Cluster Mines":
text: """<strong>Action:</strong> Discard this card to <strong>drop</strong> 3 cluster mine tokens.<br /><br />When a ship's base or maneuver template overlaps a cluster mine token, that token <strong>detonates</strong>.<br /><br /><strong>Cluster Mines Tokens:</strong> When one of these bomb tokens detonates, the ship that moved through or overlapped this token rolls 2 attack dice and suffers all damage (%HIT%) rolled. Then discard this token."""
'Crack Shot':
text: '''When attacking a ship inside your firing arc, you may discard this card to cancel 1 of the defender's %EVADE% results.'''
"Advanced Homing Missiles":
text: """<strong>Attack (target lock):</strong> Discard this card to perform this attack.%LINEBREAK%If this attack hits, deal 1 faceup Damage card to the defender. Then cancel <strong>all</strong> dice results."""
'Agent <NAME>':
text: '''%IMPERIALONLY%%LINEBREAK%At the start of the first round, choose 1 enemy small or large ship. When attacking or defending against that ship, you may change 1 of your %FOCUS% results to a %HIT% or %EVADE% result.'''
'XX-23 S-Thread Tracers':
text: """<strong>Attack (focus):</strong> Discard this card to perform this attack. If this attack hits, each friendly ship at Range 1-2 of you may acquire a target lock on the defender. Then cancel <strong>all</strong> dice results."""
"Tractor Beam":
text: """<strong>Attack:</strong> Attack 1 ship.%LINEBREAK%If this attack hits, the defender receives 1 tractor beam token. Then cancel <strong>all</strong> dice results."""
"Cloaking Device":
text: """%SMALLSHIPONLY%%LINEBREAK%<strong>Action:</strong> Perform a free cloak action.%LINEBREAK%At the end of each round, if you are cloaked, roll 1 attack die. On a %FOCUS% result, discard this card, then decloak or discard your cloak token."""
modification_translations =
"Shield Upgrade":
name: "Ulepszenie osłon"
text: """Zwiększ wartość swoich osłon o 1."""
"Advanced Cloaking Device":
name: "Zaawansowany system maskowania"
text: """<span class="card-restriction">Tylko TIE Phantom.</span>%LINEBREAK%Po tym jak wykonasz atak, możesz wykonać darmową akcję maskowanie."""
ship: "TIE Phantom"
"Stealth Device":
name: "Urządzenie maskujące"
text: """Zwiększ wartość swojej zwrotności o 1. Jeśli zostaniesz trafiony podczas ataku, odrzuć tę kartę."""
"Engine Upgrade":
name : "Ulepszenie silnika"
text: """Twój pasek rozwinięć zyskuje symbol akcji %BOOST%."""
"Anti-Pursuit Lasers":
name: "Lasery antypościgowe"
text: """%PL_LARGESHIPONLY%Po tym jak wrogi statek wykona manewr, który sprawi że będzie zachodzić na ciebie, rzuć 1 kością ataku. Jeśli wypadnie %HIT% lub %CRIT%, wrogi statek otrzymuje 1 uszkodzenie."""
"Targeting Computer":
name: "Komputer celowniczy"
text: """Twój pasek akcji zyskuje symbol akcji %TARGETLOCK%."""
"Hull Upgrade":
name: "Ulepszenie kadłuba"
text: """Zwiększ wartość swojego kadłuba o 1."""
"Munitions Failsafe":
name: "Zabezpieczenie amunicji"
text: """Kiedy atakujesz przy pomocy broni dodatkowej, która nakazuje odrzucenie karty po wykonaniu ataku, nie odrzucasz jej jeśli atak nie trafi."""
"Stygium Particle Accelerator":
name: "Akcelerator cząsteczek stygium"
text: """Kiedy się demaskujesz lub wykonasz akcję maskowanie, możesz wykonać darmową akcję unik."""
"Combat Retrofit":
name: "Modyfikacja bojowa"
text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Zwiększ wartość swojego kadłuba o 2 i wartość swoich osłon o 1."""
ship: "Transport moyen GR-75"
"B-Wing/E2":
text: """<span class="card-restriction">Tylko B-wing.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol rozwinięcia %CREW%."""
"Countermeasures":
name: "Środki profilaktyczne"
text: """%PL_LARGESHIPONLY%%LINEBREAK%Na początku fazy walki możesz odrzucić tę kartę, aby do końca rundy zwiększyć swoją zwrotność o 1. Następnie możesz usunąć ze swojego statku 1 wrogi żeton namierzonego celu."""
"Experimental Interface":
name: "Eksperymentalny interfejs"
text: """Raz na rundę. Po tym jak wykonasz akcję możesz wykonać 1 darmową akcję z karty rozwinięcia z nagłówkiem <strong>Akcja:</strong>, w którą jesteś wyposażony. Następnie otrzymujesz 1 żeton stresu."""
"Tactical Jammer":
name: "Zakłócacz taktyczny"
text: """%PL_LARGESHIPONLY%%LINEBREAK%Twój statek może przyblokowywać wrogie ataki."""
"Autothrusters":
name: "Autodopalacze"
text: """Kiedy się bronisz, jeśli jesteś poza Zasięgiem 2 albo znajdujesz się poza polem rażenia atakującego, możesz zmienić 1 ze swoich pustych wyników na %EVADE%. Możesz wyposażyć swój statek w tę kartę tylko jeśli masz symbol akcji %BOOST%."""
"Twin Ion Engine Mk. II":
text: """You may treat all bank maneuvers (%BANKLEFT% and %BANKRIGHT%) as green maneuvers."""
"Maneuvering Fins":
text: """When you reveal a turn maneuver (%TURNLEFT% or %TURNRIGHT%), you may rotate your dial to the corresponding bank maneuver (%BANKLEFT% or %BANKRIGHT%) of the same speed."""
"Ion Projector":
text: """%LARGESHIPONLY%%LINEBREAK%After an enemy ship executes a maneuver that causes it to overlap your ship, roll 1 attack die. On a %HIT% or %CRIT% result, the enemy ship receives 1 ion token."""
title_translations =
"Slave I":
text: """<span class="card-restriction">Tylko Firespray-31.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol %TORPEDO%."""
"<NAME>ium <NAME>":
name: "<NAME>"
text: """<span class="card-restriction">Tylko YT-1300.</span>%LINEBREAK% Twój pasek akcji zyskuje symbol akcji %EVADE%."""
"<NAME>":
text: """<span class="card-restriction">Tylko HWK-290.</span>%LINEBREAK%Podczas fazy końcowej nie usuwaj ze swojego statku niewykorzystanych żetonów skupienia."""
"ST-321":
ship: "Navette de classe Lambda"
text: """<span class="card-restriction">Navette de classe <em>Lambda</em> uniquement.</span>%LINEBREAK%Quand vous verrouillez une cible, vous pouvez verrouiller n'importe quel vaisseau ennemi situé dans la zone de jeu."""
"Royal Guard TIE":
ship: "TIE Interceptor"
name: "<NAME>"
text: """<span class="card-restriction">Tylko TIE Interceptor.</span>%LINEBREAK%Możesz dołączyć do swojego statku maksymalnie 2 różne karty Modyfikacji (zamiast 1). Nie możesz dołączyć tej karty do swojego statku, jeśli wartość umiejętności pilota wynosi "4" lub mniej."""
"<NAME>":
name: "<NAME>"
ship: "Korweta CR90 (dziób)"
text: """<span class="card-restriction">Tylko sekcja dziobowa CR90.</span>%LINEBREAK%Kiedy wykonujesz akcję "Koordynacja", możesz wybrać 2 przyjazne statki (zamiast 1). Statki te mogą wykonać po 1 darmowej akcji."""
"A-Wing Test Pilot":
name: "Pilot testowy <NAME>-<NAME>"
text: """<span class="card-restriction">Tylko A-wing.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol rozwinięcia %ELITE%. Nie możesz wyposażyć się w 2 takie same karty rozwinięcia [elite talent]. Nie możesz wyposażyć się w tę kartę, jeśli twoja wartość umiejętności pilota wynosi "1" lub mniej."""
"Tantive IV":
ship: "Korweta CR90 (dziób)"
text: """<span class="card-restriction">Tylko sekcja dziobowa CR90.</span>%LINEBREAK%Twój pasek rozwinięć sekcji dziobowej zyskuje po 1 symbolu rozwinięcia %CREW% i %TEAM%."""
"Bright Hope":
ship: "Średni transportowiec GR-75"
text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Żetony wsparcia przypisane do twojej sekcji dziobowej dostają 2 wyniki %EVADE% (zamiast 1)."""
"Quantum Storm":
ship: "Średni transportowiec GR-75"
text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Na początku fazy końcowej, jeśli masz nie więcej niż 1 żeton energi, otrzymujesz 1 żeton energii."""
"Dutyfree":
ship: "Średni transportowiec GR-75"
text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Kiedy wykonujesz akcję Zakłócenie, możesz wybrać wrogi statek w Zasięgu 1-3 (zamiast Zasięgu 1-2)."""
"Jaina's Light":
ship: "Korweta CR90 (dziób)"
text: """<span class="card-restriction">Tylko sekcja dziobowa CR90.</span>%LINEBREAK%Kiedy się bronisz, raz na atak, jeśli otrzymujesz odkrytą kartę uszkodzenia, możesz ją odrzucić i dobrać nową odkrytą kartę uszkodzenia."""
"Outrider":
text: """<span class="card-restriction">Tylko YT-2400.</span>%LINEBREAK%Dopóki jesteś wyposażony w kartę rozwinięcia [cannon], nie możesz wykonywać ataków bronią podstawową. Przy pomocy dodatkowej broni [cannon] możesz wykonywać ataki skierowane przeciwko statkom znajdujacym się poza twoim polem rażenia. """
"Dauntless":
ship: "Décimateur VT-49"
text: """<span class="card-restriction">Tylko Decimator VT-49.</span>%LINEBREAK%Po tym jak wykonasz manewr, który sprawi że będziesz nachodzić na inny statek, możesz wykonać 1 darmową akcję. Następnie otrzymujesz 1 żeton stresu."""
"Virago":
text: """<span class="card-restriction">Tylko StarViper.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbole rozwinięć %SYSTEM% i %ILLICIT%. Nie możesz wyposażyć swojego statku w tę kartę jeśli wartość umiejętności twojego pilota wynosi „3” lub mniej."""
'"Heavy Scyk" Interceptor (Cannon)':
name: 'Interceptor typu Heavy Scyk (Działo)'
ship: "Interceptor M3-A"
text: """<span class="card-restriction">Tylko Interceptor M3-A.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje jeden z symboli rozwinięć: %CANNON%, %TORPEDO% lub %MISSILE%."""
'"Heavy Scyk" Interceptor (Torpedo)':
name: 'Interceptor typu Heavy Scyk (Torpeda)'
ship: "Interceptor M3-A"
text: """<span class="card-restriction">Tylko Interceptor M3-A.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje jeden z symboli rozwinięć: %CANNON%, %TORPEDO% lub %MISSILE%."""
'"Heavy Scyk" Interceptor (Missile)':
name: 'Intercepteur "<NAME>" (Rakieta)'
ship: "Interceptor M3-A"
text: """<span class="card-restriction">Tylko Interceptor M3-A.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje jeden z symboli rozwinięć: %CANNON%, %TORPEDO% lub %MISSILE%."""
"IG-2000":
text: """<span class="card-restriction">Tylko Aggressor.</span>%LINEBREAK%Masz zdolność pilota każdego innego przyjaznego statku z kartą ulepszenia IG-2000 (jako dodatek do swojej zdolności pilota)."""
"BTL-A4 Y-Wing":
text: """<span class="card-restriction">Tylko Y-wing.</span>%LINEBREAK%Nie możesz atakować statków znajdujących się poza twoim polem rażenia. Po wykonaniu ataku przy pomocy broni podstawowej, możesz natychmiast wykonać atak przy pomocy dodatkowej broni %TURRET%."""
"Andrasta":
text: """<span class="card-restriction">Tylko Firespray-31.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje dwa symbole %BOMB%."""
"TIE/x1":
text: """<span class="card-restriction">Tylko TIE Advanced.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol rozwinięcia %SYSTEM%. %LINEBREAK%Koszt przypisanej do tego statku karty rozwinięcia %SYSTEM% jest obniżony o 4 punkty (do minimum 0)."""
"Ghost":
text: """<span class="card-restriction">VCX-100 only.</span>%LINEBREAK%Equip the <em>Phantom</em> title card to a friendly Attack Shuttle and dock it to this ship.%LINEBREAK%After you execute a maneuver, you may deploy it from your rear guides."""
"Phantom":
text: """While you are docked, the <em>Ghost</em> can perform primary weapon attacks from its special firing arc, and, at the end of the Combat phase, it may perform an additional attack with an equipped %TURRET%. If it performs this attack, it cannot attack again this round."""
"TIE/v1":
text: """<span class="card-restriction">TIE Advanced Prototype only.</span>%LINEBREAK%After you acquire a target lock, you may perform a free evade action."""
"<NAME>":
text: """<span class="card-restriction">G-1A starfighter only.</span>%LINEBREAK%Your upgrade bar gains the %BARRELROLL% Upgrade icon.%LINEBREAK%You <strong>must</strong> equip 1 "Tractor Beam" Upgrade card (paying its squad point cost as normal)."""
"Punishing One":
text: """<span class="card-restriction">JumpMaster 5000 only.</span>%LINEBREAK%Increase your primary weapon value by 1."""
    # Merge the translated pilot/upgrade/modification/title data into the
    # global card database built from basic_cards.
    exportObj.setupCardData basic_cards, pilot_translations, upgrade_translations, modification_translations, title_translations
# Export scope: CommonJS `exports` when present, otherwise the global object.
# (Removed stray "| true |" residue that made this line invalid CoffeeScript.)
exportObj = exports ? this

# Map the ISO language code to the display name used as a translation key.
exportObj.codeToLanguage ?= {}
exportObj.codeToLanguage.pl = 'Polski'

# Per-language UI translation tables; created on first use.
exportObj.translations ?= {}
# This is here mostly as a template for other languages.
# Polish UI translation table: action names, upgrade-slot names, expansion
# (source) names, UI strings/templates, and CSS-selector keyed page text.
exportObj.translations['Polski'] =
    action:
        "Barrel Roll": "Beczka"
        "Boost": "Dopalacz"
        "Evade": "Unik"
        "Focus": "Skupienie"
        "Target Lock": "Namierzenie celu"
        "Recover": "Naprawa"
        "Reinforce": "Umocnienie"
        "Jam": "Zakłócanie"
        "Coordinate": "Koordynacja"
        "Cloak": "Maskowanie"
    slot:
        "Astromech": "Astromech"
        "Bomb": "Bomba"
        "Cannon": "Działo"
        "Crew": "Załoga"
        "Elite": "Talent elitarny"
        "Missile": "Rakiety"
        "System": "System"
        "Torpedo": "Torpedy"
        "Turret": "Wieżyczka"
        "Cargo": "Ładunek"
        "Hardpoint": "Punkt konstrukcyjny"
        "Team": "Drużyna"
        "Illicit": "Kontrabanda"
        "Salvaged Astromech": "Złomowane astromechy"
    sources: # needed?
        "Core": "Zestaw Podstawowy"
        "A-Wing Expansion Pack": "Zestaw dodatkowy A-Wing"
        "B-Wing Expansion Pack": "Zestaw dodatkowy B-Wing"
        "X-Wing Expansion Pack": "Zestaw dodatkowy X-Wing"
        "Y-Wing Expansion Pack": "Zestaw dodatkowy Y-Wing"
        "Millennium Falcon Expansion Pack": "Zestaw dodatkowy Sokół Millennium"
        "HWK-290 Expansion Pack": "Zestaw dodatkowy HWK-290"
        "TIE Fighter Expansion Pack": "Zestaw dodatkowy Myśliwiec TIE"
        "TIE Interceptor Expansion Pack": "Zestaw dodatkowy TIE Interceptor"
        "TIE Bomber Expansion Pack": "Zestaw dodatkowy Bombowiec TIE"
        "TIE Advanced Expansion Pack": "Zestaw dodatkowy TIE Advanced"
        "Lambda-Class Shuttle Expansion Pack": "Zestaw dodatkowy Prom typu Lambda"
        "Slave I Expansion Pack": "Zestaw dodatkowy Slave I"
        "Imperial Aces Expansion Pack": "Zestaw dodatkowy Asy Imperium"
        "Rebel Transport Expansion Pack": "Zestaw dodatkowy Rebeliancki transportowiec"
        "Z-95 Headhunter Expansion Pack": "Zestaw dodatkowy Z-95 Łowca Głów"
        "TIE Defender Expansion Pack": "Zestaw dodatkowy TIE Defender"
        "E-Wing Expansion Pack": "Zestaw dodatkowy E-Wing"
        "TIE Phantom Expansion Pack": "Zestaw dodatkowy TIE Phantom"
        "Tantive IV Expansion Pack": "Zestaw dodatkowy Tantive IV"
        "Rebel Aces Expansion Pack": "Zestaw dodatkowy Asy Rebelii"
        "YT-2400 Freighter Expansion Pack": "Zestaw dodatkowy YT-2400"
        "VT-49 Decimator Expansion Pack": "Zestaw dodatkowy Decimator VT-49"
        "StarViper Expansion Pack": "Zestaw dodatkowy StarViper"
        "M3-A Interceptor Expansion Pack": "Zestaw dodatkowy M3-A Interceptor"
        "IG-2000 Expansion Pack": "Zestaw dodatkowy IG-2000"
        "Most Wanted Expansion Pack": "Zestaw dodatkowy Poszukiwani"
        "Imperial Raider Expansion Pack": "Zestaw dodatkowy Imperialny Patrolowiec"
    ui:
        shipSelectorPlaceholder: "Wybór statków"
        pilotSelectorPlaceholder: "Wybór pilotów"
        upgradePlaceholder: (translator, language, slot) ->
            "#{translator language, 'slot', slot}"
        modificationPlaceholder: "Modyfikacje"
        titlePlaceholder: "Tytuł"
        # was leftover French "Amélioration #{...}"
        upgradeHeader: (translator, language, slot) ->
            "Ulepszenie #{translator language, 'slot', slot}"
        unreleased: "niewydane"
        epic: "epickie"
    byCSSSelector:
        # Card browser
        '.xwing-card-browser .translate.sort-cards-by': 'Sortuj karty po'
        '.xwing-card-browser option[value="name"]': 'nazwie'
        '.xwing-card-browser option[value="source"]': 'źródle'
        '.xwing-card-browser option[value="type-by-points"]': 'typie (po punktach)'
        '.xwing-card-browser option[value="type-by-name"]': 'typie (po nazwie)'
        '.xwing-card-browser .translate.select-a-card': 'Wybierz kartę z listy po prawej'
        '.xwing-card-browser .info-range td': 'Zasięg'
        # Info well
        '.info-well .info-ship td.info-header': 'Statek'
        '.info-well .info-skill td.info-header': 'Umiejętność pilota'
        '.info-well .info-actions td.info-header': 'Akcje'
        '.info-well .info-upgrades td.info-header': 'Ulepszenia'
        '.info-well .info-range td.info-header': 'Zasięg'
        # Squadron edit buttons
        '.clear-squad' : 'Wyczyść eskadrę'
        '.save-list' : 'Zapisz'
        '.save-list-as' : 'Zapisz jako ...'
        '.delete-list' : 'Usuń'
        '.backend-list-my-squads' : 'Lista eskadr'
        '.view-as-text' : '<span class="hidden-phone"><i class="icon-print"></i> Drukuj / Wyświetl jako </span>Tekst'
        '.randomize' : 'randomizuj'
        '.randomize-options' : 'Opcje ...'
        # Print/View modal
        '.bbcode-list' : 'Skopiuj BBCode poniżej i wklej go do swojego posta.<textarea></textarea>'
        '.vertical-space-checkbox' : """Dodaj miejsce na karty ulepszeń / uszkodzeń podczas drukowania <input type="checkbox" class="toggle-vertical-space" />"""
        '.color-print-checkbox' : """Wydrukuj w kolorze <input type="checkbox" class="toggle-color-print" />"""
        '.print-list' : '<i class="icon-print"></i> Drukuj'
        # Randomizer options
        '.do-randomize' : 'Generuj'
        # Top tab bar
        '#empireTab' : 'Imperium Galaktyczne'
        '#rebelTab' : 'Sojusz Rebeliancki'
        '#scumTab' : 'Szumowiny i Nikczemnicy'
        '#browserTab' : 'Przeglądarka kart'
        '#aboutTab' : 'O stronie'
    singular:
        'pilots': 'Pilot'
        'modifications': 'Modyfikacja'
        'titles': 'Tytuł'
    types:
        'Pilot': 'Pilot'
        'Modification': 'Modyfikacja'
        'Title': 'Tytuł'
# Registry of per-language card-data loader functions; created on first use.
exportObj.cardLoaders ?= {}
exportObj.cardLoaders['Polski'] = () ->
exportObj.cardLanguage = 'Polski'
# Assumes cards-common has been loaded
basic_cards = exportObj.basicCardData()
exportObj.canonicalizeShipNames basic_cards
# English names are loaded by default, so no update is needed
exportObj.ships = basic_cards.ships
# Rename ships
exportObj.renameShip 'TIE Fighter', 'Myśliwiec TIE'
exportObj.renameShip 'TIE Bomber', 'Bombowiec TIE'
exportObj.renameShip 'Lambda-Class Shuttle', 'Prom typu Lambda'
exportObj.renameShip 'VT-49 Decimator', 'Decimator VT-49'
exportObj.renameShip 'Z-95 Headhunter', 'Z-95 Łowca głów'
exportObj.renameShip 'CR90 Corvette (Aft)', 'Korweta CR90 (rufa)'
exportObj.renameShip 'CR90 Corvette (Fore)', 'Corvette CR90 (dziób)'
exportObj.renameShip 'GR-75 Medium Transport', 'Średni transportowiec GR-75'
# Names don't need updating, but text needs to be set
pilot_translations =
# Empire
# Myśliwiec TIE
"Academy Pilot":
ship: "Myśliwiec TIE"
name: "Pilot z Akademii"
"Obsidian Squadron Pilot":
ship: "Myśliwiec TIE"
name: "Pilot PI:NAME:<NAME>END_PI"
"Black Squadron Pilot":
ship: "Myśliwiec TIE"
name: "Pilot PI:NAME:<NAME>END_PI CPI:NAME:<NAME>END_PI"
'"PI:NAME:<NAME>END_PI"':
name: "PI:NAME:<NAME>END_PI"
ship: "Myśliwiec TIE"
text: """Kiedy atakujesz w Zasięgu 1, możesz zmienić 1 ze swoich wyników %HIT% na wynik %CRIT%."""
'"Night Beast"':
name: "PI:NAME:<NAME>END_PI"
ship: "Myśliwiec TIE"
text: """Po wykonaniu zielonego manewru możesz wykonać darmową akcję skupienia."""
'"Backstabber"':
ship: "Myśliwiec TIE"
text: """Kiedy atakujesz będąc poza polem rażenia broniącego się statku, rzucasz 1 dodatkową kością ataku."""
'"Dark Curse"':
name: "PI:NAME:<NAME>END_PI"
ship: "Myśliwiec TIE"
text: """Kiedy się bronisz statki które cię atakują nie mogą wydawać żetonów skupienia ani przerzucać kości ataku."""
'"PI:NAME:<NAME>END_PI"':
ship: "Myśliwiec TIE"
text: """Kiedy atakujesz w Zasięgu 1, rzucasz 1 dodatkową kością ataku."""
'"PI:NAME:<NAME>END_PI"':
ship: "Myśliwiec TIE"
text: """Kiedy inny przyjazny statek w zasięgu 1 atakuje przy pomocy swojej podstawowej broni, może przerzucić 1 kość ataku."""
# TIE Interceptor
"Alpha Squadron Pilot":
name: "Pilot PI:NAME:<NAME>END_PI"
"Avenger Squadron Pilot":
name: "Pilot PI:NAME:<NAME>END_PI"
"Saber Squadron Pilot":
name: "Pilot PI:NAME:<NAME>END_PI"
"Royal Guard Pilot":
name: "Pilot imperialny gwardzista"
"\"PI:NAME:<NAME>END_PI\"":
name: "\"PI:NAME:<NAME>END_PI\""
text: """Zostajesz zniszczony dopiero pod koniec fazy walki, w której liczba kart uszkodzeń przypisanych do ciebie będzie równa, lub wyższa od twojej wartości kadłuba."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy wykonujesz akcję "beczka", możesz otrzymać 1 żeton stresu, aby zamiast wzornika manewru (%STRAIGHT% 1) użyć (%BANKLEFT% 1) lub (%BANKRIGHT% 1)."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy atakujesz w Zasięgu 2-3, możesz wydać 1 żeton uników, aby dodać 1 %HIT% do swojego wyniku."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy ujawnisz manewr %UTURN% możesz traktować go tak, jakby jego prędkość wynosiła "1", "3" lub "5"."""
"PI:NAME:<NAME>END_PI":
text: """Po wykonaniu ataku możesz wykonać darmową akcję "dopalacz" lub "beczka"."""
"PI:NAME:<NAME>END_PI":
text: """Wrogie statki w Zasięgu 1 nie mogą wykonywać akcji "skupienie" oraz "unik", ani wydawać żetonów skupienia i uników."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy otrzymujesz żeton stresu, możesz przypisać do swojego statku 1 żeton skupienia."""
# TIE Phantom
"Sigma Squadron Pilot":
name: "Pilot Eskadry Sigma"
"Shadow Squadron Pilot":
name: "Pilot Eskadry Cieni"
'"Echo"':
text: """Kiedy się demaskujesz musisz użyć wzornika manewru (%BANKLEFT% 2) lub (%BANKRIGHT% 2) zamiast wzornika (%STRAIGHT% 2)."""
'"Whisper"':
name: "PI:NAME:<NAME>END_PI"
text: """Po tym jak wykonasz atak, który trafi cel, możesz przypisać do swojego statku 1 żeton skupienia."""
# TIE Defender
"Onyx Squadron Pilot":
name: "Pilot Eskadry Onyx"
"Delta Squadron Pilot":
name: "Pilot Eskadry Delta"
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy atakujesz, zaraz po swoim rzucie kośćmi ataku możesz namierzyć obrońcę, jeśli ma na sobie czerwony żeton namierzonego celu."""
"PI:NAME:<NAME>END_PI":
text: """Po tym jak wykonasz atak, który zada obrońcy co najmniej jedną kartę uszkodzenia, możesz wydać żeton skupienia aby odkryć te karty."""
# Bombowiec TIE
"Scimitar Squadron Pilot":
ship: "Bombowiec TIE"
name: "Pilot Eskadry Sejmitarów"
"Gamma Squadron Pilot":
ship: "Bombowiec TIE"
name: "Pilot Eskadry Gamma"
"PI:NAME:<NAME>END_PI":
ship: "Bombowiec TIE"
name: "PI:NAME:<NAME>END_PI"
text: "Kiedy inny przyjazny statek w Zasięgu 1 atakuje przy pomocy dodatkowej broni, może przerzucić maksymalnie 2 kości ataku."
"Major Rhymer":
ship: "Bombowiec TIE"
text: "Kiedy atakujesz przy pomocy dodatkowej broni, możesz zwiększyć lub zmniejszyć zasięg broni o 1. Musisz przy tym zachować limit zasięgu 1-3."
# TIE Advanced
"Tempest Squadron Pilot":
name: "Pilot PI:NAME:<NAME>END_PI"
"Storm Squadron Pilot":
name: "Pilot Eskadry Szturmu"
"PI:NAME:<NAME>END_PI":
text: """Kiedy twój atak zadaje obrońcy odkrytą kartę uszkodzenia, wylosuj 3 karty uszkodzeń, wybierz 1 z nich, którą zadajesz, a pozostałe odrzuć."""
"PI:NAME:<NAME>END_PI":
text: """Podczas swojego kroku "Wykonywania akcji" możesz wykonać 2 akcje."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """Na początku fazy walki możesz namierzyć wrogi statek znajdujący się w Zasięgu 1 od ciebie."""
# Firespray
"PI:NAME:<NAME>END_PI":
name:"PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI":
text: """Kiedy atakujesz, obrońca otrzymuje 1 żeton stresu, jeśli anuluje co najmniej jeden wynik %CRIT%."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy ujawniasz manewr skrętu (%BANKLEFT% lub %BANKRIGHT%) możesz przestawić swój wskaźnik manewrów na drugi manewr skrętu o tej samej prędkości."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy atakujesz przy pomocy dodatkowej broni, możesz przerzucić 1 kość ataku."""
# Prom typu Lambda
"PI:NAME:<NAME>END_PI":
ship: "Prom typu Lambda"
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy wrogi statek namierza cel, musi namierzyć twój statek, jeśli to możliwe."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
ship: "Prom typu Lambda"
text: """Na początku fazy walki możesz przypisać 1 ze swoich niebieskich żetonów namierzonego celu do przyjaznego statku w Zasięgu 1, jeśli ten nie ma niebieskiego żetonu namierzonego celu."""
"PI:NAME:<NAME>END_PI":
ship: "Prom typu Lambda"
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy inny przyjazny statek w Zasięgu 1-2 ma otrzymać żeton stresu, gdy ty masz 2 lub mniej żetonów stresu, możesz przypisać do siebie ten żeton stresu."""
"Omicron Group Pilot":
ship: "Prom typu Lambda"
name: "Pilot grupy Omicron"
# VT-49 Decimator
"PI:NAME:<NAME>END_PI":
ship: "Decimator VT-49"
name: "PI:NAME:<NAME>END_PI"
text: """Po wykonaniu manewru, każdy wrogi statek z którym się stykasz, otrzymuje 1 uszkodzenie."""
"PI:NAME:<NAME>END_PI":
ship: "Decimator VT-49"
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy atakujesz w Zasięgu 1-2, możesz zmienić jeden ze swoich wyników %FOCUS% na wynik %CRIT%."""
"PI:NAME:<NAME>END_PI":
ship: "Decimator VT-49"
name: "PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI":
ship: "Decimator VT-49"
name: "PI:NAME:<NAME>END_PI"
text: """Jeśli nie masz żadnych osłon i masz przypisaną co najmniej 1 kartę uszkodzenia, wartość twojej zwrotności wzrasta o 1."""
"PI:NAME:<NAME>END_PI":
text: """When you reveal your maneuver, you may increase or decrease its speed by 1 (to a minimum of 1)."""
"PI:NAME:<NAME>END_PI":
text: """Enemy ships at Range 1 cannot add their range combat bonus when attacking."""
"PI:NAME:<NAME>END_PI":
text: """At the start of the End phase, you may spend a target lock you have on an enemy ship to flip 1 random facedown Damage card assigned to it faceup."""
# Rebels
# X-Wing
"Red Squadron Pilot":
name: "Pilot PI:NAME:<NAME>END_PI"
"Rookie Pilot":
name: "Niedoświadczony pilot"
"PI:NAME:<NAME>END_PI":
text: """Kiedy atakujesz zredukuj wartość zwrotności obrońcy o 1 (do minimum 0)."""
"PI:NAME:<NAME>END_PI":
text: """Po tym jak wydasz żeton skupienia możesz umieścić dany żeton na dowolnym innym przyjaznym statku w Zasięgu 1-2 (zamiast go odrzucać)."""
"PI:NAME:<NAME>END_PI":
text: """Inne przyjazne statki w Zasięgu 1 nie mogą być wybierane na cel ataku, jeśli atakujący może na cel wybrać ciebie."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy się bronisz, możesz zmienić 1 ze swoich wyników %FOCUS% na wynik %EVADE%."""
"PI:NAME:<NAME>END_PI":
text: """Po wykonaniu ataku możesz usunąć z obrońcy 1 żeton skupienia, uników, lub niebieski żeton namierzonego celu."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy otrzymujesz żeton stresu możesz usunąć go i rzucić 1 kością ataku. Jeśli wypadnie %HIT%, ten statek otrzymuje 1 zakrytą kartę uszkodzenia."""
'"PI:NAME:<NAME>END_PI" PI:NAME:<NAME>END_PI':
text: """Kiedy zdobywasz lub wydajesz żeton namierzonego celu, możesz usunąć ze swojego statku 1 żeton stresu."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy wrogi statek wybiera cię na cel ataku, możesz namierzyć ten statek."""
# Y-Wing
"Gold Squadron Pilot":
name: "Pilot Eskadry Złotych"
"Gray Squadron Pilot":
name: "Pilot Eskadry Szarych"
'"PI:NAME:<NAME>END_PI" PI:NAME:<NAME>END_PI':
text: """Po namierzeniu celu wybierz przyjazny statek w Zasięgu 1-2. Wybrany statek może natychmiast namierzyć cel."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy atakujesz w Zasięgu 2-3, możesz przerzucić dowolne ze swoich kości, na których wypadły puste ścianki."""
# A-Wing
"Green Squadron Pilot":
name: "Pilot PI:NAME:<NAME>END_PI"
"Prototype Pilot":
name: "Pilot prototypu"
"PI:NAME:<NAME>END_PI":
text: """Możesz wykonywać akcje nawet jeśli posiadasz żetony stresu."""
"PI:NAME:<NAME>END_PI":
text: """Możesz wybrać na cel swojego ataku wrogi statek, z którym się stykasz, jeżeli ten znajduje się w twoim polu rażenia."""
# YT-1300
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI":
text: """Kiedy otrzymujesz odkrytą kartę uszkodzenia, natychmiast ją zakryj (bez rozpatrywania jej efektu)."""
"PI:NAME:<NAME>END_PI":
text: """Po wykonaniu zielonego manewru wybierz jeden inny przyjazny statek w Zasięgu 1. Statek ten może wykonać 1 darmową akcję przedstawioną na jego pasku akcji."""
"Han PI:NAME:<NAME>END_PI":
text: """Kiedy atakujesz możesz przerzucić wszystkie swoje kości ataku. Jeśli zdecydujesz się to zrobić musisz przerzucić tyle ze swoich kości, ile możesz."""
# B-Wing
"Dagger Squadron Pilot":
name: "Pilot EskPI:NAME:<NAME>END_PI"
"Blue Squadron Pilot":
name: "Pilot PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI":
text: """Kiedy atakujesz, 1 z twoich wyników [crt-hit] nie może być anulowany przy pomocy kości obrony."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy atakujesz lub się bronisz mając co najmniej 1 żeton stresu, możesz przerzucić jedną ze swoich kości."""
# HWK-290
"Rebel Operative":
name: "Agent rebeliantów"
"PI:NAME:<NAME>END_PI":
text: '''Na początku fazy walki wybierz 1 inny przyjazny statek w zasięgu 1-3. Do końca tej fazy traktuj wartość umiejętności tego pilota jakby wynosiła "12".'''
"PI:NAME:<NAME>END_PI":
text: """Na początku fazy walki możesz przypisać 1 ze swoich żetonów skupienia do innego przyjaznego statku w Zasięgu 1-3."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy inny przyjazny statek w Zasięgu 1-3 atakuje, gdy nie masz żadnych żetonów stresu, możesz otrzymać 1 żeton stresu aby umożliwić mu rzut 1 dodatkową kością ataku."""
# Z-95
"Bandit Squadron Pilot":
name: "Pilot PI:NAME:<NAME>END_PIskPI:NAME:<NAME>END_PI BandPI:NAME:<NAME>END_PI"
ship: "Z-95 Łowca głów"
"Tala Squadron Pilot":
name: "Pilot EskadPI:NAME:<NAME>END_PI TPI:NAME:<NAME>END_PI"
ship: "Z-95 Łowca głów"
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
ship: "Z-95 Łowca głPI:NAME:<NAME>END_PI"
text: "Kiedy atakujesz, obrońca zostaje trafiony twoim atakiem nawet jeśli nie otrzymał żadnych uszkodzeń."
"PI:NAME:<NAME>END_PI":
ship: "Z-95 Łowca głów"
text: """Po wykonaniu ataku możesz wybrać inny przyjazny statek w Zasięgu 1. Dany statek może wykonać 1 darmową akcję."""
# E-Wing
"Knave Squadron Pilot":
name: "Pilot PI:NAME:<NAME>END_PI"
"Blackmoon Squadron Pilot":
name: "Pilot PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI":
text: """Kiedy wrogi statek w twoim polu rażenia, w Zasięgu 1-3 się broni, atakujący może zmienić 1 z jego wyników %HIT% na wynik %CRIT%."""
"PI:NAME:<NAME>END_PI":
text: """Na początku fazy końcowej możesz wykonać jeden atak. Nie możesz atakować w następnej rundzie."""
    # Rebel Aces
"PI:NAME:<NAME>END_PI":
text: """Po tym jak wykonasz akcję skupienia lub zostanie ci przypisany żeton skupienia, możesz wykonać darmową akcję "dopalacz" lub "beczka"."""
"PI:NAME:<NAME>END_PI":
text: """Dopóki znajdujesz się w Zasięgu 1 od co najmniej 1 wrogiego statku, zwiększ swoją wartość zwrotności o 1."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy atakujesz możesz usunąć 1 żeton stresu aby zmienić wszystkie swoje wyniki %FOCUS% na %HIT%."""
"PI:NAME:<NAME>END_PI":
text: """Możesz wykonać atak dodatkową bronią %TORPEDO%, skierowany przeciwko wrogim statkom znajdującym się poza twoim polem rażenia."""
    # Rebel Transport
"GR-75 Medium Transport":
name: "Średni transportowiec GR-75"
ship: "Średni transportowiec GR-75"
# CR90
"CR90 Corvette (Fore)":
ship: "Korweta CR90 (dziób)"
name: "Korweta CR90 (dziób)"
text: """Kiedy atakujesz przy pomocy swojej głównej broni, możesz wydać 1 żeton energii aby rzucać 1 dodatkową kością ataku."""
"CR90 Corvette (Aft)":
ship: "Korweta CR90 (rufa)"
name: "Korweta CR90 (rufa)"
# YT-2400
"Dash Rendar":
text: """Podczas fazy aktywacji i w czasie wykonywania akcji możesz ignorować przeszkody."""
'"Leebo"':
text: """Kiedy otrzymujesz odkrytą kartę uszkodzenia, dobierz 1 dodatkową kartę uszkodzenia. Rozpatrz jedną z nich a drugą odrzuć."""
"Eaden Vrill":
text: """Podczas wykonywania ataku przy pomocy broni podstawowej, którego celem jest statek z żetonem stresu, rzucasz 1 dodatkową kością."""
"Wild Space Fringer":
name: "Outsider z DPI:NAME:<NAME>END_PIi"
# Scum and Villainy
# Starviper
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy się bronisz, przyjazny statek w Zasięgu 1 może otrzymać 1 nieanulowany wynik %HIT% lub %CRIT% (zamiast ciebie)."""
"PI:NAME:<NAME>END_PI":
text: """Na początku fazy walki, jeśli jesteś w Zasięgu 1 od wrogiego statku, możesz przypisać do swojego statku 1 żeton skupienia."""
"Black Sun Vigo":
name: "PI:NAME:<NAME>END_PI"
"Black Sun Enforcer":
name: "PI:NAME:<NAME>END_PI"
# Interceptor M3-A
"PI:NAME:<NAME>END_PI SpPI:NAME:<NAME>END_PI":
name: "Pilot kPI:NAME:<NAME>END_PIu"
"PI:NAME:<NAME>END_PIarii Point Veteran":
name: "PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PI":
text: """Kiedy inny przyjazny statek w Zasięgu 1 się broni, może przerzucić 1 kość obrony."""
"PI:NAME:<NAME>END_PI":
text: "Po tym jak obronisz się przed atakiem, jeśli atak nie trafił, możesz przypisać do swojego statku 1 żeton uniku."
    # Aggressor
"IG-88A":
text: """Po tym jak wykonasz atak, który zniszczy obrońcę, możesz odzyskać 1 osłonę."""
"IG-88B":
        text: """Raz na rundę, po tym jak wykonasz atak, który nie trafi w wybrany cel, możesz wykonać atak przy pomocy dodatkowej broni %CANNON%, w którą jesteś wyposażony."""
"IG-88C":
text: """Po tym jak wykonasz akcję „dopalacz” możesz przypisać do swojego statku 1 żeton uniku."""
"IG-88D":
text: """Możesz wykonać manewr (%SLOOPLEFT% 3) lub (%SLOOPRIGHT% 3) używając odpowiednio wzornika (%TURNLEFT% 3) lub (%TURNRIGHT% 3)."""
# Firespray
"PI:NAME:<NAME>END_PI (Scum)":
name: """PI:NAME:<NAME>END_PI (Szumowiny)"""
text: """Kiedy atakujesz lub się bronisz możesz przerzucić 1 ze swoich kości za każdy wrogi statek w Zasięgu 1."""
"PI:NAME:<NAME>END_PI (Scum)":
name: """PI:NAME:<NAME>END_PI (Szumowiny)"""
text: """Kiedy atakujesz statek znajdujący się w twoim pomocniczym polu rażenia, rzucasz 1 dodatkową kością ataku."""
"PI:NAME:<NAME>END_PI":
text: """Kiedy zrzucasz bombę, możesz użyć wzornika [%TURNLEFT% 3], [%STRAIGHT% 3], lub [%TURNRIGHT% 3] (zamiast wzornika [%STRAIGHT% 1])."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
# Y-Wing
"PI:NAME:<NAME>END_PI":
text: """Kiedy atakujesz statek znajdujący się poza twoim polem rażenia, rzucasz 1 dodatkową kością ataku."""
"PI:NAME:<NAME>END_PI":
text: """Po tym jak wydasz żeton namierzonego celu, możesz otrzymać 1 żeton stresu, aby namierzyć cel."""
"PI:NAME:<NAME>END_PI" :
name: "PI:NAME:<NAME>END_PI"
"PI:NAME:<NAME>END_PIdicate Thug":
name: "Zbir z syndykatu"
# HWK
"PI:NAME:<NAME>END_PI":
text: """Kiedy wrogi statek w Zasięgu 1-3 otrzyma co najmniej jeden żeton jonów, a ty nie masz żetonu stresu, możesz otrzymać 1 żeton stresu aby sprawić, żeby dany statek otrzymał 1 uszkodzenie."""
"PI:NAME:<NAME>END_PI":
text: """Na początku fazy walki możesz usunąć 1 żeton skupienia lub uników z wrogiego statku w Zasięgu 1-2 i przypisać go do siebie."""
"PI:NAME:<NAME>END_PI":
text: """Na koniec fazy aktywacji wybierz jeden wrogi statek w Zasięgu 1-2. Do końca fazy walki wartość umiejętności pilota wybranego statku wynosi "0"."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
# Z-95
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
ship: "Z-95 Łowca głów"
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
ship: "Z-95 Łowca głów"
"PI:NAME:<NAME>END_PI":
ship: "Z-95 Łowca głów"
text: """Kiedy atakujesz rzucasz 1 dodatkową kością ataku, jeśli w Zasięgu 1-2 nie ma żadnych innych przyjaznych statków."""
"PI:NAME:<NAME>END_PI":
ship: "Z-95 Łowca głów"
text: """Na początku fazy walki możesz usunąć 1 żeton skupienia lub uników z innego przyjaznego statku w Zasięgu 1-2 i przypisać go do siebie."""
"PI:NAME:<NAME>END_PI":
text: """When a friendly ship declares an attack, you may spend a target lock you have on the defender to reduce its agility by 1 for that attack."""
"GrPI:NAME:<NAME>END_PI":
text: """When defending, if the attacker is inside your firing arc, roll 1 additional defense die."""
"Esege Tuketu":
text: """When another friendly ship at Range 1-2 is attacking, it may treat your focus tokens as its own."""
'"Redline"':
text: """You may maintain 2 target locks on the same ship. When you acquire a target lock, you may acquire a second lock on that ship."""
'"Deathrain"':
text: """When dropping a bomb, you may use the front guides of your ship. After dropping a bomb, you may perform a free barrel roll action."""
"Moralo Eval":
text: """You can perform %CANNON% secondary attacks against ships inside your auxiliary firing arc."""
'Gozanti-class Cruiser':
text: """After you execute a maneuver, you may deploy up to 2 attached ships."""
'"Scourge"':
text: """When attacking a defender that has 1 or more Damage cards, roll 1 additional attack die."""
"The Inquisitor":
text: """When attacking with your primary weapon at Range 2-3, treat the range of the attack as Range 1."""
"Zuckuss":
text: """When attacking, you may roll 1 additional attack die. If you do, the defender rolls 1 additional defense die."""
"Dengar":
        text: """Once per round after defending, if the attacker is inside your firing arc, you may perform an attack against that ship."""
upgrade_translations =
    # Elite talents
"Determination":
name: "Determinacja"
text: """Kiedy otrzymujesz odkrytą kartę uszkodzenia z cechą "Pilot", natychmiast ją odrzuć bez rozpatrywania jej efektu."""
"Swarm Tactics":
name: "Taktyka roju"
text: """Na początku fazy walki wybierz 1 przyjazny statek w Zasięgu 1. Do końca tej fazy traktuj wybrany statek, jakby jego wartość umiejętności pilota była równa twojej."""
"Squad Leader":
name: "PI:NAME:<NAME>END_PI"
text: """<strong>Akcja:</strong> Wybierz 1 statek w Zasięgu 1-2, który ma niższą wartość umiejętności pilota niż ty. %LINEBREAK% Wybrany statek może natychmiast wykonać 1 darmową akcję."""
"Expert Handling":
name: "PI:NAME:<NAME>END_PI"
        text: """<strong>Akcja:</strong> Wykonaj darmową akcję „beczka”. Jeśli nie masz symbolu akcji %BARRELROLL%, otrzymujesz 1 żeton stresu. Następnie możesz usunąć jeden wrogi żeton namierzonego celu znajdujący się na Twoim statku."""
"Marksmanship":
name: "PI:NAME:<NAME>END_PI"
text: """<strong>Akcja:</strong> Kiedy atakujesz w tej rundzie, możesz zamienić 1 swój wynik %FOCUS% na %CRIT%, a wszystkie pozostałe wyniki %FOCUS% na %HIT%."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """<strong>Akcja:</strong> Wykonaj biały manewr (%TURNLEFT% 1) lub (%TURNRIGHT% 1)". Następnie otrzymujesz żeton stresu. %LINEBREAK% Następnie, jeśli nie masz symbolu akcji %BOOST%, rzuć 2 kośćmi ataku. Otrzymujesz wszystkie wyrzucone uszkodzenia %HIT% i uszkodzenia krytyczne %CRIT%."""
"Elusiveness":
name: "PI:NAME:<NAME>END_PI"
        text: """Kiedy się bronisz możesz otrzymać 1 żeton stresu, aby wybrać jedną kość ataku. Atakujący musi przerzucić tą kość. Nie możesz skorzystać z tej zdolności, jeśli masz co najmniej 1 żeton stresu."""
"Push the Limit":
name: "Na granicy ryzyka"
text: """Raz na rundę po wykonaniu akcji, możesz wykonać 1 darmową akcję przedstawioną na twoim pasku akcji. Następnie otrzymujesz 1 żeton stresu."""
"DePI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """Możesz traktować nagłówek <strong>"Atak (namierzony cel):"</strong> jako <strong>"Atak (skupienie):"</strong>. %LINEBREAK% Kiedy atak zmusza cię do wydania żetonu namierzonego celu, możesz zamiast niego wydać żeton skupienia."""
"Expose":
name: "PI:NAME:<NAME>END_PI"
text: """<strong>Akcja:</strong> Do końca rundy zwiększ wartość swojej podstawowej broni o 1 i zmniejsz wartość zwrotności o 1."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """Na początku fazy walki usuń 1 żeton stresu z innego przyjaznego statku w Zasięgu 1."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """Na początku fazy walki możesz wybrać 1 przyjazny statek w Zasięgu 1-2. Do końca fazy zamieniasz się z danym statkiem wartościami umiejętności pilota."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy atakujesz statek w swoim polu rażenia, a nie znajdujesz się w polu rażenia danego statku, zmniejsz jego wartość zwrotności o 1 (do minimum 0)."""
"Predator":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy atakujesz, możesz przerzucić 1 kość ataku. Jeśli wartość umiejętnosci pilota obrońcy wynosi "2" lub mniej, możesz przerzucić maksymalnie 2 kości ataku (zamiast 1)."""
"Draw Their Fire":
name: "PI:NAME:<NAME>END_PI"
        text: """Kiedy przyjazny statek w Zasięgu 1 zostaje trafiony podczas ataku, możesz zamiast niego otrzymać 1 z nieanulowanych %CRIT%."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy ujawnisz czerwony manewr, możesz odrzucić tę kartę, aby do końca fazy aktywacji traktować ten manewr jako biały."""
"Veteran Instincts":
name: "PI:NAME:<NAME>END_PI"
text: """Zwiększ swoją wartość umiejętności pilota o 2."""
"Opportunist":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy atakujesz, jeśli obrońca nie ma żadnych żetonów skupienia ani uników, możesz otrzymać 1 żeton stresu aby rzucić 1 dodatkową kością ataku.%LINEBREAK%Nie możesz skorzystać z tej zdolności, jeśli posiadasz żetony stresu."""
"Lone Wolf":
name : "PI:NAME:<NAME>END_PI"
text: """Jeśli w zasięgu 1-2 nie ma żadnych innych przyjaznych statków, kiedy się bronisz lub atakujesz, możesz przerzucić 1 wynik z pustą ścianką."""
"Stay On Target":
name: "Utrzymać cel"
text: """Kiedy ujawnisz swój manewr możesz obrócić swój wskaźnik na inny manewr o tej samej prędkości.%LINEBREAK%Traktuj ten manewr jako czerwony."""
"Ruthlessness":
name: "Bezwzględność"
text: """%PL_IMPERIALONLY%%LINEBREAK% Po tym jak przeprowadzisz atak, który trafi w cel, musisz wybrać 1 inny statek w Zasięgu 1 od obrońcy (nie siebie). Statek ten otrzymuje 1 uszkodzenie."""
"Intimidation":
name: "Zastraszenie"
text: """Dopóki stykasz się z wrogim statkiem, jego zwrotność zostaje zmniejszona o 1."""
"Calculation":
name: "Kalkulacje"
text: """Kiedy atakujesz, możesz wydać żeton skupienia, aby zmienić jeden ze swoich wyników %FOCUS% na wynik %CRIT%."""
"Bodyguard":
name : "PI:NAME:<NAME>END_PI"
text: """%PL_SCUMONLY%%LINEBREAK% Na początku fazy walki możesz wydać żeton skupienia aby wybrać przyjazny statek w Zasięgu 1 o wartości umiejętności pilota wyższej od ciebie. Do końca rundy zwiększ jego wartość zwrotności o 1."""
# Astromechs
"R2 Astromech":
name: "Astromech R2"
text: """Możesz traktować wszystkie manewry o prędkości 1 i 2, jakby były to zielone manewry."""
"R2-D2":
text: """Po wykonaniu zielonego manewru możesz odzyskać 1 osłonę (nie przekraczając swojej wartości osłon)."""
"R2-F2":
text: """<strong>Akcja:</strong> Do końca tej rundy zwiększ swoją wartość zwrotności o 1."""
"R5-D8":
text: """<strong>Akcja:</strong> Rzuć jedną kością obrony.%LINEBREAK% Jeżeli wypadnie wynik %EVADE% lub %FOCUS%, odrzuć jedną ze swoich zakrytych kart uszkodzeń."""
"R5-K6":
        text: """Po wydaniu swojego żetonu namierzonego celu rzuć 1 kością obrony.%LINEBREAK% Jeżeli wypadnie %EVADE% natychmiast zdobywasz żeton namierzonego celu dla tego samego statku. Nie możesz wydać nowego żetonu namierzonego celu podczas tego ataku."""
"R5 Astromech":
name: "Astromech R5"
text: """Podczas fazy końcowej możesz wybrać 1 ze swoich odkrytych kart z cechą "Statek" i ją zakryć."""
"R7 Astromech":
name: "Astromech R7"
text: """Raz na rundę kiedy się bronisz, jeśli namierzasz atakującego, możesz wydać żeton namierzonego celu aby wybrać dowolną liczbę kości ataku. Atakujący musi przerzucić wybrane kości."""
"R7-T1":
text: """<strong>Akcja:</strong> Wybierz wrogi statek w Zasięgu 1-2. Jeżeli znajdujesz się w polu rażenia wybranego statku, możesz namierzyć dany statek. Następnie możesz wykonać darmową akcję "dopalacz"."""
"R4-D6":
        text: """Kiedy zostaniesz trafiony w wyniku ataku, a pośród wyników rzutu są co najmniej 3 nieanulowane wyniki %HIT% możesz wybrać i anulować wszystkie poza 2. Za każdy wynik anulowany w ten sposób otrzymujesz 1 żeton stresu."""
"R5-P9":
text: """Na koniec fazy walki możesz wydać jeden ze swoich żetonów skupienia, aby odzyskać 1 osłonę (nie przekraczając swojej wartości osłon)."""
"R3-A2":
text: """Kiedy wybierzesz cel ataku, jeżeli obrońca znajduje się w twoim polu rażenia, możesz otrzymać 1 żeton stresu, aby sprawić żeby obrońca otrzymał 1 żeton stresu."""
"R2-D6":
        text: """Twój pasek rozwinięć zyskuje symbol %ELITE%.%LINEBREAK% Nie możesz przypisać tej karty rozwinięcia do swojego statku jeżeli masz już symbol rozwinięcia %ELITE% lub jeżeli wartość umiejętności pilota wynosi 2 lub mniej."""
    # Torpedoes
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz zmienić 1 ze swoich wyników %FOCUS% na wynik %CRIT%."""
"Advanced Proton Torpedoes":
name: "Zaaw. torpedy protonowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz zmienić maksymalnie 3 swoje puste ścianki na wyniki %FOCUS%."""
"Flechette Torpedoes":
name: "Torpedy rozpryskowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Po wykonaniu tego ataku obrońca otrzymuje 1 żeton stresu jeżeli jego wartość kadłuba wynosi 4 lub mniej."""
"Ion Torpedoes":
name: "PI:NAME:<NAME>END_PI jonPI:NAME:<NAME>END_PI"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Jeżeli ten atak trafi w wybrany cel, obrońca oraz każdy statek w Zasięgu 1 od niego otrzymuje 1 żeton jonów."""
"Bomb Loadout":
name: "Ładunek bomb"
text: """<span class="card-restriction">Tylko Y-wing. Ograniczenie.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol %BOMB%."""
    # Turrets
"Ion Cannon Turret":
name: "Wieżyczka z działem jonowym"
text: """<strong>Atak:</strong> Zaatakuj 1 statek (nawet poza twoim polem rażenia). %LINEBREAK%Jeśli atak ten trafi w wybrany statek, otrzymuje on 1 uszkodzenie oraz 1 żeton jonów. Następnie anuluj wszystkie wyniki kości."""
"Blaster Turret":
name: "Wieżyczka blasterowa"
text: """<strong>Atak (skupienie):</strong> Wydaj 1 żeton skupienia, aby zaatakować 1 statek (nawet poza twoim polem rażenia)."""
"Autoblaster Turret":
name: "Wieżyczka autoblasterowa"
        text: """<strong>Atak:</strong> Zaatakuj 1 statek (nawet poza twoim polem rażenia). %LINEBREAK%Twoje wyniki %HIT% nie mogą być anulowane przy pomocy kości obrony. Obrońca może anulować wyniki %CRIT% przed %HIT%."""
# Missiles
"Concussion Missiles":
name: "Rakiety wstrząsowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz zmienić 1 ze swoich wyników z pustą ścianką na wynik %HIT%."""
"Cluster Missiles":
name: "Rakiety kasetonowe"
        text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby przeprowadzić ten atak dwukrotnie."""
"Homing Missiles":
name: "Rakiety samonaprowadzające"
text: """<strong>Atak (namierzony cel):</strong> Odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Podczas tego ataku obrońca nie może wydawać żetonów uniku."""
"Assault MPI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PIiety szPI:NAME:<NAME>END_PIowe"
text: """<strong>Atak (namierzony cel):</strong> Wydaj swój żeton namierzonego celu i odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Jeśli ten atak trafi w wybrany cel, każdy inny statek w Zasięgu 1 od obrońcy otrzymuje 1 uszkodzenie."""
"Ion Pulse Missiles":
name: "JonPI:NAME:<NAME>END_PI rakiety pulsacyjne"
text: """<strong>Atak (namierzony cel):</strong> Odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Jeśli ten atak trafi, obrońca otrzymuje 1 uszkodzenie oraz 2 żetony jonów."""
"ChPI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """<span class="card-restriction">Tylko A-wing.</span>%LINEBREAK%Ta karta ma ujemny koszt w punktach eskadry."""
"Proton Rockets":
name: "Rakiety protonowe"
        text: """<strong>Atak (skupienie):</strong> Odrzuć tę kartę, aby wykonać ten atak. %LINEBREAK%Możesz rzucić dodatkowymi kośćmi ataku w liczbie równej twojej zwrotności (nie więcej niż 3)."""
    # Bombs
"Seismic Charges":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy odkrywasz swój wskaźnik manewrów, możesz odrzucić tą kartę aby zrzucić 1 żeton ładunku sejsmicznego. %LINEBREAK%Żeton ten zostanie zdetonowany na koniec fazy aktywacji."""
"Proximity Mines":
name: "PI:NAME:<NAME>END_PI"
text: """<strong>Akcja:</strong> odrzuć tę kartę aby zrzucić 1 żeton miny zbliżeniowej. Kiedy statek wykona manewr w wyniku którego podstawka statku lub wzornik manewru będzie nachodzić na ten żeton, żeton ten zostaje zdetonowany."""
"Proton Bombs":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy odkrywasz swój wskaźnik manewrów, możesz odrzucić tą kartę aby zrzucić 1 żeton bomby protonowej. %LINEBREAK%Żeton ten zostanie zdetonowany na koniec fazy aktywacji."""
    # Cannons
"Ion Cannon":
name: "PI:NAME:<NAME>END_PI"
        text: """<strong>Atak:</strong> Zaatakuj 1 statek. %LINEBREAK%Jeżeli ten atak trafi wybrany cel, obrońca otrzymuje 1 uszkodzenie oraz 1 żeton jonów. Następnie anuluj wszystkie wyniki kości."""
"Heavy Laser Cannon":
name: "PI:NAME:<NAME>END_PI"
        text: """<strong>Atak:</strong> Zaatakuj 1 statek. %LINEBREAK%Natychmiast po rzucie swoimi kośćmi ataku musisz zmienić wszystkie swoje wyniki %CRIT% na wyniki %HIT%."""
"Autoblaster":
        text: """<strong>Atak:</strong> Zaatakuj 1 statek. %LINEBREAK%Twoje wyniki %HIT% nie mogą być anulowane przez kości obrony. Obrońca może anulować wyniki %CRIT% przed wynikami %HIT%."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI rozPI:NAME:<NAME>END_PI"
        text: """<strong>Atak:</strong> Zaatakuj 1 statek. %LINEBREAK%Jeżeli ten atak trafi, obrońca otrzymuje 1 uszkodzenie i, jeśli nie jest zestresowany, otrzymuje także 1 żeton stresu. Następnie anuluj wszystkie wyniki kości."""
'"Mangler" Cannon':
name: "Działo typu Mangler"
        text: """<strong>Atak:</strong> Zaatakuj 1 statek. %LINEBREAK%Kiedy atakujesz, możesz zmienić jeden ze swoich wyników %HIT% na wynik %CRIT%."""
    # Systems
"Enhanced Scopes":
name: "Wzmocnione radary"
text: """Podczas fazy aktywacji traktuj swoją wartość umiejętności pilota jakby wynosiła "0"."""
"Fire-Control System":
name: "System kontroli ognia"
        text: """Po tym jak wykonasz atak, możesz namierzyć obrońcę."""
"Advanced Sensors":
        name: "Zaawansowane sensory"
        text: """Zaraz przed tym jak ujawnisz swój manewr, możesz wykonać 1 darmową akcję. %LINEBREAK%Jeżeli skorzystasz z tej zdolności, musisz w tej rundzie pominąć swój krok "Wykonywania akcji"."""
"Sensor Jammer":
name: "Zakłócacz sensorów"
text: """Kiedy się bronisz możesz zmienić 1 z wyników %HIT% atakującego na wynik %FOCUS%. Atakujący nie może przerzucić kości ze zmienionym wynikiem."""
"Accuracy Corrector":
name: "Korektor celności"
text: """Kiedy atakujesz, możesz anulować wszystkie swoje wyniki kości. Następnie możesz dodać 2 wyniki %HIT%.%LINEBREAK% Podczas tego ataku nie można ponownie modyfikować twoich kości."""
"Advanced Targeting Computer":
name: "Zaawansowany komputer celowniczy"
text: """<span class="card-restriction">Tylko TIE Advanced.</span>%LINEBREAK% Kiedy atakujesz namierzonego przez siebie przeciwnika przy pomocy broni podstawowej, do wyniku rzutu kośćmi możesz dodać jeden wynik %CRIT%. Jeżeli to zrobisz, podczas tego ataku nie możesz wydać żetonu namierzonego celu."""
    # Crew
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """Po wykonaniu ataku, który nie trafił w wybrany cel, natychmiast wykonaj atak główną bronią. W tej rundzie nie możesz wykonać kolejnego ataku."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
        text: """Kiedy atakujesz w Zasięgu 3 możesz zmienić 1 ze swoich wyników %HIT% na wynik %CRIT%."""
"Weapons Engineer":
name: "PI:NAME:<NAME>END_PI"
text: """Możesz namierzać naraz 2 statki (każdy wrogi statek możesz namierzać tylko raz). %LINEBREAK%Kiedy namierzasz cel, możesz namierzyć 2 różne statki."""
"PI:NAME:<NAME>END_PI":
text: """%PL_REBELONLY%%LINEBREAK%Po wykonaniu ataku, który nie trafi w wybrany cel, natychmiast wykonaj atak główną bPI:NAME:<NAME>END_PIią. Możesz zmienić 1 wynik %FOCUS% na %HIT%."""
"PI:NAME:<NAME>END_PI":
text: """%PL_REBELONLY%%LINEBREAK%Możesz traktować wszystkie manewry %STRAIGHT%, jakby były to zielone manewry."""
"Chewbacca":
text: """%PL_REBELONLY%%LINEBREAK%Kiedy otrzymujesz kartę uszkodzenia, możesz natychmiast odrzucić tę kartę i odzyskać 1 żeton osłony. Następnie odrzuć tę kartę rozwinięcia."""
"Recon Specialist":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy wykonujesz akcję skupienia, przypisz do swojego statku 1 dodatkowy żeton skupienia."""
"SPI:NAME:<NAME>END_PIoteur":
name: "PI:NAME:<NAME>END_PI"
        text: """<strong>Akcja:</strong> Wybierz 1 wrogi statek w Zasięgu 1 i rzuć 1 kością ataku. Jeśli wypadnie %HIT% lub %CRIT%, wylosuj 1 zakrytą kartę uszkodzenia przypisaną do tego statku, odkryj ją i rozpatrz."""
"Intelligence Agent":
name: "Agent PI:NAME:<NAME>END_PI"
text: """Na początku fazy aktywacji wybierz 1 wrogi statek w zasięgu 1-2. Możesz podejrzeć manewr wybrany przez ten statek."""
"DPI:NAME:<NAME>END_PI":
text: """%PL_IMPERIALONLY%%LINEBREAK%Tylko Imperium. Po tym jak wykonasz atak skierowany przeciwko wrogiemu statkowi, możesz otrzymać 2 uszkodzenia, aby zadać temu statkowi 1 krytyczne uszkodzenie."""
"Rebel Captive":
name: "PI:NAME:<NAME>END_PI"
text: """%PL_IMPERIALONLY%%LINEBREAK%Raz na rundę, pierwszy statek, który zadeklaruje ciebie jako cel ataku, natychmiast otrzymuje 1 żeton stresu."""
"Flight Instructor":
name: "PI:NAME:<NAME>END_PI"
        text: """Kiedy się bronisz, możesz przerzucić 1 ze swoich wyników %FOCUS%. Jeśli wartość umiejętności atakującego pilota wynosi "2" lub mniej, zamiast tego przerzuć 1 ze swoich pustych ścianek."""
"Navigator":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy ujawnisz swój manewr, możesz obrócić swój wskaźnik na inny manewr tego samego kierunku. %LINEBREAK%Nie możesz przekręcić wskaźnika na czerwony manewr, jeśli posiadasz jakieś żetony stresu."""
"PI:NAME:<NAME>END_PI":
        text: """%PL_REBELONLY%%LINEBREAK%<strong>Akcja:</strong> Rzuć 2 kośćmi obrony. Za każdy uzyskany wynik %FOCUS% przypisz do swojego statku 1 żeton skupienia. Za każdy wynik %EVADE% przypisz do swojego statku 1 żeton uniku."""
"PI:NAME:<NAME>END_PI":
        text: """%PL_IMPERIALONLY%%LINEBREAK% Na koniec fazy walki każdy wrogi statek w Zasięgu 1, który nie ma żetonu stresu, otrzymuje żeton stresu."""
"Fleet Officer":
name: "PI:NAME:<NAME>END_PI"
text: """%PL_IMPERIALONLY%%LINEBREAK%<strong>Akcja:</strong> Wybierz maksymalnie 2 przyjazne statki w Zasięgu 1-2 i do każdego przypisz po 1 żetonie skupienia, następnie otrzymujesz 1 żeton stresu."""
"Han Solo":
text: """%PL_REBELONLY%%LINEBREAK%Tylko rebelianci. Kiedy atakujesz, jeśli namierzyłeś obrońcę, możesz wydać żeton namierzonego celu aby zmienić wszystkie swoje wyniki %FOCUS% na %HIT%."""
"Leia Organa":
text: """%PL_REBELONLY%%LINEBREAK%Na początku fazy aktywacji możesz odrzucić tę kartę, aby umożliwić wszystkim przyjaznym statkom, które ujawiniają czerwony manewr, traktowanie do końca fazy tego manewru jako białego."""
"WED-15 Repair Droid":
name: "Droid naprawczy WED-15"
text: """%PL_HUGESHIPONLY%%LINEBREAK%<strong>Akcja:</strong> Wydaj 1 żeton energii aby odrzucić 1 ze swoich zakrytych kart uszkodzeń albo wydaj 3 żetony energii aby odrzucić 1 ze swoich odkrytych kart uszkodzeń."""
"PI:NAME:<NAME>END_PI":
text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%Na początku fazy aktywacji możesz odrzucić tę kartę aby do końca fazy traktować wartość umiejętności pilota każdego przyjaznego statku jakby wynosiła "12"."""
"PI:NAME:<NAME>END_PI":
text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%Kiedy inny przyjazny statek w Zasięgu 1 wykonuje atak, możesz zmienić 1 z jego wyników %HIT% na %CRIT%."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
        text: "Po tym jak wykonasz atak przeciwko statkowi znajdującemu się w twoim polu rażenia w Zasięgu 2, statek ten otrzymuje 1 żeton stresu."
"R2-D2 (Crew)":
name: "R2-D2 (Załoga)"
text: """%PL_REBELONLY%%LINEBREAK%Na koniec fazy końcowej, jeśli nie masz żadnych osłon, możesz odzyskać 1 osłonę i rzucić 1 kością ataku. Jeśli wypadnie %HIT% odkryj 1 losową ze swoich zakrytych kart uszkodzeń i ją rozpatrz."""
"C-3PO":
        text: """%PL_REBELONLY%%LINEBREAK%Raz na rundę, zanim wykonasz rzut co najmniej 1 kością obrony, możesz na głos zgadnąć liczbę wyników %EVADE%. Jeśli wypadło tyle %EVADE% (przed modyfikacjami) dodaj 1 wynik %EVADE%."""
"PI:NAME:<NAME>END_PI":
text: """%PL_REBELONLY%%LINEBREAK%Po tym jak usuniesz ze swojego statku żeton stresu, możesz przypisać do swojego statku żeton skupienia."""
"PI:NAME:<NAME>END_PI":
text: """%PL_REBELONLY%%LINEBREAK%Raz na rundę, kiedy przyjazny statek w Zasięgu 1-3 wykonuje akcję skupienia lub miałby otrzymać żeton skupienia, możesz danemu statkowi przypisać żeton uniku (zamiast skupienia)."""
"PI:NAME:<NAME>END_PI":
        text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%<strong>Akcja:</strong> Wydaj dowolną ilość żetonów energii aby wybrać taką samą liczbę wrogich statków w Zasięgu 1-2. Usuń z wybranych statków wszystkie żetony skupienia, uników i niebieskie żetony namierzonego celu."""
"Targeting Coordinator":
name: "PI:NAME:<NAME>END_PI"
text: """<strong>Energia:</strong> Możesz wydać 1 żeton energii aby wybrać 1 przyjazny statek w Zasięgu 1-2. Namierz cel, a następnie przydziel do wybranego statku niebieski żeton namierzonego celu."""
"PI:NAME:<NAME>END_PI":
text: """%PL_HUGESHIPONLY% %PL_REBELONLY%%LINEBREAK%Na początku fazy aktywacji wybierz 1 wrogi statek w Zasięgu 1-3. Możesz podejrzeć manewr wybrany dla tego statku. Jeżeli jest on biały, przydziel do niego 1 żeton stresu."""
'"LPI:NAME:<NAME>END_PIbo"':
text: """%PL_REBELONLY%%LINEBREAK%<strong>Akcja:</strong> wykonaj darmową akcję "dopalacz". Następnie otrzymujesz 1 żeton jonów."""
"Dash Rendar":
text: """%PL_REBELONLY%%LINEBREAK%Możesz wykonywać ataki kiedy nachodzisz na przeszkodę. %LINEBREAK%Twoje ataki nie mogą być przyblokowane."""
"PI:NAME:<NAME>END_PI":
text: """%PL_IMPERIALONLY%%LINEBREAK%Na początku fazy walki, jeśli nie masz żadnych osłon, a do twojego statku przypisana jest co najmniej 1 karta uszkodzenia, możesz wykonać darmową akcję unik."""
"PI:NAME:<NAME>END_PI":
text: """%PL_IMPERIALONLY%%LINEBREAK%Kiedy otrzymujesz odkrytą kartę uszkodzenia, możesz odrzucić to rozwinięcie lub inną kartę rozwinięcia [crew] aby zakryć tę kartę uszkodzenia (bez rozpatrywania jej efektu)."""
"PI:NAME:<NAME>END_PI":
text: """%PL_SCUMONLY%%LINEBREAK%Za pierwszym razem kiedy atakujesz lub bronisz sie w każdej rundzie, pierwsza przypisana karta uszkodzenia jest odkryta."""
"Outlaw Tech":
name: "Mechanik wyjęty spod prawa"
text: """%PL_SCUMONLY%%LINEBREAK%Po wykonaniu czerwonego manewru, możesz przypisać do swojego statku 1 żeton skupienia."""
"K4 Security Droid":
name: "Droid ochroniarz K4"
text: """%PL_SCUMONLY%%LINEBREAK%Po wykonaniu zielonego manewru możesz namierzyć cel."""
    # Cargo
"Frequency Jammer":
name: "Zakłócacz częstotliwości"
text: """Kiedy wykonujesz akcję Zakłócanie, wybierz 1 wrogi statek, który nie ma żetonu stresu i znajduje się w Zasięgu 1 od zakłócanego statku. Wybrany statek otrzymuje 1 żeton stresu."""
"Expanded Cargo Hold":
ship: "Średni transportowiec GR-75"
name: "PI:NAME:<NAME>END_PIięksPI:NAME:<NAME>END_PI ładPI:NAME:<NAME>END_PI"
text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Raz na rundę, kiedy masz otrzymać odkrytą kartę uszkodznia, możesz dobrać te kartę z talii uszkodzeń dziobu lub rufy."""
"Comms Booster":
name: "PI:NAME:<NAME>END_PI"
text: """<strong>Energia:</strong> Wydaj 1 żeton energii aby usunąć wszystkie żetony stresu z przyjaznego statku w Zasięgu 1-3, następnie przydziel do tego statku 1 żeton skupienia."""
"Slicer Tools":
name: "PI:NAME:<NAME>END_PI"
text: """<strong>Akcja:</strong> Wybierz co najmniej 1 wrogi statek w Zasięgu 1-3, na ktorym znajduje się żeton stresu. Za każdy wybrany statek możesz wydać 1 żeton energii aby sprawić, żeby dany statek otrzymał 1 uszkodzenie."""
"Shield Projector":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy wrogi statek stanie się podczas fazy walki, możesz wydać 3 żetony energii aby do końca fazy zmusić go do zaatakowania ciebie, jeśli to możliwe."""
"Tibanna Gas Supplies":
name: "PI:NAME:<NAME>END_PI"
text: """<strong>Energia:</strong> Możesz odrzucić tę kartę aby otrzymać 3 żetony energii."""
"Ionization Reactor":
name: "PI:NAME:<NAME>END_PI jonPI:NAME:<NAME>END_PI"
text: """<strong>Energia:</strong> Wydaj 5 żetonów energii z tej karty i odrzuć tą kartę aby sprawić żeby każdy statek w Zasięgu 1 otrzymał 1 uszkodzneie i 1 żeton jonów."""
"Engine Booster":
name: "PI:NAME:<NAME>END_PI"
text: """Tuż przed tym jak odkryjesz swój wskaźnik manewrów, możesz wydać 1 żeton energii aby wykonać biały manewr (%STRAIGHT% 1). Nie możesz skorzystać z tej zdolności, jeśli w jej wyniku będziesz nachodzić na inny statek."""
"Backup Shield Generator":
name: "Zapasowy generator osłon"
text: """Na koniec każdej rudny możesz wydać 1 żeton energii aby odzyskać 1 osłonę (nie przekraczając swojej wartości osłon)."""
"EM Emitter":
name: "Emiter elektro-magnetyczny"
text: """Kiedy przyblokujesz atak, obrońca rzuca 3 dodatkowymi kośmi obrony (zamiast 1)."""
# Hardpiont
"Ion Cannon Battery":
name: "Bateria działa jonowego"
text: """<strong>Atak (energia):</strong> Aby wykonać ten atak, wydaj 2 żetony energii z tej karty. Jeżeli atak ten trafi w wybrany statek, otrzymuje on 1 krytyczne uszkodzenie oraz 1 żeton jonów. Następnie anuluj wszystkie wyniki kości."""
"Single Turbolasers":
name: "PI:NAME:<NAME>END_PIńPI:NAME:<NAME>END_PI TPI:NAME:<NAME>END_PI"
text: """<strong>Atak (energia):</strong> Wydaj 2 żetony energii z tej karty aby wykonać ten atak. Obronca podwaja swoją wartość zwrotności przeciwko temu atakowi. Możesz zmienić jeden ze swoich wyników %FOCUS% na %HIT%."""
"Quad Laser Cannons":
name: "Poczwórne działka laserowe"
text: """<strong>Atak (energia):</strong> Wydaj 1 żeton energii z tej karty aby wykonać ten atak. Jeśli ten atak nie trafi, możesz natychmiast wydać 1 żeton energii z tej karty aby ponownie przeprowadzić ten atak."""
# Équipe
"Gunnery Team":
name: "PI:NAME:<NAME>END_PI"
text: """Raz na rundę kiedy atakujesz przy pomocy daodatkowej broni, możesz wydać 1 żeton energii aby zmienić 1 ze swoich pustych wyników na %HIT%."""
"Sensor Team":
name: "PI:NAME:<NAME>END_PI sensorPI:NAME:<NAME>END_PI"
text: """Kiedy namierzasz cel, możesz namierzyć wrogi statek w Zasięgu 1-5 (zamiast Zasięgu 1-3)."""
"Engineering Team":
name: "PI:NAME:<NAME>END_PI"
text: """Podczas fazy aktywacji, kiedy ujawnisz manewr %STRAIGHT%, otrzymujesz 1 dodatkowy żeton energii podczas kroku Otrzymywania energii."""
# Illégal
"Inertial Dampeners":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy ujawniasz swój manewr, możesz odrzucić tę kartę żeby zamiast tego wykonać biały manewr [0%STOP%]. Następnie otrzymujesz 1 żeton stresu."""
"Dead Man's Switch":
name: "WłącznPI:NAME:<NAME>END_PI samPI:NAME:<NAME>END_PIóPI:NAME:<NAME>END_PI"
text: """Kiedy zostajesz zniszczony, każdy statek w Zasięgu 1 otrzymuje 1 uszkodzenie."""
"Feedback Array":
name: "System zwrotny"
text: """Podczas fazy walki, zamiast wykonywać jakiekolwiek ataki, możesz otrzymać 1 żeton jonów i 1 uszkodzenie aby wybrać wrogi statek w Zasięgu 1. Wybrany statek otrzymuje 1 uszkodzenie."""
'"Hot Shot" Blaster':
name: "PI:NAME:<NAME>END_PI"
text: """<strong>Atak:</strong> Odrzuć tę kartę, aby zaatakować 1 statek (nawet poza twoim polem rażenia)."""
# Astromech récupéré
"Salvaged Astromech":
name: "PI:NAME:<NAME>END_PI"
text: "Kiedy otrzymujesz kartę uszkodzenia z cechą Statek, natychmiast możesz ją odrzucić (przed rozpatrzeniem efektu). %LINEBREAK%Następnie odrzuć tę kartę rozwinięcia.%LINEBREAK%."
'"PI:NAME:<NAME>END_PI"':
name: "PI:NAME:<NAME>END_PI"
text: """Jeśli jesteś wyposażony w bombę, która może zostać zrzucona przed ujawnieniem twojego manewru, zamiast tego możesz ją zrzucić po tym jak wykonasz swój manewr."""
"Unhinged Astromech":
name: "PI:NAME:<NAME>END_PI"
text: """Możesz traktować manewry o prędkości 3 jako zielone."""
"R4-B11":
text: """Kiedy atakujesz namierzonego przez siebie obrońcę, możesz wydać żeton namierzonego celu aby wybrać dowolne kości obrony (nawet wszystkie). Następnie obrońca musi przerzucić wybrane przez ciebie kości."""
"PI:NAME:<NAME>END_PI":
name: "PI:NAME:<NAME>END_PI"
text: """Kiedy atakujesz, po wydaniu żetonu skupienia, możesz namierzyć obrońcę."""
"PI:NAME:<NAME>END_PI":
text: """%IMPERIALONLY%%LINEBREAK%Once per round, you may change a friendly ship's die result to any other die result. That die result cannot be modified again."""
"BossPI:NAME:<NAME>END_PI":
text: """%SCUMONLY%%LINEBREAK%After you perform an attack that does not hit, if you are not stressed, you <strong>must</strong> receive 1 stress token. Then assign 1 focus token to your ship and acquire a target lock on the defender."""
"Lightning Reflexes":
text: """%SMALLSHIPONLY%%LINEBREAK%After you execute a white or green maneuver on your dial, you may discard this card to rotate your ship 180°. Then receive 1 stress token <strong>after</strong> the "Check Pilot Stress" step."""
"Twin Laser Turret":
text: """<strong>Attack:</strong> Perform this attack <strong>twice</strong> (even against a ship outside your firing arc).<br /><br />Each time this attack hits, the defender suffers 1 damage. Then cancel <strong>all</strong> dice results."""
"PlPI:NAME:<NAME>END_PI":
text: """<strong>Attack (target lock):</strong> Spend your target lock and discard this card to perform this attack.<br /><br />If this attack hits, after dealing damage, remove 1 shield token from the defender."""
"Ion Bombs":
text: """When you reveal your maneuver dial, you may discard this card to <strong>drop</strong> 1 ion bomb token.<br /><br />This token <strong>detonates</strong> at the end of the Activation phase.<br /><br /><strong>Ion Bombs Token:</strong> When this bomb token detonates, each ship at Range 1 of the token receives 2 ion tokens. Then discard this token."""
"Conner Net":
text: """<strong>Action:</strong> Discard this card to <strong>drop</strong> 1 Conner Net token.<br /><br />When a ship's base or maneuver template overlaps this token, this token <strong>detonates</strong>.<br /><br /><strong>Conner Net Token:</strong> When this bomb token detonates, the ship that moved through or overlapped this token suffers 1 damage, receives 2 ion tokens, and skips its "Perform Action" step. Then discard this token."""
"Bombardier":
text: """When dropping a bomb, you may use the (%STRAIGHT% 2) template instead of the (%STRAIGHT% 1) template."""
"Cluster Mines":
text: """<strong>Action:</strong> Discard this card to <strong>drop</strong> 3 cluster mine tokens.<br /><br />When a ship's base or maneuver template overlaps a cluster mine token, that token <strong>detonates</strong>.<br /><br /><strong>Cluster Mines Tokens:</strong> When one of these bomb tokens detonates, the ship that moved through or overlapped this token rolls 2 attack dice and suffers all damage (%HIT%) rolled. Then discard this token."""
'Crack Shot':
text: '''When attacking a ship inside your firing arc, you may discard this card to cancel 1 of the defender's %EVADE% results.'''
"Advanced Homing Missiles":
text: """<strong>Attack (target lock):</strong> Discard this card to perform this attack.%LINEBREAK%If this attack hits, deal 1 faceup Damage card to the defender. Then cancel <strong>all</strong> dice results."""
'Agent PI:NAME:<NAME>END_PI':
text: '''%IMPERIALONLY%%LINEBREAK%At the start of the first round, choose 1 enemy small or large ship. When attacking or defending against that ship, you may change 1 of your %FOCUS% results to a %HIT% or %EVADE% result.'''
'XX-23 S-Thread Tracers':
text: """<strong>Attack (focus):</strong> Discard this card to perform this attack. If this attack hits, each friendly ship at Range 1-2 of you may acquire a target lock on the defender. Then cancel <strong>all</strong> dice results."""
"Tractor Beam":
text: """<strong>Attack:</strong> Attack 1 ship.%LINEBREAK%If this attack hits, the defender receives 1 tractor beam token. Then cancel <strong>all</strong> dice results."""
"Cloaking Device":
text: """%SMALLSHIPONLY%%LINEBREAK%<strong>Action:</strong> Perform a free cloak action.%LINEBREAK%At the end of each round, if you are cloaked, roll 1 attack die. On a %FOCUS% result, discard this card, then decloak or discard your cloak token."""
# Polish translations for Modification upgrade cards, keyed by the card's
# English name. `name` is the translated card title, `text` the translated
# rules text (%...% placeholders are expanded by the card renderer), and
# `ship` an optional ship-restriction label.
modification_translations =
    "Shield Upgrade":
        name: "Ulepszenie osłon"
        text: """Zwiększ wartość swoich osłon o 1."""
    "Advanced Cloaking Device":
        name: "Zaawansowany system maskowania"
        text: """<span class="card-restriction">Tylko TIE Phantom.</span>%LINEBREAK%Po tym jak wykonasz atak, możesz wykonać darmową akcję maskowanie."""
        ship: "TIE Phantom"
    "Stealth Device":
        name: "Urządzenie maskujące"
        text: """Zwiększ wartość swojej zwrotności o 1. Jeśli zostaniesz trafiony podczas ataku, odrzuć tę kartę."""
    "Engine Upgrade":
        name : "Ulepszenie silnika"
        text: """Twój pasek rozwinięć zyskuje symbol akcji %BOOST%."""
    "Anti-Pursuit Lasers":
        name: "Lasery antypościgowe"
        text: """%PL_LARGESHIPONLY%Po tym jak wrogi statek wykona manewr, który sprawi że będzie zachodzić na ciebie, rzuć 1 kością ataku. Jeśli wypadnie %HIT% lub %CRIT%, wrogi statek otrzymuje 1 uszkodzenie."""
    "Targeting Computer":
        name: "Komputer celowniczy"
        text: """Twój pasek akcji zyskuje symbol akcji %TARGETLOCK%."""
    "Hull Upgrade":
        name: "Ulepszenie kadłuba"
        text: """Zwiększ wartość swojego kadłuba o 1."""
    "Munitions Failsafe":
        name: "Zabezpieczenie amunicji"
        text: """Kiedy atakujesz przy pomocy broni dodatkowej, która nakazuje odrzucenie karty po wykonaniu ataku, nie odrzucasz jej jeśli atak nie trafi."""
    "Stygium Particle Accelerator":
        name: "Akcelerator cząsteczek stygium"
        text: """Kiedy się demaskujesz lub wykonasz akcję maskowanie, możesz wykonać darmową akcję unik."""
    "Combat Retrofit":
        name: "Modyfikacja bojowa"
        text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Zwiększ wartość swojego kadłuba o 2 i wartość swoich osłon o 1."""
        # NOTE(review): ship label left in French — other GR-75 entries use
        # "Średni transportowiec GR-75"; confirm against the ship-name table.
        ship: "Transport moyen GR-75"
    "B-Wing/E2":
        text: """<span class="card-restriction">Tylko B-wing.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol rozwinięcia %CREW%."""
    "Countermeasures":
        name: "Środki profilaktyczne"
        text: """%PL_LARGESHIPONLY%%LINEBREAK%Na początku fazy walki możesz odrzucić tę kartę, aby do końca rundy zwiększyć swoją zwrotność o 1. Następnie możesz usunąć ze swojego statku 1 wrogi żeton namierzonego celu."""
    "Experimental Interface":
        name: "Eksperymentalny interfejs"
        text: """Raz na rundę. Po tym jak wykonasz akcję możesz wykonać 1 darmową akcję z karty rozwinięcia z nagłówkiem <strong>Akcja:</strong>, w którą jesteś wyposażony. Następnie otrzymujesz 1 żeton stresu."""
    "Tactical Jammer":
        name: "Zakłócacz taktyczny"
        text: """%PL_LARGESHIPONLY%%LINEBREAK%Twój statek może przyblokowywać wrogie ataki."""
    "Autothrusters":
        name: "Autodopalacze"
        text: """Kiedy się bronisz, jeśli jesteś poza Zasięgiem 2 albo znajdujesz się poza polem rażenia atakującego, możesz zmienić 1 ze swoich pustych wyników na %EVADE%. Możesz wyposażyć swój statek w tę kartę tylko jeśli masz symbol akcji %BOOST%."""
    # NOTE(review): the remaining entries have no `name` and keep the English
    # rules text — presumably pending translation; confirm upstream.
    "Twin Ion Engine Mk. II":
        text: """You may treat all bank maneuvers (%BANKLEFT% and %BANKRIGHT%) as green maneuvers."""
    "Maneuvering Fins":
        text: """When you reveal a turn maneuver (%TURNLEFT% or %TURNRIGHT%), you may rotate your dial to the corresponding bank maneuver (%BANKLEFT% or %BANKRIGHT%) of the same speed."""
    "Ion Projector":
        text: """%LARGESHIPONLY%%LINEBREAK%After an enemy ship executes a maneuver that causes it to overlap your ship, roll 1 attack die. On a %HIT% or %CRIT% result, the enemy ship receives 1 ion token."""
# Polish translations for Title upgrade cards, keyed by the card's English
# name. `name` is the translated card title, `text` the translated rules
# text (%...% placeholders are expanded by the card renderer), and `ship`
# an optional ship-restriction label.
# NOTE(review): several name/text fields contain "PI:NAME:...END_PI"
# redaction artifacts; restore them from the upstream translation source.
title_translations =
    "Slave I":
        text: """<span class="card-restriction">Tylko Firespray-31.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol %TORPEDO%."""
    "PI:NAME:<NAME>END_PIium PI:NAME:<NAME>END_PI":
        name: "PI:NAME:<NAME>END_PI"
        text: """<span class="card-restriction">Tylko YT-1300.</span>%LINEBREAK% Twój pasek akcji zyskuje symbol akcji %EVADE%."""
    "PI:NAME:<NAME>END_PI":
        text: """<span class="card-restriction">Tylko HWK-290.</span>%LINEBREAK%Podczas fazy końcowej nie usuwaj ze swojego statku niewykorzystanych żetonów skupienia."""
    "ST-321":
        # NOTE(review): this entry is still in French, unlike the rest of the
        # file — needs a Polish translation.
        ship: "Navette de classe Lambda"
        text: """<span class="card-restriction">Navette de classe <em>Lambda</em> uniquement.</span>%LINEBREAK%Quand vous verrouillez une cible, vous pouvez verrouiller n'importe quel vaisseau ennemi situé dans la zone de jeu."""
    "Royal Guard TIE":
        ship: "TIE Interceptor"
        name: "PI:NAME:<NAME>END_PI"
        text: """<span class="card-restriction">Tylko TIE Interceptor.</span>%LINEBREAK%Możesz dołączyć do swojego statku maksymalnie 2 różne karty Modyfikacji (zamiast 1). Nie możesz dołączyć tej karty do swojego statku, jeśli wartość umiejętności pilota wynosi "4" lub mniej."""
    "PI:NAME:<NAME>END_PI":
        name: "PI:NAME:<NAME>END_PI"
        ship: "Korweta CR90 (dziób)"
        text: """<span class="card-restriction">Tylko sekcja dziobowa CR90.</span>%LINEBREAK%Kiedy wykonujesz akcję "Koordynacja", możesz wybrać 2 przyjazne statki (zamiast 1). Statki te mogą wykonać po 1 darmowej akcji."""
    "A-Wing Test Pilot":
        name: "Pilot testowy PI:NAME:<NAME>END_PI-PI:NAME:<NAME>END_PI"
        text: """<span class="card-restriction">Tylko A-wing.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol rozwinięcia %ELITE%. Nie możesz wyposażyć się w 2 takie same karty rozwinięcia [elite talent]. Nie możesz wyposażyć się w tę kartę, jeśli twoja wartość umiejętności pilota wynosi "1" lub mniej."""
    "Tantive IV":
        ship: "Korweta CR90 (dziób)"
        text: """<span class="card-restriction">Tylko sekcja dziobowa CR90.</span>%LINEBREAK%Twój pasek rozwinięć sekcji dziobowej zyskuje po 1 symbolu rozwinięcia %CREW% i %TEAM%."""
    "Bright Hope":
        ship: "Średni transportowiec GR-75"
        text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Żetony wsparcia przypisane do twojej sekcji dziobowej dostają 2 wyniki %EVADE% (zamiast 1)."""
    "Quantum Storm":
        ship: "Średni transportowiec GR-75"
        # Fixed typo: "żeton energi" -> "żeton energii".
        text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Na początku fazy końcowej, jeśli masz nie więcej niż 1 żeton energii, otrzymujesz 1 żeton energii."""
    "Dutyfree":
        ship: "Średni transportowiec GR-75"
        text: """<span class="card-restriction">Tylko GR-75.</span>%LINEBREAK%Kiedy wykonujesz akcję Zakłócenie, możesz wybrać wrogi statek w Zasięgu 1-3 (zamiast Zasięgu 1-2)."""
    "Jaina's Light":
        ship: "Korweta CR90 (dziób)"
        text: """<span class="card-restriction">Tylko sekcja dziobowa CR90.</span>%LINEBREAK%Kiedy się bronisz, raz na atak, jeśli otrzymujesz odkrytą kartę uszkodzenia, możesz ją odrzucić i dobrać nową odkrytą kartę uszkodzenia."""
    "Outrider":
        text: """<span class="card-restriction">Tylko YT-2400.</span>%LINEBREAK%Dopóki jesteś wyposażony w kartę rozwinięcia [cannon], nie możesz wykonywać ataków bronią podstawową. Przy pomocy dodatkowej broni [cannon] możesz wykonywać ataki skierowane przeciwko statkom znajdujacym się poza twoim polem rażenia. """
    "Dauntless":
        # NOTE(review): ship label left in French ("Décimateur VT-49") —
        # confirm against the ship-name table before changing.
        ship: "Décimateur VT-49"
        text: """<span class="card-restriction">Tylko Decimator VT-49.</span>%LINEBREAK%Po tym jak wykonasz manewr, który sprawi że będziesz nachodzić na inny statek, możesz wykonać 1 darmową akcję. Następnie otrzymujesz 1 żeton stresu."""
    "Virago":
        text: """<span class="card-restriction">Tylko StarViper.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbole rozwinięć %SYSTEM% i %ILLICIT%. Nie możesz wyposażyć swojego statku w tę kartę jeśli wartość umiejętności twojego pilota wynosi „3” lub mniej."""
    '"Heavy Scyk" Interceptor (Cannon)':
        name: 'Interceptor typu Heavy Scyk (Działo)'
        ship: "Interceptor M3-A"
        text: """<span class="card-restriction">Tylko Interceptor M3-A.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje jeden z symboli rozwinięć: %CANNON%, %TORPEDO% lub %MISSILE%."""
    '"Heavy Scyk" Interceptor (Torpedo)':
        name: 'Interceptor typu Heavy Scyk (Torpeda)'
        ship: "Interceptor M3-A"
        text: """<span class="card-restriction">Tylko Interceptor M3-A.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje jeden z symboli rozwinięć: %CANNON%, %TORPEDO% lub %MISSILE%."""
    '"Heavy Scyk" Interceptor (Missile)':
        # Name aligned with the Cannon/Torpedo variants above (was a
        # French/redacted leftover: 'Intercepteur "..." (Rakieta)').
        name: 'Interceptor typu Heavy Scyk (Rakieta)'
        ship: "Interceptor M3-A"
        text: """<span class="card-restriction">Tylko Interceptor M3-A.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje jeden z symboli rozwinięć: %CANNON%, %TORPEDO% lub %MISSILE%."""
    "IG-2000":
        text: """<span class="card-restriction">Tylko Aggressor.</span>%LINEBREAK%Masz zdolność pilota każdego innego przyjaznego statku z kartą ulepszenia IG-2000 (jako dodatek do swojej zdolności pilota)."""
    "BTL-A4 Y-Wing":
        text: """<span class="card-restriction">Tylko Y-wing.</span>%LINEBREAK%Nie możesz atakować statków znajdujących się poza twoim polem rażenia. Po wykonaniu ataku przy pomocy broni podstawowej, możesz natychmiast wykonać atak przy pomocy dodatkowej broni %TURRET%."""
    "Andrasta":
        text: """<span class="card-restriction">Tylko Firespray-31.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje dwa symbole %BOMB%."""
    "TIE/x1":
        text: """<span class="card-restriction">Tylko TIE Advanced.</span>%LINEBREAK%Twój pasek rozwinięć zyskuje symbol rozwinięcia %SYSTEM%. %LINEBREAK%Koszt przypisanej do tego statku karty rozwinięcia %SYSTEM% jest obniżony o 4 punkty (do minimum 0)."""
    # NOTE(review): the remaining entries keep the English rules text —
    # presumably pending translation; confirm upstream.
    "Ghost":
        text: """<span class="card-restriction">VCX-100 only.</span>%LINEBREAK%Equip the <em>Phantom</em> title card to a friendly Attack Shuttle and dock it to this ship.%LINEBREAK%After you execute a maneuver, you may deploy it from your rear guides."""
    "Phantom":
        text: """While you are docked, the <em>Ghost</em> can perform primary weapon attacks from its special firing arc, and, at the end of the Combat phase, it may perform an additional attack with an equipped %TURRET%. If it performs this attack, it cannot attack again this round."""
    "TIE/v1":
        text: """<span class="card-restriction">TIE Advanced Prototype only.</span>%LINEBREAK%After you acquire a target lock, you may perform a free evade action."""
    "PI:NAME:<NAME>END_PI":
        text: """<span class="card-restriction">G-1A starfighter only.</span>%LINEBREAK%Your upgrade bar gains the %BARRELROLL% Upgrade icon.%LINEBREAK%You <strong>must</strong> equip 1 "Tractor Beam" Upgrade card (paying its squad point cost as normal)."""
    "Punishing One":
        text: """<span class="card-restriction">JumpMaster 5000 only.</span>%LINEBREAK%Increase your primary weapon value by 1."""
# Register every translation table with the shared card-data setup routine.
exportObj.setupCardData basic_cards, pilot_translations, upgrade_translations, modification_translations, title_translations
|
[
{
"context": ".tokenizeLine(\"@name '@name' @name's @name. @name, (@name) [@name]\")\n expect(tokens[0]).toEqual value: \"",
"end": 33466,
"score": 0.7798135280609131,
"start": 33460,
"tag": "USERNAME",
"value": "(@name"
},
{
"context": "]\n\n {tokens} = grammar.tokenizeLine(\"... | spec/gfm-spec.coffee | ssfrr/language-gfm | 0 | describe "GitHub Flavored Markdown grammar", ->
grammar = null
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage("language-gfm")
runs ->
grammar = atom.grammars.grammarForScopeName("source.gfm")
it "parses the grammar", ->
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.gfm"
it "tokenizes spaces", ->
{tokens} = grammar.tokenizeLine(" ")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
it "tokenizes horizontal rules", ->
{tokens} = grammar.tokenizeLine("***")
expect(tokens[0]).toEqual value: "***", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("---")
expect(tokens[0]).toEqual value: "---", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("___")
expect(tokens[0]).toEqual value: "___", scopes: ["source.gfm", "comment.hr.gfm"]
it "tokenizes escaped characters", ->
{tokens} = grammar.tokenizeLine("\\*")
expect(tokens[0]).toEqual value: "\\*", scopes: ["source.gfm", "constant.character.escape.gfm"]
{tokens} = grammar.tokenizeLine("\\\\")
expect(tokens[0]).toEqual value: "\\\\", scopes: ["source.gfm", "constant.character.escape.gfm"]
{tokens} = grammar.tokenizeLine("\\abc")
expect(tokens[0]).toEqual value: "\\a", scopes: ["source.gfm", "constant.character.escape.gfm"]
expect(tokens[1]).toEqual value: "bc", scopes: ["source.gfm"]
it "tokenizes ***bold italic*** text", ->
{tokens} = grammar.tokenizeLine("this is ***bold italic*** text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[3]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ***bold\nitalic***!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes ___bold italic___ text", ->
{tokens} = grammar.tokenizeLine("this is ___bold italic___ text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[3]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ___bold\nitalic___!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes **bold** text", ->
{tokens} = grammar.tokenizeLine("**bold**")
expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[2]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is **bo\nld**!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
expect(firstLineTokens[2]).toEqual value: "bo", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[0]).toEqual value: "ld", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[1]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not**bold**")
expect(tokens[0]).toEqual value: "not**bold**", scopes: ["source.gfm"]
it "tokenizes __bold__ text", ->
{tokens} = grammar.tokenizeLine("____")
expect(tokens[0]).toEqual value: "____", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("__bold__")
expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[2]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is __bo\nld__!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
expect(firstLineTokens[2]).toEqual value: "bo", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[0]).toEqual value: "ld", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[1]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not__bold__")
expect(tokens[0]).toEqual value: "not__bold__", scopes: ["source.gfm"]
it "tokenizes *italic* text", ->
{tokens} = grammar.tokenizeLine("**")
expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("this is *italic* text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[3]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not*italic*")
expect(tokens[0]).toEqual value: "not*italic*", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("* not italic")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[2]).toEqual value: "not italic", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is *ita\nlic*!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "ita", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "lic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes _italic_ text", ->
{tokens} = grammar.tokenizeLine("__")
expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("this is _italic_ text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[3]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not_italic_")
expect(tokens[0]).toEqual value: "not_italic_", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not x^{a}_m y^{b}_n italic")
expect(tokens[0]).toEqual value: "not x^{a}_m y^{b}_n italic", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is _ita\nlic_!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "ita", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "lic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes ~~strike~~ text", ->
{tokens} = grammar.tokenizeLine("~~strike~~")
expect(tokens[0]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[1]).toEqual value: "strike", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[2]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ~~str\nike~~!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(firstLineTokens[2]).toEqual value: "str", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[0]).toEqual value: "ike", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not~~strike~~")
expect(tokens[0]).toEqual value: "not~~strike~~", scopes: ["source.gfm"]
it "tokenizes headings", ->
{tokens} = grammar.tokenizeLine("# Heading 1")
expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 1", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
{tokens} = grammar.tokenizeLine("## Heading 2")
expect(tokens[0]).toEqual value: "##", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 2", scopes: ["source.gfm", "markup.heading.heading-2.gfm"]
{tokens} = grammar.tokenizeLine("### Heading 3")
expect(tokens[0]).toEqual value: "###", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 3", scopes: ["source.gfm", "markup.heading.heading-3.gfm"]
{tokens} = grammar.tokenizeLine("#### Heading 4")
expect(tokens[0]).toEqual value: "####", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 4", scopes: ["source.gfm", "markup.heading.heading-4.gfm"]
{tokens} = grammar.tokenizeLine("##### Heading 5")
expect(tokens[0]).toEqual value: "#####", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 5", scopes: ["source.gfm", "markup.heading.heading-5.gfm"]
{tokens} = grammar.tokenizeLine("###### Heading 6")
expect(tokens[0]).toEqual value: "######", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 6", scopes: ["source.gfm", "markup.heading.heading-6.gfm"]
it "tokenizes matches inside of headers", ->
{tokens} = grammar.tokenizeLine("# Heading :one:")
expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading ", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
expect(tokens[4]).toEqual value: "one", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
expect(tokens[5]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
it "tokenizes an :emoji:", ->
{tokens} = grammar.tokenizeLine("this is :no_good:")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
expect(tokens[2]).toEqual value: "no_good", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
{tokens} = grammar.tokenizeLine("this is :no good:")
expect(tokens[0]).toEqual value: "this is :no good:", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("http://localhost:8080")
expect(tokens[0]).toEqual value: "http://localhost:8080", scopes: ["source.gfm"]
it "tokenizes a ``` code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("```")
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("```", ruleStack)
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenizes a ~~~ code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~")
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("~~~", ruleStack)
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "doesn't tokenise ~`~ as a code block", ->
{tokens} = grammar.tokenizeLine("~`~")
expect(tokens[0]).toEqual value: '~', scopes: ['source.gfm']
expect(tokens[1]).toEqual value: '`', scopes: ['source.gfm', 'markup.raw.gfm']
expect(tokens[2]).toEqual value: '~', scopes: ['source.gfm', 'markup.raw.gfm']
it "tokenises code-blocks with borders of differing lengths", ->
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~\nfoo bar\n~~~~~~~")
expect(firstLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm']
expect(thirdLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~~~~~\nfoo bar\n~~~")
expect(firstLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm']
expect(thirdLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm']
it "tokenizes a ``` code block with trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("```")
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenizes a ~~~ code block with trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~")
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenises a ``` code block with an unknown language", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` myLanguage")
expect(tokens[0]).toEqual value: '``` myLanguage', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ['source.gfm', 'markup.code.other.gfm', 'source.embedded.mylanguage']
{tokens} = grammar.tokenizeLine("```", ruleStack)
expect(tokens[0]).toEqual value: '```', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']
it "tokenizes a ``` code block with a known language", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` bash")
expect(tokens[0]).toEqual value: "``` bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("```js ")
expect(tokens[0]).toEqual value: "```js ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
{tokens, ruleStack} = grammar.tokenizeLine("```JS ")
expect(tokens[0]).toEqual value: "```JS ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
{tokens, ruleStack} = grammar.tokenizeLine("```r ")
expect(tokens[0]).toEqual value: "```r ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a Rmarkdown ``` code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("```{r}")
expect(tokens[0]).toEqual value: "```{r}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r,eval=TRUE,cache=FALSE}")
expect(tokens[0]).toEqual value: "```{r,eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r eval=TRUE,cache=FALSE}")
expect(tokens[0]).toEqual value: "```{r eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a Rmarkdown ``` code block with whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("```{r }")
expect(tokens[0]).toEqual value: "```{r }", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{R } ")
expect(tokens[0]).toEqual value: "```{R } ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r eval = TRUE, cache = FALSE}")
expect(tokens[0]).toEqual value: "```{r eval = TRUE, cache = FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a ~~~ code block with a language", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~ bash")
expect(tokens[0]).toEqual value: "~~~ bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("~~~js ")
expect(tokens[0]).toEqual value: "~~~js ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
it "tokenizes a ``` code block with a language and trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` bash")
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("```js ")
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
it "tokenizes a ~~~ code block with a language and trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~ bash")
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("~~~js ")
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
it "tokenizes inline `code` blocks", ->
{tokens} = grammar.tokenizeLine("`this` is `code`")
expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "this", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[2]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[3]).toEqual value: " is ", scopes: ["source.gfm"]
expect(tokens[4]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[5]).toEqual value: "code", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[6]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``")
expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``a\\`b``")
expect(tokens[0]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "a\\`b", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[2]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"]
it "tokenizes [links](links)", ->
{tokens} = grammar.tokenizeLine("please click [this link](website)")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes reference [links][links]", ->
{tokens} = grammar.tokenizeLine("please click [this link][website]")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes id-less reference [links][]", ->
{tokens} = grammar.tokenizeLine("please click [this link][]")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [link]: footers", ->
{tokens} = grammar.tokenizeLine("[aLink]: http://website")
expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "link", "punctuation.separator.key-value.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "link"]
expect(tokens[5]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
it "tokenizes [link]: <footers>", ->
{tokens} = grammar.tokenizeLine("[aLink]: <http://website>")
expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[3]).toEqual value: ": <", scopes: ["source.gfm", "link"]
expect(tokens[4]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[5]).toEqual value: ">", scopes: ["source.gfm", "link"]
it "tokenizes [](links)", ->
{tokens} = grammar.tokenizeLine("[](link)")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [][links]", ->
{tokens} = grammar.tokenizeLine("[][link]")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links][links]](links)", ->
{tokens} = grammar.tokenizeLine("[![title][image]](link)")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links][links]][links]", ->
{tokens} = grammar.tokenizeLine("[![title][image]][link]")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes mentions", ->
{tokens} = grammar.tokenizeLine("sentence with no space before@name ")
expect(tokens[0]).toEqual value: "sentence with no space before@name ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name '@name' @name's @name. @name, (@name) [@name]")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " '", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[4]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[5]).toEqual value: "' ", scopes: ["source.gfm"]
expect(tokens[6]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[7]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[8]).toEqual value: "'s ", scopes: ["source.gfm"]
expect(tokens[9]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[10]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[11]).toEqual value: ". ", scopes: ["source.gfm"]
expect(tokens[12]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[13]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[14]).toEqual value: ", (", scopes: ["source.gfm"]
expect(tokens[15]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[16]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[17]).toEqual value: ") [", scopes: ["source.gfm"]
expect(tokens[18]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[19]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[20]).toEqual value: "]", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine('"@name"')
expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name/ and an invalid symbol after")
expect(tokens[0]).toEqual value: "sentence with a space before @name/ and an invalid symbol after", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name that continues")
expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("* @name at the start of an unordered list")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[2]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[3]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[4]).toEqual value: " at the start of an unordered list", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337_hubot with numbers, letters and underscores")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337_hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and underscores", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337-hubot with numbers, letters and hyphens")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337-hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and hyphens", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name at the start of a line")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " at the start of a line", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("any email like you@domain.com shouldn't mistakenly be matched as a mention")
expect(tokens[0]).toEqual value: "any email like you@domain.com shouldn't mistakenly be matched as a mention", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person's")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: "'s", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person;")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm"]
it "tokenizes issue numbers", ->
{tokens} = grammar.tokenizeLine("sentence with no space before#12 ")
expect(tokens[0]).toEqual value: "sentence with no space before#12 ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" #101 '#101' #101's #101. #101, (#101) [#101]")
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: " '", scopes: ["source.gfm"]
expect(tokens[4]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[5]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[6]).toEqual value: "' ", scopes: ["source.gfm"]
expect(tokens[7]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[8]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[9]).toEqual value: "'s ", scopes: ["source.gfm"]
expect(tokens[10]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[11]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[12]).toEqual value: ". ", scopes: ["source.gfm"]
expect(tokens[13]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[14]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[15]).toEqual value: ", (", scopes: ["source.gfm"]
expect(tokens[16]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[17]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[18]).toEqual value: ") [", scopes: ["source.gfm"]
expect(tokens[19]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[20]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[21]).toEqual value: "]", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine('"#101"')
expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before #123i and a character after")
expect(tokens[0]).toEqual value: "sentence with a space before #123i and a character after", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before #123 that continues")
expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" #123's")
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: "'s", scopes: ["source.gfm"]
it "tokenizes unordered lists", ->
{tokens} = grammar.tokenizeLine("*Item 1")
expect(tokens[0]).not.toEqual value: "*Item 1", scopes: ["source.gfm", "variable.unordered.list.gfm"]
{tokens} = grammar.tokenizeLine(" * Item 1")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 1", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" + Item 2")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "+", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 2", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" - Item 3")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "-", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 3", scopes: ["source.gfm"]
it "tokenizes ordered lists", ->
{tokens} = grammar.tokenizeLine("1.First Item")
expect(tokens[0]).toEqual value: "1.First Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 1. First Item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "1.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "First Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 10. Tenth Item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "10.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Tenth Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 111. Hundred and eleventh item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "111.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Hundred and eleventh item", scopes: ["source.gfm"]
it "tokenizes > quoted text", ->
{tokens} = grammar.tokenizeLine("> Quotation :+1:")
expect(tokens[0]).toEqual value: ">", scopes: ["source.gfm", "comment.quote.gfm", "support.quote.gfm"]
expect(tokens[1]).toEqual value: " Quotation ", scopes: ["source.gfm", "comment.quote.gfm"]
expect(tokens[2]).toEqual value: ":", scopes: ["source.gfm", "comment.quote.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
expect(tokens[3]).toEqual value: "+1", scopes: ["source.gfm", "comment.quote.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
expect(tokens[4]).toEqual value: ":", scopes: ["source.gfm", "comment.quote.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
# Named (&trade;), decimal (&#8482;), unrecognized (&a1;) and hex (&#xb3;)
# entity forms each split into punctuation / entity-body / punctuation tokens.
it "tokenizes HTML entities", ->
  # NOTE(review): the input string had been entity-decoded ("™ … ³") by a
  # tooling pass; restored to the literal entity text the assertions require.
  {tokens} = grammar.tokenizeLine("&trade; &#8482; &a1; &#xb3;")

  expect(tokens[0]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[1]).toEqual value: "trade", scopes: ["source.gfm", "constant.character.entity.gfm"]
  expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]

  expect(tokens[3]).toEqual value: " ", scopes: ["source.gfm"]

  expect(tokens[4]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[5]).toEqual value: "#8482", scopes: ["source.gfm", "constant.character.entity.gfm"]
  expect(tokens[6]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]

  expect(tokens[7]).toEqual value: " ", scopes: ["source.gfm"]

  expect(tokens[8]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[9]).toEqual value: "a1", scopes: ["source.gfm", "constant.character.entity.gfm"]
  expect(tokens[10]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]

  expect(tokens[11]).toEqual value: " ", scopes: ["source.gfm"]

  expect(tokens[12]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[13]).toEqual value: "#xb3", scopes: ["source.gfm", "constant.character.entity.gfm"]
  expect(tokens[14]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
# Entities inside *…* and _…_ italics keep constant.character.entity scopes
# nested under markup.italic.gfm.
it "tokenizes HTML entities in *italic* text", ->
  # NOTE(review): input strings restored to literal entity text ("&trade;" etc.);
  # they had been entity-decoded by a tooling pass, contradicting the assertions.
  {tokens} = grammar.tokenizeLine("*&trade; &#8482; &#xb3;*")

  expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
  expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
  expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
  expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[12]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]

  {tokens} = grammar.tokenizeLine("_&trade; &#8482; &#xb3;_")

  expect(tokens[0]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
  expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
  expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
  expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[12]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
# Entities inside **…** and __…__ bold keep constant.character.entity scopes
# nested under markup.bold.gfm.
it "tokenizes HTML entities in **bold** text", ->
  # NOTE(review): input strings restored to literal entity text ("&trade;" etc.);
  # they had been entity-decoded by a tooling pass, contradicting the assertions.
  {tokens} = grammar.tokenizeLine("**&trade; &#8482; &#xb3;**")

  expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
  expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
  expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
  expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
  expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
  expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
  expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[12]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]

  {tokens} = grammar.tokenizeLine("__&trade; &#8482; &#xb3;__")

  expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
  expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
  expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
  expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
  expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
  expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
  expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[12]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
# Entities inside ***…*** and ___…___ keep constant.character.entity scopes
# nested under markup.bold.italic.gfm.
it "tokenizes HTML entities in ***bold italic*** text", ->
  # NOTE(review): input strings restored to literal entity text ("&trade;" etc.);
  # they had been entity-decoded by a tooling pass, contradicting the assertions.
  {tokens} = grammar.tokenizeLine("***&trade; &#8482; &#xb3;***")

  expect(tokens[0]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
  expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
  expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
  expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[12]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]

  {tokens} = grammar.tokenizeLine("___&trade; &#8482; &#xb3;___")

  expect(tokens[0]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
  expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
  expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
  expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
  expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[12]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
# Entities inside ~~…~~ keep constant.character.entity scopes nested under
# markup.strike.gfm.
it "tokenizes HTML entities in strikethrough text", ->
  # NOTE(review): input string restored to literal entity text ("&trade;" etc.);
  # it had been entity-decoded by a tooling pass, contradicting the assertions.
  {tokens} = grammar.tokenizeLine("~~&trade; &#8482; &#xb3;~~")

  expect(tokens[0]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
  expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
  expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.strike.gfm"]
  expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
  expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.strike.gfm"]
  expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
  expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
  expect(tokens[12]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
# HTML comments scope as comment.block with punctuation on both delimiters.
it "tokenizes HTML comments", ->
  {tokens} = grammar.tokenizeLine("<!-- a comment -->")
  expect(tokens[0]).toEqual value: "<!--", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"]
  expect(tokens[1]).toEqual value: " a comment ", scopes: ["source.gfm", "comment.block.gfm"]
  expect(tokens[2]).toEqual value: "-->", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"]
# A leading "---" fence opens front-matter.yaml; content between the fences
# stays in that scope, and each fence also gets comment.hr.
it "tokenizes YAML front matter", ->
  [firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines """
    ---
    front: matter
    ---
  """

  expect(firstLineTokens[0]).toEqual value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]
  expect(secondLineTokens[0]).toEqual value: "front: matter", scopes: ["source.gfm", "front-matter.yaml.gfm"]
  expect(thirdLineTokens[0]).toEqual value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]
# Two trailing spaces (a Markdown hard break) scope as linebreak.gfm.
it "tokenizes linebreaks", ->
  {tokens} = grammar.tokenizeLine("line  ")

  expect(tokens[0]).toEqual value: "line", scopes: ["source.gfm"]
  expect(tokens[1]).toEqual value: "  ", scopes: ["source.gfm", "linebreak.gfm"]
# Tables: outer/inner pipes, ":" alignment markers and "---" header borders
# each get dedicated scopes; a header row followed by a blank line still
# tokenizes, and the next heading is unaffected.
it "tokenizes tables", ->
  [headerTokens, alignTokens, contentTokens] = grammar.tokenizeLines """
    | Column 1  | Column 2  |
    |:----------|:---------:|
    | Content 1 | Content 2 |
  """

  # Header line
  expect(headerTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
  expect(headerTokens[1]).toEqual value: " Column 1  ", scopes: ["source.gfm", "table.gfm"]
  expect(headerTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
  expect(headerTokens[3]).toEqual value: " Column 2  ", scopes: ["source.gfm", "table.gfm"]
  expect(headerTokens[4]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]

  # Alignment line
  expect(alignTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
  expect(alignTokens[1]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
  expect(alignTokens[2]).toEqual value: "----------", scopes: ["source.gfm", "table.gfm", "border.header"]
  expect(alignTokens[3]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
  expect(alignTokens[4]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
  expect(alignTokens[5]).toEqual value: "---------", scopes: ["source.gfm", "table.gfm", "border.header"]
  expect(alignTokens[6]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
  expect(alignTokens[7]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]

  # Content line
  expect(contentTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
  expect(contentTokens[1]).toEqual value: " Content 1 ", scopes: ["source.gfm", "table.gfm"]
  expect(contentTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
  expect(contentTokens[3]).toEqual value: " Content 2 ", scopes: ["source.gfm", "table.gfm"]
  expect(contentTokens[4]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]

  # NOTE(review): restored the blank middle line that `emptyLineTokens`
  # destructures — it appears to have been dropped by an extraction pass.
  [headerTokens, emptyLineTokens, headingTokens] = grammar.tokenizeLines """
    | Column 1 | Column 2\t

    # Heading
  """

  expect(headerTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
  expect(headerTokens[1]).toEqual value: " Column 1 ", scopes: ["source.gfm", "table.gfm"]
  expect(headerTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
  expect(headerTokens[3]).toEqual value: " Column 2", scopes: ["source.gfm", "table.gfm"]
  expect(headerTokens[4]).toEqual value: "\t", scopes: ["source.gfm", "table.gfm"]

  expect(headingTokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
  expect(headingTokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
  expect(headingTokens[2]).toEqual value: "Heading", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
# CriticMarkup: addition {++ ++}, deletion {-- --}, highlight {== ==},
# comment {>> <<} and substitution {~~ ~> ~~} each get critic.gfm.* scopes
# with .marker (and .operator for "~>") sub-scopes on the delimiters.
it "tokenizes criticmarkup", ->
  [addToken, delToken, hlToken, subToken] = grammar.tokenizeLines """
    Add{++ some text++}
    Delete{-- some text--}
    Highlight {==some text==}{>>with comment<<}
    Replace {~~this~>by that~~}
  """
  # Addition
  expect(addToken[0]).toEqual value: "Add", scopes: ["source.gfm"]
  expect(addToken[1]).toEqual value: "{++", scopes: ["source.gfm", "critic.gfm.addition", "critic.gfm.addition.marker"]
  expect(addToken[2]).toEqual value: " some text", scopes: ["source.gfm", "critic.gfm.addition"]
  expect(addToken[3]).toEqual value: "++}", scopes: ["source.gfm", "critic.gfm.addition", "critic.gfm.addition.marker"]
  # Deletion
  expect(delToken[0]).toEqual value: "Delete", scopes: ["source.gfm"]
  expect(delToken[1]).toEqual value: "{--", scopes: ["source.gfm", "critic.gfm.deletion", "critic.gfm.deletion.marker"]
  expect(delToken[2]).toEqual value: " some text", scopes: ["source.gfm", "critic.gfm.deletion"]
  expect(delToken[3]).toEqual value: "--}", scopes: ["source.gfm", "critic.gfm.deletion", "critic.gfm.deletion.marker"]
  # Comment and highlight
  expect(hlToken[0]).toEqual value: "Highlight ", scopes: ["source.gfm"]
  expect(hlToken[1]).toEqual value: "{==", scopes: ["source.gfm", "critic.gfm.highlight", "critic.gfm.highlight.marker"]
  expect(hlToken[2]).toEqual value: "some text", scopes: ["source.gfm", "critic.gfm.highlight"]
  expect(hlToken[3]).toEqual value: "==}", scopes: ["source.gfm", "critic.gfm.highlight", "critic.gfm.highlight.marker"]
  expect(hlToken[4]).toEqual value: "{>>", scopes: ["source.gfm", "critic.gfm.comment", "critic.gfm.comment.marker"]
  expect(hlToken[5]).toEqual value: "with comment", scopes: ["source.gfm", "critic.gfm.comment"]
  expect(hlToken[6]).toEqual value: "<<}", scopes: ["source.gfm", "critic.gfm.comment", "critic.gfm.comment.marker"]
  # Replace
  expect(subToken[0]).toEqual value: "Replace ", scopes: ["source.gfm"]
  expect(subToken[1]).toEqual value: "{~~", scopes: ["source.gfm", "critic.gfm.substitution", "critic.gfm.substitution.marker"]
  expect(subToken[2]).toEqual value: "this", scopes: ["source.gfm", "critic.gfm.substitution"]
  expect(subToken[3]).toEqual value: "~>", scopes: ["source.gfm", "critic.gfm.substitution", "critic.gfm.substitution.operator"]
  expect(subToken[4]).toEqual value: "by that", scopes: ["source.gfm", "critic.gfm.substitution"]
  expect(subToken[5]).toEqual value: "~~}", scopes: ["source.gfm", "critic.gfm.substitution", "critic.gfm.substitution.marker"]
describe "GitHub Flavored Markdown grammar", ->
  # Shared grammar instance, populated once language-gfm finishes activating.
  grammar = null

  beforeEach ->
    waitsForPromise ->
      atom.packages.activatePackage("language-gfm")

    runs ->
      grammar = atom.grammars.grammarForScopeName("source.gfm")
  # Sanity check: the package registered a grammar for scope "source.gfm".
  it "parses the grammar", ->
    expect(grammar).toBeDefined()
    expect(grammar.scopeName).toBe "source.gfm"
  # A lone space stays in the base source.gfm scope.
  it "tokenizes spaces", ->
    {tokens} = grammar.tokenizeLine(" ")

    expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
  # All three horizontal-rule spellings (***, ---, ___) scope as comment.hr.
  it "tokenizes horizontal rules", ->
    {tokens} = grammar.tokenizeLine("***")
    expect(tokens[0]).toEqual value: "***", scopes: ["source.gfm", "comment.hr.gfm"]

    {tokens} = grammar.tokenizeLine("---")
    expect(tokens[0]).toEqual value: "---", scopes: ["source.gfm", "comment.hr.gfm"]

    {tokens} = grammar.tokenizeLine("___")
    expect(tokens[0]).toEqual value: "___", scopes: ["source.gfm", "comment.hr.gfm"]
  # A backslash plus the next character forms one constant.character.escape
  # token; the rest of the line is untouched.
  it "tokenizes escaped characters", ->
    {tokens} = grammar.tokenizeLine("\\*")
    expect(tokens[0]).toEqual value: "\\*", scopes: ["source.gfm", "constant.character.escape.gfm"]

    {tokens} = grammar.tokenizeLine("\\\\")
    expect(tokens[0]).toEqual value: "\\\\", scopes: ["source.gfm", "constant.character.escape.gfm"]

    {tokens} = grammar.tokenizeLine("\\abc")
    expect(tokens[0]).toEqual value: "\\a", scopes: ["source.gfm", "constant.character.escape.gfm"]
    expect(tokens[1]).toEqual value: "bc", scopes: ["source.gfm"]
  # ***…*** scopes as markup.bold.italic and may span a line break.
  it "tokenizes ***bold italic*** text", ->
    {tokens} = grammar.tokenizeLine("this is ***bold italic*** text")
    expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(tokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(tokens[3]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]

    [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ***bold\nitalic***!")
    expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(firstLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(secondLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
  # ___…___ is the underscore spelling of bold-italic; same scopes, also
  # allowed to span a line break.
  it "tokenizes ___bold italic___ text", ->
    {tokens} = grammar.tokenizeLine("this is ___bold italic___ text")
    expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(tokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(tokens[3]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]

    [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ___bold\nitalic___!")
    expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(firstLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(secondLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
    expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
  # **…** scopes as markup.bold, spans line breaks, but is NOT triggered
  # mid-word ("not**bold**").
  it "tokenizes **bold** text", ->
    {tokens} = grammar.tokenizeLine("**bold**")
    expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(tokens[2]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]

    [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is **bo\nld**!")
    expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(firstLineTokens[1]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(firstLineTokens[2]).toEqual value: "bo", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(secondLineTokens[0]).toEqual value: "ld", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(secondLineTokens[1]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]

    {tokens} = grammar.tokenizeLine("not**bold**")
    expect(tokens[0]).toEqual value: "not**bold**", scopes: ["source.gfm"]
  # __…__ scopes as markup.bold; "____" alone is a horizontal rule, and the
  # marker is not triggered mid-word ("not__bold__").
  it "tokenizes __bold__ text", ->
    {tokens} = grammar.tokenizeLine("____")
    expect(tokens[0]).toEqual value: "____", scopes: ["source.gfm", "comment.hr.gfm"]

    {tokens} = grammar.tokenizeLine("__bold__")
    expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(tokens[2]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]

    [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is __bo\nld__!")
    expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(firstLineTokens[1]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(firstLineTokens[2]).toEqual value: "bo", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(secondLineTokens[0]).toEqual value: "ld", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(secondLineTokens[1]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
    expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]

    {tokens} = grammar.tokenizeLine("not__bold__")
    expect(tokens[0]).toEqual value: "not__bold__", scopes: ["source.gfm"]
  # *…* scopes as markup.italic; empty "**", mid-word "not*italic*" and a
  # leading list bullet "* " must NOT be treated as italic.
  it "tokenizes *italic* text", ->
    {tokens} = grammar.tokenizeLine("**")
    expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm"]

    {tokens} = grammar.tokenizeLine("this is *italic* text")
    expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(tokens[1]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(tokens[3]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]

    {tokens} = grammar.tokenizeLine("not*italic*")
    expect(tokens[0]).toEqual value: "not*italic*", scopes: ["source.gfm"]

    {tokens} = grammar.tokenizeLine("* not italic")
    expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
    expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
    expect(tokens[2]).toEqual value: "not italic", scopes: ["source.gfm"]

    [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is *ita\nlic*!")
    expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(firstLineTokens[1]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(firstLineTokens[2]).toEqual value: "ita", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(secondLineTokens[0]).toEqual value: "lic", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(secondLineTokens[1]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
  # _…_ scopes as markup.italic; empty "__", mid-word "not_italic_" and
  # subscript-style underscores (x^{a}_m) must NOT be treated as italic.
  it "tokenizes _italic_ text", ->
    {tokens} = grammar.tokenizeLine("__")
    expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm"]

    {tokens} = grammar.tokenizeLine("this is _italic_ text")
    expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(tokens[1]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(tokens[3]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]

    {tokens} = grammar.tokenizeLine("not_italic_")
    expect(tokens[0]).toEqual value: "not_italic_", scopes: ["source.gfm"]

    {tokens} = grammar.tokenizeLine("not x^{a}_m y^{b}_n italic")
    expect(tokens[0]).toEqual value: "not x^{a}_m y^{b}_n italic", scopes: ["source.gfm"]

    [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is _ita\nlic_!")
    expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(firstLineTokens[1]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(firstLineTokens[2]).toEqual value: "ita", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(secondLineTokens[0]).toEqual value: "lic", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(secondLineTokens[1]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
    expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
  # ~~…~~ scopes as markup.strike, spans line breaks, and is not triggered
  # mid-word ("not~~strike~~").
  it "tokenizes ~~strike~~ text", ->
    {tokens} = grammar.tokenizeLine("~~strike~~")
    expect(tokens[0]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
    expect(tokens[1]).toEqual value: "strike", scopes: ["source.gfm", "markup.strike.gfm"]
    expect(tokens[2]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]

    [firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ~~str\nike~~!")
    expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(firstLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
    expect(firstLineTokens[2]).toEqual value: "str", scopes: ["source.gfm", "markup.strike.gfm"]
    expect(secondLineTokens[0]).toEqual value: "ike", scopes: ["source.gfm", "markup.strike.gfm"]
    expect(secondLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
    expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]

    {tokens} = grammar.tokenizeLine("not~~strike~~")
    expect(tokens[0]).toEqual value: "not~~strike~~", scopes: ["source.gfm"]
  # Heading levels 1-6: marker ("#"…"######"), the separating space, and the
  # heading text each get level-specific markup.heading.heading-N scopes.
  it "tokenizes headings", ->
    {tokens} = grammar.tokenizeLine("# Heading 1")
    expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
    expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
    expect(tokens[2]).toEqual value: "Heading 1", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]

    {tokens} = grammar.tokenizeLine("## Heading 2")
    expect(tokens[0]).toEqual value: "##", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.marker.gfm"]
    expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.space.gfm"]
    expect(tokens[2]).toEqual value: "Heading 2", scopes: ["source.gfm", "markup.heading.heading-2.gfm"]

    {tokens} = grammar.tokenizeLine("### Heading 3")
    expect(tokens[0]).toEqual value: "###", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.marker.gfm"]
    expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.space.gfm"]
    expect(tokens[2]).toEqual value: "Heading 3", scopes: ["source.gfm", "markup.heading.heading-3.gfm"]

    {tokens} = grammar.tokenizeLine("#### Heading 4")
    expect(tokens[0]).toEqual value: "####", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.marker.gfm"]
    expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.space.gfm"]
    expect(tokens[2]).toEqual value: "Heading 4", scopes: ["source.gfm", "markup.heading.heading-4.gfm"]

    {tokens} = grammar.tokenizeLine("##### Heading 5")
    expect(tokens[0]).toEqual value: "#####", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.marker.gfm"]
    expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.space.gfm"]
    expect(tokens[2]).toEqual value: "Heading 5", scopes: ["source.gfm", "markup.heading.heading-5.gfm"]

    {tokens} = grammar.tokenizeLine("###### Heading 6")
    expect(tokens[0]).toEqual value: "######", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.marker.gfm"]
    expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.space.gfm"]
    expect(tokens[2]).toEqual value: "Heading 6", scopes: ["source.gfm", "markup.heading.heading-6.gfm"]
  # Inline constructs (here an emoji) still match inside a heading, with
  # their scopes nested under the heading scope.
  it "tokenizes matches inside of headers", ->
    {tokens} = grammar.tokenizeLine("# Heading :one:")

    expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
    expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
    expect(tokens[2]).toEqual value: "Heading ", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
    expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
    expect(tokens[4]).toEqual value: "one", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
    expect(tokens[5]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
  # :word: scopes as string.emoji; names with spaces and URL port colons
  # ("localhost:8080") must NOT match.
  it "tokenizes an :emoji:", ->
    {tokens} = grammar.tokenizeLine("this is :no_good:")
    expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
    expect(tokens[1]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
    expect(tokens[2]).toEqual value: "no_good", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
    expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]

    {tokens} = grammar.tokenizeLine("this is :no good:")
    expect(tokens[0]).toEqual value: "this is :no good:", scopes: ["source.gfm"]

    {tokens} = grammar.tokenizeLine("http://localhost:8080")
    expect(tokens[0]).toEqual value: "http://localhost:8080", scopes: ["source.gfm"]
it "tokenizes a ``` code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("```")
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("```", ruleStack)
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenizes a ~~~ code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~")
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("~~~", ruleStack)
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "doesn't tokenise ~`~ as a code block", ->
{tokens} = grammar.tokenizeLine("~`~")
expect(tokens[0]).toEqual value: '~', scopes: ['source.gfm']
expect(tokens[1]).toEqual value: '`', scopes: ['source.gfm', 'markup.raw.gfm']
expect(tokens[2]).toEqual value: '~', scopes: ['source.gfm', 'markup.raw.gfm']
it "tokenises code-blocks with borders of differing lengths", ->
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~\nfoo bar\n~~~~~~~")
expect(firstLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm']
expect(thirdLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~~~~~\nfoo bar\n~~~")
expect(firstLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm']
expect(thirdLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm']
it "tokenizes a ``` code block with trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("```")
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenizes a ~~~ code block with trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~")
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenises a ``` code block with an unknown language", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` myLanguage")
expect(tokens[0]).toEqual value: '``` myLanguage', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ['source.gfm', 'markup.code.other.gfm', 'source.embedded.mylanguage']
{tokens} = grammar.tokenizeLine("```", ruleStack)
expect(tokens[0]).toEqual value: '```', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']
it "tokenizes a ``` code block with a known language", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` bash")
expect(tokens[0]).toEqual value: "``` bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("```js ")
expect(tokens[0]).toEqual value: "```js ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
{tokens, ruleStack} = grammar.tokenizeLine("```JS ")
expect(tokens[0]).toEqual value: "```JS ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
{tokens, ruleStack} = grammar.tokenizeLine("```r ")
expect(tokens[0]).toEqual value: "```r ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a Rmarkdown ``` code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("```{r}")
expect(tokens[0]).toEqual value: "```{r}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r,eval=TRUE,cache=FALSE}")
expect(tokens[0]).toEqual value: "```{r,eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r eval=TRUE,cache=FALSE}")
expect(tokens[0]).toEqual value: "```{r eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a Rmarkdown ``` code block with whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("```{r }")
expect(tokens[0]).toEqual value: "```{r }", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{R } ")
expect(tokens[0]).toEqual value: "```{R } ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r eval = TRUE, cache = FALSE}")
expect(tokens[0]).toEqual value: "```{r eval = TRUE, cache = FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a ~~~ code block with a language", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~ bash")
expect(tokens[0]).toEqual value: "~~~ bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("~~~js ")
expect(tokens[0]).toEqual value: "~~~js ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
it "tokenizes a ``` code block with a language and trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` bash")
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("```js ")
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
it "tokenizes a ~~~ code block with a language and trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~ bash")
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("~~~js ")
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
it "tokenizes inline `code` blocks", ->
{tokens} = grammar.tokenizeLine("`this` is `code`")
expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "this", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[2]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[3]).toEqual value: " is ", scopes: ["source.gfm"]
expect(tokens[4]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[5]).toEqual value: "code", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[6]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``")
expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``a\\`b``")
expect(tokens[0]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "a\\`b", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[2]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"]
it "tokenizes [links](links)", ->
{tokens} = grammar.tokenizeLine("please click [this link](website)")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes reference [links][links]", ->
{tokens} = grammar.tokenizeLine("please click [this link][website]")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes id-less reference [links][]", ->
{tokens} = grammar.tokenizeLine("please click [this link][]")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [link]: footers", ->
{tokens} = grammar.tokenizeLine("[aLink]: http://website")
expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "link", "punctuation.separator.key-value.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "link"]
expect(tokens[5]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
it "tokenizes [link]: <footers>", ->
{tokens} = grammar.tokenizeLine("[aLink]: <http://website>")
expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[3]).toEqual value: ": <", scopes: ["source.gfm", "link"]
expect(tokens[4]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[5]).toEqual value: ">", scopes: ["source.gfm", "link"]
it "tokenizes [](links)", ->
{tokens} = grammar.tokenizeLine("[](link)")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [][links]", ->
{tokens} = grammar.tokenizeLine("[][link]")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links][links]](links)", ->
{tokens} = grammar.tokenizeLine("[![title][image]](link)")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links][links]][links]", ->
{tokens} = grammar.tokenizeLine("[![title][image]][link]")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes mentions", ->
{tokens} = grammar.tokenizeLine("sentence with no space before@name ")
expect(tokens[0]).toEqual value: "sentence with no space before@name ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name '@name' @name's @name. @name, (@name) [@name]")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " '", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[4]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[5]).toEqual value: "' ", scopes: ["source.gfm"]
expect(tokens[6]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[7]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[8]).toEqual value: "'s ", scopes: ["source.gfm"]
expect(tokens[9]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[10]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[11]).toEqual value: ". ", scopes: ["source.gfm"]
expect(tokens[12]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[13]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[14]).toEqual value: ", (", scopes: ["source.gfm"]
expect(tokens[15]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[16]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[17]).toEqual value: ") [", scopes: ["source.gfm"]
expect(tokens[18]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[19]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[20]).toEqual value: "]", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine('"@name"')
expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name/ and an invalid symbol after")
expect(tokens[0]).toEqual value: "sentence with a space before @name/ and an invalid symbol after", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name that continues")
expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("* @name at the start of an unordered list")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[2]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[3]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[4]).toEqual value: " at the start of an unordered list", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337_hubot with numbers, letters and underscores")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337_hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and underscores", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337-hubot with numbers, letters and hyphens")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337-hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and hyphens", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name at the start of a line")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " at the start of a line", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("any email like <EMAIL> shouldn't mistakenly be matched as a mention")
expect(tokens[0]).toEqual value: "any email like <EMAIL> shouldn't mistakenly be matched as a mention", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person's")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: "'s", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person;")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm"]
it "tokenizes issue numbers", ->
{tokens} = grammar.tokenizeLine("sentence with no space before#12 ")
expect(tokens[0]).toEqual value: "sentence with no space before#12 ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" #101 '#101' #101's #101. #101, (#101) [#101]")
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: " '", scopes: ["source.gfm"]
expect(tokens[4]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[5]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[6]).toEqual value: "' ", scopes: ["source.gfm"]
expect(tokens[7]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[8]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[9]).toEqual value: "'s ", scopes: ["source.gfm"]
expect(tokens[10]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[11]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[12]).toEqual value: ". ", scopes: ["source.gfm"]
expect(tokens[13]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[14]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[15]).toEqual value: ", (", scopes: ["source.gfm"]
expect(tokens[16]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[17]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[18]).toEqual value: ") [", scopes: ["source.gfm"]
expect(tokens[19]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[20]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[21]).toEqual value: "]", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine('"#101"')
expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before #123i and a character after")
expect(tokens[0]).toEqual value: "sentence with a space before #123i and a character after", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before #123 that continues")
expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" #123's")
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: "'s", scopes: ["source.gfm"]
it "tokenizes unordered lists", ->
{tokens} = grammar.tokenizeLine("*Item 1")
expect(tokens[0]).not.toEqual value: "*Item 1", scopes: ["source.gfm", "variable.unordered.list.gfm"]
{tokens} = grammar.tokenizeLine(" * Item 1")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 1", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" + Item 2")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "+", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 2", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" - Item 3")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "-", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 3", scopes: ["source.gfm"]
it "tokenizes ordered lists", ->
{tokens} = grammar.tokenizeLine("1.First Item")
expect(tokens[0]).toEqual value: "1.First Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 1. First Item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "1.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "First Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 10. Tenth Item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "10.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Tenth Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 111. Hundred and eleventh item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "111.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Hundred and eleventh item", scopes: ["source.gfm"]
it "tokenizes > quoted text", ->
{tokens} = grammar.tokenizeLine("> Quotation :+1:")
expect(tokens[0]).toEqual value: ">", scopes: ["source.gfm", "comment.quote.gfm", "support.quote.gfm"]
expect(tokens[1]).toEqual value: " Quotation ", scopes: ["source.gfm", "comment.quote.gfm"]
expect(tokens[2]).toEqual value: ":", scopes: ["source.gfm", "comment.quote.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
expect(tokens[3]).toEqual value: "+1", scopes: ["source.gfm", "comment.quote.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
expect(tokens[4]).toEqual value: ":", scopes: ["source.gfm", "comment.quote.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
it "tokenizes HTML entities", ->
{tokens} = grammar.tokenizeLine("™ ™ &a1; ³")
expect(tokens[0]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[1]).toEqual value: "trade", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[3]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[4]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[5]).toEqual value: "#8482", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[6]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[7]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[8]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[9]).toEqual value: "a1", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[10]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[11]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[12]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[13]).toEqual value: "#xb3", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[14]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
it "tokenizes HTML entities in *italic* text", ->
{tokens} = grammar.tokenizeLine("*™ ™ ³*")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
{tokens} = grammar.tokenizeLine("_™ ™ ³_")
expect(tokens[0]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
it "tokenizes HTML entities in **bold** text", ->
{tokens} = grammar.tokenizeLine("**™ ™ ³**")
expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
{tokens} = grammar.tokenizeLine("__™ ™ ³__")
expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
it "tokenizes HTML entities in ***bold italic*** text", ->
{tokens} = grammar.tokenizeLine("***™ ™ ³***")
expect(tokens[0]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
{tokens} = grammar.tokenizeLine("___™ ™ ³___")
expect(tokens[0]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
it "tokenizes HTML entities in strikethrough text", ->
{tokens} = grammar.tokenizeLine("~~™ ™ ³~~")
expect(tokens[0]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
it "tokenizes HTML comments", ->
{tokens} = grammar.tokenizeLine("<!-- a comment -->")
expect(tokens[0]).toEqual value: "<!--", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"]
expect(tokens[1]).toEqual value: " a comment ", scopes: ["source.gfm", "comment.block.gfm"]
expect(tokens[2]).toEqual value: "-->", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"]
it "tokenizes YAML front matter", ->
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines """
---
front: matter
---
"""
expect(firstLineTokens[0]).toEqual value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]
expect(secondLineTokens[0]).toEqual value: "front: matter", scopes: ["source.gfm", "front-matter.yaml.gfm"]
expect(thirdLineTokens[0]).toEqual value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]
it "tokenizes linebreaks", ->
{tokens} = grammar.tokenizeLine("line ")
expect(tokens[0]).toEqual value: "line", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "linebreak.gfm"]
  it "tokenizes tables", ->
    # A well-formed table: header row, alignment row, and one content row.
    [headerTokens, alignTokens, contentTokens] = grammar.tokenizeLines """
      | Column 1 | Column 2 |
      |:----------|:---------:|
      | Content 1 | Content 2 |
    """
    # Header line: outer pipes and inner pipes get distinct border scopes;
    # cell text (including its padding spaces) keeps only the table scope.
    expect(headerTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
    expect(headerTokens[1]).toEqual value: " Column 1 ", scopes: ["source.gfm", "table.gfm"]
    expect(headerTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
    expect(headerTokens[3]).toEqual value: " Column 2 ", scopes: ["source.gfm", "table.gfm"]
    expect(headerTokens[4]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
    # Alignment line: colons scope as border.alignment, dash runs as border.header.
    expect(alignTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
    expect(alignTokens[1]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
    expect(alignTokens[2]).toEqual value: "----------", scopes: ["source.gfm", "table.gfm", "border.header"]
    expect(alignTokens[3]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
    expect(alignTokens[4]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
    expect(alignTokens[5]).toEqual value: "---------", scopes: ["source.gfm", "table.gfm", "border.header"]
    expect(alignTokens[6]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
    expect(alignTokens[7]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
    # Content line: same pipe scoping as the header line.
    expect(contentTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
    expect(contentTokens[1]).toEqual value: " Content 1 ", scopes: ["source.gfm", "table.gfm"]
    expect(contentTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
    expect(contentTokens[3]).toEqual value: " Content 2 ", scopes: ["source.gfm", "table.gfm"]
    expect(contentTokens[4]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
    # A header row without a trailing pipe (trailing tab instead) still scopes
    # as a table, and the table state does not leak into the following heading.
    [headerTokens, emptyLineTokens, headingTokens] = grammar.tokenizeLines """
      | Column 1 | Column 2\t
      # Heading
    """
    expect(headerTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
    expect(headerTokens[1]).toEqual value: " Column 1 ", scopes: ["source.gfm", "table.gfm"]
    expect(headerTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
    expect(headerTokens[3]).toEqual value: " Column 2", scopes: ["source.gfm", "table.gfm"]
    expect(headerTokens[4]).toEqual value: "\t", scopes: ["source.gfm", "table.gfm"]
    expect(headingTokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
    expect(headingTokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
    expect(headingTokens[2]).toEqual value: "Heading", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
  it "tokenizes criticmarkup", ->
    # One line per CriticMarkup operation: addition, deletion,
    # highlight with a comment, and substitution.
    [addToken, delToken, hlToken, subToken] = grammar.tokenizeLines """
      Add{++ some text++}
      Delete{-- some text--}
      Highlight {==some text==}{>>with comment<<}
      Replace {~~this~>by that~~}
    """
    # Addition: {++ ... ++} markers wrap the inserted text.
    expect(addToken[0]).toEqual value: "Add", scopes: ["source.gfm"]
    expect(addToken[1]).toEqual value: "{++", scopes: ["source.gfm", "critic.gfm.addition", "critic.gfm.addition.marker"]
    expect(addToken[2]).toEqual value: " some text", scopes: ["source.gfm", "critic.gfm.addition"]
    expect(addToken[3]).toEqual value: "++}", scopes: ["source.gfm", "critic.gfm.addition", "critic.gfm.addition.marker"]
    # Deletion: {-- ... --} markers wrap the removed text.
    expect(delToken[0]).toEqual value: "Delete", scopes: ["source.gfm"]
    expect(delToken[1]).toEqual value: "{--", scopes: ["source.gfm", "critic.gfm.deletion", "critic.gfm.deletion.marker"]
    expect(delToken[2]).toEqual value: " some text", scopes: ["source.gfm", "critic.gfm.deletion"]
    expect(delToken[3]).toEqual value: "--}", scopes: ["source.gfm", "critic.gfm.deletion", "critic.gfm.deletion.marker"]
    # Highlight {== ... ==} immediately followed by a comment {>> ... <<}.
    expect(hlToken[0]).toEqual value: "Highlight ", scopes: ["source.gfm"]
    expect(hlToken[1]).toEqual value: "{==", scopes: ["source.gfm", "critic.gfm.highlight", "critic.gfm.highlight.marker"]
    expect(hlToken[2]).toEqual value: "some text", scopes: ["source.gfm", "critic.gfm.highlight"]
    expect(hlToken[3]).toEqual value: "==}", scopes: ["source.gfm", "critic.gfm.highlight", "critic.gfm.highlight.marker"]
    expect(hlToken[4]).toEqual value: "{>>", scopes: ["source.gfm", "critic.gfm.comment", "critic.gfm.comment.marker"]
    expect(hlToken[5]).toEqual value: "with comment", scopes: ["source.gfm", "critic.gfm.comment"]
    expect(hlToken[6]).toEqual value: "<<}", scopes: ["source.gfm", "critic.gfm.comment", "critic.gfm.comment.marker"]
    # Substitution: {~~old~>new~~} with ~> scoped as the substitution operator.
    expect(subToken[0]).toEqual value: "Replace ", scopes: ["source.gfm"]
    expect(subToken[1]).toEqual value: "{~~", scopes: ["source.gfm", "critic.gfm.substitution", "critic.gfm.substitution.marker"]
    expect(subToken[2]).toEqual value: "this", scopes: ["source.gfm", "critic.gfm.substitution"]
    expect(subToken[3]).toEqual value: "~>", scopes: ["source.gfm", "critic.gfm.substitution", "critic.gfm.substitution.operator"]
    expect(subToken[4]).toEqual value: "by that", scopes: ["source.gfm", "critic.gfm.substitution"]
    expect(subToken[5]).toEqual value: "~~}", scopes: ["source.gfm", "critic.gfm.substitution", "critic.gfm.substitution.marker"]
| true | describe "GitHub Flavored Markdown grammar", ->
  # Grammar under test; assigned in beforeEach once language-gfm activates.
  grammar = null
  beforeEach ->
    # Package activation is async; wait for it, then resolve the GFM grammar
    # by its root scope name.
    waitsForPromise ->
      atom.packages.activatePackage("language-gfm")
    runs ->
      grammar = atom.grammars.grammarForScopeName("source.gfm")
  it "parses the grammar", ->
    # Sanity check: the grammar loaded and reports the expected root scope.
    expect(grammar).toBeDefined()
    expect(grammar.scopeName).toBe "source.gfm"
  it "tokenizes spaces", ->
    # A bare space stays in the root scope with no extra scopes applied.
    {tokens} = grammar.tokenizeLine(" ")
    expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
it "tokenizes horizontal rules", ->
{tokens} = grammar.tokenizeLine("***")
expect(tokens[0]).toEqual value: "***", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("---")
expect(tokens[0]).toEqual value: "---", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("___")
expect(tokens[0]).toEqual value: "___", scopes: ["source.gfm", "comment.hr.gfm"]
it "tokenizes escaped characters", ->
{tokens} = grammar.tokenizeLine("\\*")
expect(tokens[0]).toEqual value: "\\*", scopes: ["source.gfm", "constant.character.escape.gfm"]
{tokens} = grammar.tokenizeLine("\\\\")
expect(tokens[0]).toEqual value: "\\\\", scopes: ["source.gfm", "constant.character.escape.gfm"]
{tokens} = grammar.tokenizeLine("\\abc")
expect(tokens[0]).toEqual value: "\\a", scopes: ["source.gfm", "constant.character.escape.gfm"]
expect(tokens[1]).toEqual value: "bc", scopes: ["source.gfm"]
it "tokenizes ***bold italic*** text", ->
{tokens} = grammar.tokenizeLine("this is ***bold italic*** text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[3]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ***bold\nitalic***!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes ___bold italic___ text", ->
{tokens} = grammar.tokenizeLine("this is ___bold italic___ text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[3]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ___bold\nitalic___!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes **bold** text", ->
{tokens} = grammar.tokenizeLine("**bold**")
expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[2]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is **bo\nld**!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
expect(firstLineTokens[2]).toEqual value: "bo", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[0]).toEqual value: "ld", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[1]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not**bold**")
expect(tokens[0]).toEqual value: "not**bold**", scopes: ["source.gfm"]
it "tokenizes __bold__ text", ->
{tokens} = grammar.tokenizeLine("____")
expect(tokens[0]).toEqual value: "____", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("__bold__")
expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[2]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is __bo\nld__!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
expect(firstLineTokens[2]).toEqual value: "bo", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[0]).toEqual value: "ld", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[1]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not__bold__")
expect(tokens[0]).toEqual value: "not__bold__", scopes: ["source.gfm"]
it "tokenizes *italic* text", ->
{tokens} = grammar.tokenizeLine("**")
expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("this is *italic* text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[3]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not*italic*")
expect(tokens[0]).toEqual value: "not*italic*", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("* not italic")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[2]).toEqual value: "not italic", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is *ita\nlic*!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "ita", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "lic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes _italic_ text", ->
{tokens} = grammar.tokenizeLine("__")
expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("this is _italic_ text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[3]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not_italic_")
expect(tokens[0]).toEqual value: "not_italic_", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not x^{a}_m y^{b}_n italic")
expect(tokens[0]).toEqual value: "not x^{a}_m y^{b}_n italic", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is _ita\nlic_!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "ita", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "lic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes ~~strike~~ text", ->
{tokens} = grammar.tokenizeLine("~~strike~~")
expect(tokens[0]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[1]).toEqual value: "strike", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[2]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ~~str\nike~~!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(firstLineTokens[2]).toEqual value: "str", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[0]).toEqual value: "ike", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not~~strike~~")
expect(tokens[0]).toEqual value: "not~~strike~~", scopes: ["source.gfm"]
it "tokenizes headings", ->
{tokens} = grammar.tokenizeLine("# Heading 1")
expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 1", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
{tokens} = grammar.tokenizeLine("## Heading 2")
expect(tokens[0]).toEqual value: "##", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 2", scopes: ["source.gfm", "markup.heading.heading-2.gfm"]
{tokens} = grammar.tokenizeLine("### Heading 3")
expect(tokens[0]).toEqual value: "###", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 3", scopes: ["source.gfm", "markup.heading.heading-3.gfm"]
{tokens} = grammar.tokenizeLine("#### Heading 4")
expect(tokens[0]).toEqual value: "####", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 4", scopes: ["source.gfm", "markup.heading.heading-4.gfm"]
{tokens} = grammar.tokenizeLine("##### Heading 5")
expect(tokens[0]).toEqual value: "#####", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 5", scopes: ["source.gfm", "markup.heading.heading-5.gfm"]
{tokens} = grammar.tokenizeLine("###### Heading 6")
expect(tokens[0]).toEqual value: "######", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 6", scopes: ["source.gfm", "markup.heading.heading-6.gfm"]
it "tokenizes matches inside of headers", ->
{tokens} = grammar.tokenizeLine("# Heading :one:")
expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading ", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
expect(tokens[4]).toEqual value: "one", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
expect(tokens[5]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
it "tokenizes an :emoji:", ->
{tokens} = grammar.tokenizeLine("this is :no_good:")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
expect(tokens[2]).toEqual value: "no_good", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
{tokens} = grammar.tokenizeLine("this is :no good:")
expect(tokens[0]).toEqual value: "this is :no good:", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("http://localhost:8080")
expect(tokens[0]).toEqual value: "http://localhost:8080", scopes: ["source.gfm"]
it "tokenizes a ``` code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("```")
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("```", ruleStack)
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenizes a ~~~ code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~")
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("~~~", ruleStack)
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "doesn't tokenise ~`~ as a code block", ->
{tokens} = grammar.tokenizeLine("~`~")
expect(tokens[0]).toEqual value: '~', scopes: ['source.gfm']
expect(tokens[1]).toEqual value: '`', scopes: ['source.gfm', 'markup.raw.gfm']
expect(tokens[2]).toEqual value: '~', scopes: ['source.gfm', 'markup.raw.gfm']
it "tokenises code-blocks with borders of differing lengths", ->
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~\nfoo bar\n~~~~~~~")
expect(firstLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm']
expect(thirdLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~~~~~\nfoo bar\n~~~")
expect(firstLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm']
expect(thirdLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm']
it "tokenizes a ``` code block with trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("```")
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenizes a ~~~ code block with trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~")
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenises a ``` code block with an unknown language", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` myLanguage")
expect(tokens[0]).toEqual value: '``` myLanguage', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ['source.gfm', 'markup.code.other.gfm', 'source.embedded.mylanguage']
{tokens} = grammar.tokenizeLine("```", ruleStack)
expect(tokens[0]).toEqual value: '```', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']
it "tokenizes a ``` code block with a known language", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` bash")
expect(tokens[0]).toEqual value: "``` bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("```js ")
expect(tokens[0]).toEqual value: "```js ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
{tokens, ruleStack} = grammar.tokenizeLine("```JS ")
expect(tokens[0]).toEqual value: "```JS ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
{tokens, ruleStack} = grammar.tokenizeLine("```r ")
expect(tokens[0]).toEqual value: "```r ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a Rmarkdown ``` code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("```{r}")
expect(tokens[0]).toEqual value: "```{r}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r,eval=TRUE,cache=FALSE}")
expect(tokens[0]).toEqual value: "```{r,eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r eval=TRUE,cache=FALSE}")
expect(tokens[0]).toEqual value: "```{r eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a Rmarkdown ``` code block with whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("```{r }")
expect(tokens[0]).toEqual value: "```{r }", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{R } ")
expect(tokens[0]).toEqual value: "```{R } ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r eval = TRUE, cache = FALSE}")
expect(tokens[0]).toEqual value: "```{r eval = TRUE, cache = FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a ~~~ code block with a language", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~ bash")
expect(tokens[0]).toEqual value: "~~~ bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("~~~js ")
expect(tokens[0]).toEqual value: "~~~js ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
it "tokenizes a ``` code block with a language and trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` bash")
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("```js ")
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
it "tokenizes a ~~~ code block with a language and trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~ bash")
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("~~~js ")
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
it "tokenizes inline `code` blocks", ->
{tokens} = grammar.tokenizeLine("`this` is `code`")
expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "this", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[2]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[3]).toEqual value: " is ", scopes: ["source.gfm"]
expect(tokens[4]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[5]).toEqual value: "code", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[6]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``")
expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``a\\`b``")
expect(tokens[0]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "a\\`b", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[2]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"]
it "tokenizes [links](links)", ->
{tokens} = grammar.tokenizeLine("please click [this link](website)")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes reference [links][links]", ->
{tokens} = grammar.tokenizeLine("please click [this link][website]")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes id-less reference [links][]", ->
{tokens} = grammar.tokenizeLine("please click [this link][]")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [link]: footers", ->
{tokens} = grammar.tokenizeLine("[aLink]: http://website")
expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "link", "punctuation.separator.key-value.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "link"]
expect(tokens[5]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
it "tokenizes [link]: <footers>", ->
{tokens} = grammar.tokenizeLine("[aLink]: <http://website>")
expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[3]).toEqual value: ": <", scopes: ["source.gfm", "link"]
expect(tokens[4]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[5]).toEqual value: ">", scopes: ["source.gfm", "link"]
it "tokenizes [](links)", ->
{tokens} = grammar.tokenizeLine("[](link)")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [][links]", ->
{tokens} = grammar.tokenizeLine("[][link]")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links][links]](links)", ->
{tokens} = grammar.tokenizeLine("[![title][image]](link)")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links][links]][links]", ->
{tokens} = grammar.tokenizeLine("[![title][image]][link]")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes mentions", ->
{tokens} = grammar.tokenizeLine("sentence with no space before@name ")
expect(tokens[0]).toEqual value: "sentence with no space before@name ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name '@name' @name's @name. @name, (@name) [@name]")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " '", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[4]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[5]).toEqual value: "' ", scopes: ["source.gfm"]
expect(tokens[6]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[7]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[8]).toEqual value: "'s ", scopes: ["source.gfm"]
expect(tokens[9]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[10]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[11]).toEqual value: ". ", scopes: ["source.gfm"]
expect(tokens[12]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[13]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[14]).toEqual value: ", (", scopes: ["source.gfm"]
expect(tokens[15]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[16]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[17]).toEqual value: ") [", scopes: ["source.gfm"]
expect(tokens[18]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[19]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[20]).toEqual value: "]", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine('"@name"')
expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name/ and an invalid symbol after")
expect(tokens[0]).toEqual value: "sentence with a space before @name/ and an invalid symbol after", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name that continues")
expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("* @name at the start of an unordered list")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[2]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[3]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[4]).toEqual value: " at the start of an unordered list", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337_hubot with numbers, letters and underscores")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337_hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and underscores", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337-hubot with numbers, letters and hyphens")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337-hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and hyphens", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name at the start of a line")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " at the start of a line", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("any email like PI:EMAIL:<EMAIL>END_PI shouldn't mistakenly be matched as a mention")
expect(tokens[0]).toEqual value: "any email like PI:EMAIL:<EMAIL>END_PI shouldn't mistakenly be matched as a mention", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person's")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: "'s", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person;")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm"]
it "tokenizes issue numbers", ->
{tokens} = grammar.tokenizeLine("sentence with no space before#12 ")
expect(tokens[0]).toEqual value: "sentence with no space before#12 ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" #101 '#101' #101's #101. #101, (#101) [#101]")
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: " '", scopes: ["source.gfm"]
expect(tokens[4]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[5]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[6]).toEqual value: "' ", scopes: ["source.gfm"]
expect(tokens[7]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[8]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[9]).toEqual value: "'s ", scopes: ["source.gfm"]
expect(tokens[10]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[11]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[12]).toEqual value: ". ", scopes: ["source.gfm"]
expect(tokens[13]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[14]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[15]).toEqual value: ", (", scopes: ["source.gfm"]
expect(tokens[16]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[17]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[18]).toEqual value: ") [", scopes: ["source.gfm"]
expect(tokens[19]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[20]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[21]).toEqual value: "]", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine('"#101"')
expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before #123i and a character after")
expect(tokens[0]).toEqual value: "sentence with a space before #123i and a character after", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before #123 that continues")
expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" #123's")
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: "'s", scopes: ["source.gfm"]
it "tokenizes unordered lists", ->
{tokens} = grammar.tokenizeLine("*Item 1")
expect(tokens[0]).not.toEqual value: "*Item 1", scopes: ["source.gfm", "variable.unordered.list.gfm"]
{tokens} = grammar.tokenizeLine(" * Item 1")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 1", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" + Item 2")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "+", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 2", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" - Item 3")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "-", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 3", scopes: ["source.gfm"]
it "tokenizes ordered lists", ->
{tokens} = grammar.tokenizeLine("1.First Item")
expect(tokens[0]).toEqual value: "1.First Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 1. First Item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "1.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "First Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 10. Tenth Item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "10.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Tenth Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 111. Hundred and eleventh item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "111.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Hundred and eleventh item", scopes: ["source.gfm"]
it "tokenizes > quoted text", ->
{tokens} = grammar.tokenizeLine("> Quotation :+1:")
expect(tokens[0]).toEqual value: ">", scopes: ["source.gfm", "comment.quote.gfm", "support.quote.gfm"]
expect(tokens[1]).toEqual value: " Quotation ", scopes: ["source.gfm", "comment.quote.gfm"]
expect(tokens[2]).toEqual value: ":", scopes: ["source.gfm", "comment.quote.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
expect(tokens[3]).toEqual value: "+1", scopes: ["source.gfm", "comment.quote.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
expect(tokens[4]).toEqual value: ":", scopes: ["source.gfm", "comment.quote.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
it "tokenizes HTML entities", ->
{tokens} = grammar.tokenizeLine("™ ™ &a1; ³")
expect(tokens[0]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[1]).toEqual value: "trade", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[3]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[4]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[5]).toEqual value: "#8482", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[6]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[7]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[8]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[9]).toEqual value: "a1", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[10]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[11]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[12]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[13]).toEqual value: "#xb3", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[14]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
it "tokenizes HTML entities in *italic* text", ->
{tokens} = grammar.tokenizeLine("*™ ™ ³*")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "*", scopes: ["source.gfm", "markup.italic.gfm"]
{tokens} = grammar.tokenizeLine("_™ ™ ³_")
expect(tokens[0]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "_", scopes: ["source.gfm", "markup.italic.gfm"]
it "tokenizes HTML entities in **bold** text", ->
{tokens} = grammar.tokenizeLine("**™ ™ ³**")
expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm"]
{tokens} = grammar.tokenizeLine("__™ ™ ³__")
expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm"]
it "tokenizes HTML entities in ***bold italic*** text", ->
{tokens} = grammar.tokenizeLine("***™ ™ ³***")
expect(tokens[0]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
{tokens} = grammar.tokenizeLine("___™ ™ ³___")
expect(tokens[0]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
it "tokenizes HTML entities in strikethrough text", ->
{tokens} = grammar.tokenizeLine("~~™ ™ ³~~")
expect(tokens[0]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
it "tokenizes HTML comments", ->
{tokens} = grammar.tokenizeLine("<!-- a comment -->")
expect(tokens[0]).toEqual value: "<!--", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"]
expect(tokens[1]).toEqual value: " a comment ", scopes: ["source.gfm", "comment.block.gfm"]
expect(tokens[2]).toEqual value: "-->", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"]
it "tokenizes YAML front matter", ->
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines """
---
front: matter
---
"""
expect(firstLineTokens[0]).toEqual value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]
expect(secondLineTokens[0]).toEqual value: "front: matter", scopes: ["source.gfm", "front-matter.yaml.gfm"]
expect(thirdLineTokens[0]).toEqual value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]
it "tokenizes linebreaks", ->
{tokens} = grammar.tokenizeLine("line ")
expect(tokens[0]).toEqual value: "line", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "linebreak.gfm"]
it "tokenizes tables", ->
[headerTokens, alignTokens, contentTokens] = grammar.tokenizeLines """
| Column 1 | Column 2 |
|:----------|:---------:|
| Content 1 | Content 2 |
"""
# Header line
expect(headerTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
expect(headerTokens[1]).toEqual value: " Column 1 ", scopes: ["source.gfm", "table.gfm"]
expect(headerTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
expect(headerTokens[3]).toEqual value: " Column 2 ", scopes: ["source.gfm", "table.gfm"]
expect(headerTokens[4]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
# Alignment line
expect(alignTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
expect(alignTokens[1]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
expect(alignTokens[2]).toEqual value: "----------", scopes: ["source.gfm", "table.gfm", "border.header"]
expect(alignTokens[3]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
expect(alignTokens[4]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
expect(alignTokens[5]).toEqual value: "---------", scopes: ["source.gfm", "table.gfm", "border.header"]
expect(alignTokens[6]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
expect(alignTokens[7]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
# Content line
expect(contentTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
expect(contentTokens[1]).toEqual value: " Content 1 ", scopes: ["source.gfm", "table.gfm"]
expect(contentTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
expect(contentTokens[3]).toEqual value: " Content 2 ", scopes: ["source.gfm", "table.gfm"]
expect(contentTokens[4]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
[headerTokens, emptyLineTokens, headingTokens] = grammar.tokenizeLines """
| Column 1 | Column 2\t
# Heading
"""
expect(headerTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
expect(headerTokens[1]).toEqual value: " Column 1 ", scopes: ["source.gfm", "table.gfm"]
expect(headerTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
expect(headerTokens[3]).toEqual value: " Column 2", scopes: ["source.gfm", "table.gfm"]
expect(headerTokens[4]).toEqual value: "\t", scopes: ["source.gfm", "table.gfm"]
expect(headingTokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
expect(headingTokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
expect(headingTokens[2]).toEqual value: "Heading", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
it "tokenizes criticmarkup", ->
[addToken, delToken, hlToken, subToken] = grammar.tokenizeLines """
Add{++ some text++}
Delete{-- some text--}
Highlight {==some text==}{>>with comment<<}
Replace {~~this~>by that~~}
"""
# Addition
expect(addToken[0]).toEqual value: "Add", scopes: ["source.gfm"]
expect(addToken[1]).toEqual value: "{++", scopes: ["source.gfm", "critic.gfm.addition", "critic.gfm.addition.marker"]
expect(addToken[2]).toEqual value: " some text", scopes: ["source.gfm", "critic.gfm.addition"]
expect(addToken[3]).toEqual value: "++}", scopes: ["source.gfm", "critic.gfm.addition", "critic.gfm.addition.marker"]
# Deletion
expect(delToken[0]).toEqual value: "Delete", scopes: ["source.gfm"]
expect(delToken[1]).toEqual value: "{--", scopes: ["source.gfm", "critic.gfm.deletion", "critic.gfm.deletion.marker"]
expect(delToken[2]).toEqual value: " some text", scopes: ["source.gfm", "critic.gfm.deletion"]
expect(delToken[3]).toEqual value: "--}", scopes: ["source.gfm", "critic.gfm.deletion", "critic.gfm.deletion.marker"]
# Comment and highlight
expect(hlToken[0]).toEqual value: "Highlight ", scopes: ["source.gfm"]
expect(hlToken[1]).toEqual value: "{==", scopes: ["source.gfm", "critic.gfm.highlight", "critic.gfm.highlight.marker"]
expect(hlToken[2]).toEqual value: "some text", scopes: ["source.gfm", "critic.gfm.highlight"]
expect(hlToken[3]).toEqual value: "==}", scopes: ["source.gfm", "critic.gfm.highlight", "critic.gfm.highlight.marker"]
expect(hlToken[4]).toEqual value: "{>>", scopes: ["source.gfm", "critic.gfm.comment", "critic.gfm.comment.marker"]
expect(hlToken[5]).toEqual value: "with comment", scopes: ["source.gfm", "critic.gfm.comment"]
expect(hlToken[6]).toEqual value: "<<}", scopes: ["source.gfm", "critic.gfm.comment", "critic.gfm.comment.marker"]
# Replace
expect(subToken[0]).toEqual value: "Replace ", scopes: ["source.gfm"]
expect(subToken[1]).toEqual value: "{~~", scopes: ["source.gfm", "critic.gfm.substitution", "critic.gfm.substitution.marker"]
expect(subToken[2]).toEqual value: "this", scopes: ["source.gfm", "critic.gfm.substitution"]
expect(subToken[3]).toEqual value: "~>", scopes: ["source.gfm", "critic.gfm.substitution", "critic.gfm.substitution.operator"]
expect(subToken[4]).toEqual value: "by that", scopes: ["source.gfm", "critic.gfm.substitution"]
expect(subToken[5]).toEqual value: "~~}", scopes: ["source.gfm", "critic.gfm.substitution", "critic.gfm.substitution.marker"]
|
[
{
"context": "is file is part of the ChinesePuzzle package.\n\n(c) Mathieu Ledru\n\nFor the full copyright and license information, ",
"end": 70,
"score": 0.9998481869697571,
"start": 57,
"tag": "NAME",
"value": "Mathieu Ledru"
}
] | Common/Bin/Data/coffee/GameStruct.coffee | matyo91/ChinesePuzzle | 1 | ###
This file is part of the ChinesePuzzle package.
(c) Mathieu Ledru
For the full copyright and license information, please view the LICENSE
file that was distributed with this source code.
###
cpz.GridCoord = cc.Class.extend(
i: 0
j: 0
ctor: ->
encode: ->
i: @i
j: @j
decode: (data) ->
@i = data['i']
@j = data['j']
@
)
cpz.GridCoord.decode = (data) ->
obj = new cpz.GridCoord()
obj.decode(data)
return obj
cpz.gc = (i, j) ->
coord = new cpz.GridCoord()
coord.i = i
coord.j = j
coord
cpz.MoveCoord = cc.Class.extend(
from: null
to: null
ctor: ->
encode: ->
from: @from.encode()
to: @to.encode()
decode: (data) ->
@from = cpz.GridCoord.decode(data['from'])
@to = cpz.GridCoord.decode(data['to'])
@
)
cpz.MoveCoord.decode = (data) ->
obj = new cpz.MoveCoord()
obj.decode(data)
return obj
cpz.mv = (from, to) ->
coord = new cpz.MoveCoord()
coord.from = from
coord.to = to
coord
| 145875 | ###
This file is part of the ChinesePuzzle package.
(c) <NAME>
For the full copyright and license information, please view the LICENSE
file that was distributed with this source code.
###
cpz.GridCoord = cc.Class.extend(
i: 0
j: 0
ctor: ->
encode: ->
i: @i
j: @j
decode: (data) ->
@i = data['i']
@j = data['j']
@
)
cpz.GridCoord.decode = (data) ->
obj = new cpz.GridCoord()
obj.decode(data)
return obj
cpz.gc = (i, j) ->
coord = new cpz.GridCoord()
coord.i = i
coord.j = j
coord
cpz.MoveCoord = cc.Class.extend(
from: null
to: null
ctor: ->
encode: ->
from: @from.encode()
to: @to.encode()
decode: (data) ->
@from = cpz.GridCoord.decode(data['from'])
@to = cpz.GridCoord.decode(data['to'])
@
)
cpz.MoveCoord.decode = (data) ->
obj = new cpz.MoveCoord()
obj.decode(data)
return obj
cpz.mv = (from, to) ->
coord = new cpz.MoveCoord()
coord.from = from
coord.to = to
coord
| true | ###
This file is part of the ChinesePuzzle package.
(c) PI:NAME:<NAME>END_PI
For the full copyright and license information, please view the LICENSE
file that was distributed with this source code.
###
cpz.GridCoord = cc.Class.extend(
i: 0
j: 0
ctor: ->
encode: ->
i: @i
j: @j
decode: (data) ->
@i = data['i']
@j = data['j']
@
)
cpz.GridCoord.decode = (data) ->
obj = new cpz.GridCoord()
obj.decode(data)
return obj
cpz.gc = (i, j) ->
coord = new cpz.GridCoord()
coord.i = i
coord.j = j
coord
cpz.MoveCoord = cc.Class.extend(
from: null
to: null
ctor: ->
encode: ->
from: @from.encode()
to: @to.encode()
decode: (data) ->
@from = cpz.GridCoord.decode(data['from'])
@to = cpz.GridCoord.decode(data['to'])
@
)
cpz.MoveCoord.decode = (data) ->
obj = new cpz.MoveCoord()
obj.decode(data)
return obj
cpz.mv = (from, to) ->
coord = new cpz.MoveCoord()
coord.from = from
coord.to = to
coord
|
[
{
"context": "'moment'\n\nyoutubeRegex = /\\?v=(.{11})/\n\napiKey = 'AIzaSyDxJwk3B-SVXot_r1E2Ys3RbFN_D3ygOcw'\nytApi = youtubeGet(apiKey)\n\nclass Youtube\n\n s",
"end": 166,
"score": 0.999757707118988,
"start": 127,
"tag": "KEY",
"value": "AIzaSyDxJwk3B-SVXot_r1E2Ys3RbFN_D3ygOcw"
}
] | app/domain/youtube/youtube.coffee | jas-bar/netjockey-server-node | 2 | youtubeGet = require 'youtube-get'
Song = require '../song'
moment = require 'moment'
youtubeRegex = /\?v=(.{11})/
apiKey = 'AIzaSyDxJwk3B-SVXot_r1E2Ys3RbFN_D3ygOcw'
ytApi = youtubeGet(apiKey)
class Youtube
songFromUrl: (callback, url)->
array = youtubeRegex.exec(url)
if array?
videoId = array[array.length - 1]
if videoId?
ytApi("videos", {
'id': videoId
'part': 'id, snippet, contentDetails'
}, (err, data) ->
item = data['items'][0]
duration = moment.duration(item['contentDetails']['duration']).asSeconds()
title = item['snippet']['title']
videoId = item['id']
thumbnailUrl = item['snippet']['thumbnails']['default']['url']
song = new Song(duration, title, videoId, thumbnailUrl)
callback(song)
)
{status: "ok"}
else
callback(undefined)
module.exports = new Youtube()
| 66878 | youtubeGet = require 'youtube-get'
Song = require '../song'
moment = require 'moment'
youtubeRegex = /\?v=(.{11})/
apiKey = '<KEY>'
ytApi = youtubeGet(apiKey)
class Youtube
songFromUrl: (callback, url)->
array = youtubeRegex.exec(url)
if array?
videoId = array[array.length - 1]
if videoId?
ytApi("videos", {
'id': videoId
'part': 'id, snippet, contentDetails'
}, (err, data) ->
item = data['items'][0]
duration = moment.duration(item['contentDetails']['duration']).asSeconds()
title = item['snippet']['title']
videoId = item['id']
thumbnailUrl = item['snippet']['thumbnails']['default']['url']
song = new Song(duration, title, videoId, thumbnailUrl)
callback(song)
)
{status: "ok"}
else
callback(undefined)
module.exports = new Youtube()
| true | youtubeGet = require 'youtube-get'
Song = require '../song'
moment = require 'moment'
youtubeRegex = /\?v=(.{11})/
apiKey = 'PI:KEY:<KEY>END_PI'
ytApi = youtubeGet(apiKey)
class Youtube
songFromUrl: (callback, url)->
array = youtubeRegex.exec(url)
if array?
videoId = array[array.length - 1]
if videoId?
ytApi("videos", {
'id': videoId
'part': 'id, snippet, contentDetails'
}, (err, data) ->
item = data['items'][0]
duration = moment.duration(item['contentDetails']['duration']).asSeconds()
title = item['snippet']['title']
videoId = item['id']
thumbnailUrl = item['snippet']['thumbnails']['default']['url']
song = new Song(duration, title, videoId, thumbnailUrl)
callback(song)
)
{status: "ok"}
else
callback(undefined)
module.exports = new Youtube()
|
[
{
"context": " headerObjects = [\n { header: { key1: 'value1a', key2: 'value2a' } }\n { header: { key1: '",
"end": 1563,
"score": 0.9733762741088867,
"start": 1556,
"tag": "KEY",
"value": "value1a"
},
{
"context": "s = [\n { header: { key1: 'value1a', key2: '... | test/lib/test.coffee | elidoran/node-cio-oboe | 0 | assert = require 'assert'
corepath = require 'path'
buildCio = require 'cio'
buildOboe = require 'oboe'
listener = require '../../lib'
# oboe lib
lib = corepath.resolve __dirname, '..', '..', 'lib'
describe 'test oboe', ->
describe 'with fake socket', ->
# pass a fake socket to the listener
fakeSocket =
events: {}
emits: {}
pipe: (stream) ->
stream.pipedFrom = this
return stream
on: (event, listener) ->
if @events[event]?
@events[event] = [ @events[event] ]
@events[event].push listener
else
@events[event] = listener
emit: (event, args...) ->
@emits[event] = args
context =
isSecure: false
client : fakeSocket
oboe : true
# call the listener as if a new socket connection has been made
listener.call context
before 'wait for oboe event', (done) -> setTimeout done, 10
it 'should call plugin to create the oboe and attach to the socket', ->
assert fakeSocket.oboe
it 'should emit the \'oboe\' event', ->
assert.deepEqual fakeSocket.emits.oboe[0], fakeSocket.oboe
describe 'with client and server', ->
describe.only 'with defaults', ->
cio = buildCio()
cio.onServerClient listener
# remember these for assertions
client = null
server = null
listening = false
connected = false
closed = false
oboed = {}
roots = []
headers = []
nodes = []
headerObjects = [
{ header: { key1: 'value1a', key2: 'value2a' } }
{ header: { key1: 'value1b', key2: 'value2b' } }
# { header: { key1: 'value1c', key2: 'value2c' } }
# { header: { key1: 'value1d', key2: 'value2d' } }
# { header: { key1: 'value1e', key2: 'value2e' } }
# { header: { key1: 'value1f', key2: 'value2f' } }
]
before 'build server', ->
# use `cio` to create a server with a tranform (and an arbitrary port)
server = cio.server
onConnect: (connection) ->
serverConnection = connection
serverConnection.on 'end', ->
server.close()
oboe:
root: (object) ->
roots.push object
return
top:
header: (header) ->
headers.push header
return header
node:
'!.header': (header) ->
nodes.push header
return header
fail: (info...) ->
console.log 'oboe fail:',info
return
server.on 'error', (error) -> console.log 'Server error:',error
# once the server is listening do the client stuffs
server.on 'listening', ->
listening = true
# create a client via `cio` with its transform and the same port as the server
client = cio.client
port : server.address().port
host : 'localhost'
onConnect: ->
connected = true
console.log 'client connected'
for el,index in headerObjects
client.write JSON.stringify(el), 'utf8'
client.end ->
console.log 'client ended'
client.on 'error', (error) -> console.log 'client error:',error
server.on 'close', -> closed = true
before 'wait for server to listen', (done) ->
server.listen 1357, 'localhost', done
before 'wait for server to close', (done) ->
server.on 'close', done
it 'should listen', -> assert.equal listening, true
it 'should connect', -> assert.equal connected, true
it 'should receive root objects', ->
assert.equal roots.length, 2
for object,index in headerObjects
assert.deepEqual roots[index], object
it 'should receive header objects', ->
assert.equal headers.length, 2
for object,index in headerObjects
assert.deepEqual headers[index], object.header
it 'should receive node objects', ->
assert.equal nodes.length, 2
for object,index in headerObjects
assert.deepEqual nodes[index], object.header
it 'should close', -> assert.equal closed, true
| 184155 | assert = require 'assert'
corepath = require 'path'
buildCio = require 'cio'
buildOboe = require 'oboe'
listener = require '../../lib'
# oboe lib
lib = corepath.resolve __dirname, '..', '..', 'lib'
describe 'test oboe', ->
describe 'with fake socket', ->
# pass a fake socket to the listener
fakeSocket =
events: {}
emits: {}
pipe: (stream) ->
stream.pipedFrom = this
return stream
on: (event, listener) ->
if @events[event]?
@events[event] = [ @events[event] ]
@events[event].push listener
else
@events[event] = listener
emit: (event, args...) ->
@emits[event] = args
context =
isSecure: false
client : fakeSocket
oboe : true
# call the listener as if a new socket connection has been made
listener.call context
before 'wait for oboe event', (done) -> setTimeout done, 10
it 'should call plugin to create the oboe and attach to the socket', ->
assert fakeSocket.oboe
it 'should emit the \'oboe\' event', ->
assert.deepEqual fakeSocket.emits.oboe[0], fakeSocket.oboe
describe 'with client and server', ->
describe.only 'with defaults', ->
cio = buildCio()
cio.onServerClient listener
# remember these for assertions
client = null
server = null
listening = false
connected = false
closed = false
oboed = {}
roots = []
headers = []
nodes = []
headerObjects = [
{ header: { key1: '<KEY>', key2: '<KEY>' } }
{ header: { key1: '<KEY>b', key2: '<KEY>' } }
# { header: { key1: 'value<KEY>c', key2: 'value<KEY>' } }
# { header: { key1: '<KEY>', key2: '<KEY>' } }
# { header: { key1: '<KEY>', key2: '<KEY>' } }
# { header: { key1: '<KEY>', key2: '<KEY>' } }
]
before 'build server', ->
# use `cio` to create a server with a tranform (and an arbitrary port)
server = cio.server
onConnect: (connection) ->
serverConnection = connection
serverConnection.on 'end', ->
server.close()
oboe:
root: (object) ->
roots.push object
return
top:
header: (header) ->
headers.push header
return header
node:
'!.header': (header) ->
nodes.push header
return header
fail: (info...) ->
console.log 'oboe fail:',info
return
server.on 'error', (error) -> console.log 'Server error:',error
# once the server is listening do the client stuffs
server.on 'listening', ->
listening = true
# create a client via `cio` with its transform and the same port as the server
client = cio.client
port : server.address().port
host : 'localhost'
onConnect: ->
connected = true
console.log 'client connected'
for el,index in headerObjects
client.write JSON.stringify(el), 'utf8'
client.end ->
console.log 'client ended'
client.on 'error', (error) -> console.log 'client error:',error
server.on 'close', -> closed = true
before 'wait for server to listen', (done) ->
server.listen 1357, 'localhost', done
before 'wait for server to close', (done) ->
server.on 'close', done
it 'should listen', -> assert.equal listening, true
it 'should connect', -> assert.equal connected, true
it 'should receive root objects', ->
assert.equal roots.length, 2
for object,index in headerObjects
assert.deepEqual roots[index], object
it 'should receive header objects', ->
assert.equal headers.length, 2
for object,index in headerObjects
assert.deepEqual headers[index], object.header
it 'should receive node objects', ->
assert.equal nodes.length, 2
for object,index in headerObjects
assert.deepEqual nodes[index], object.header
it 'should close', -> assert.equal closed, true
| true | assert = require 'assert'
corepath = require 'path'
buildCio = require 'cio'
buildOboe = require 'oboe'
listener = require '../../lib'
# oboe lib
lib = corepath.resolve __dirname, '..', '..', 'lib'
describe 'test oboe', ->
describe 'with fake socket', ->
# pass a fake socket to the listener
fakeSocket =
events: {}
emits: {}
pipe: (stream) ->
stream.pipedFrom = this
return stream
on: (event, listener) ->
if @events[event]?
@events[event] = [ @events[event] ]
@events[event].push listener
else
@events[event] = listener
emit: (event, args...) ->
@emits[event] = args
context =
isSecure: false
client : fakeSocket
oboe : true
# call the listener as if a new socket connection has been made
listener.call context
before 'wait for oboe event', (done) -> setTimeout done, 10
it 'should call plugin to create the oboe and attach to the socket', ->
assert fakeSocket.oboe
it 'should emit the \'oboe\' event', ->
assert.deepEqual fakeSocket.emits.oboe[0], fakeSocket.oboe
describe 'with client and server', ->
describe.only 'with defaults', ->
cio = buildCio()
cio.onServerClient listener
# remember these for assertions
client = null
server = null
listening = false
connected = false
closed = false
oboed = {}
roots = []
headers = []
nodes = []
headerObjects = [
{ header: { key1: 'PI:KEY:<KEY>END_PI', key2: 'PI:KEY:<KEY>END_PI' } }
{ header: { key1: 'PI:KEY:<KEY>END_PIb', key2: 'PI:KEY:<KEY>END_PI' } }
# { header: { key1: 'valuePI:KEY:<KEY>END_PIc', key2: 'valuePI:KEY:<KEY>END_PI' } }
# { header: { key1: 'PI:KEY:<KEY>END_PI', key2: 'PI:KEY:<KEY>END_PI' } }
# { header: { key1: 'PI:KEY:<KEY>END_PI', key2: 'PI:KEY:<KEY>END_PI' } }
# { header: { key1: 'PI:KEY:<KEY>END_PI', key2: 'PI:KEY:<KEY>END_PI' } }
]
before 'build server', ->
# use `cio` to create a server with a tranform (and an arbitrary port)
server = cio.server
onConnect: (connection) ->
serverConnection = connection
serverConnection.on 'end', ->
server.close()
oboe:
root: (object) ->
roots.push object
return
top:
header: (header) ->
headers.push header
return header
node:
'!.header': (header) ->
nodes.push header
return header
fail: (info...) ->
console.log 'oboe fail:',info
return
server.on 'error', (error) -> console.log 'Server error:',error
# once the server is listening do the client stuffs
server.on 'listening', ->
listening = true
# create a client via `cio` with its transform and the same port as the server
client = cio.client
port : server.address().port
host : 'localhost'
onConnect: ->
connected = true
console.log 'client connected'
for el,index in headerObjects
client.write JSON.stringify(el), 'utf8'
client.end ->
console.log 'client ended'
client.on 'error', (error) -> console.log 'client error:',error
server.on 'close', -> closed = true
before 'wait for server to listen', (done) ->
server.listen 1357, 'localhost', done
before 'wait for server to close', (done) ->
server.on 'close', done
it 'should listen', -> assert.equal listening, true
it 'should connect', -> assert.equal connected, true
it 'should receive root objects', ->
assert.equal roots.length, 2
for object,index in headerObjects
assert.deepEqual roots[index], object
it 'should receive header objects', ->
assert.equal headers.length, 2
for object,index in headerObjects
assert.deepEqual headers[index], object.header
it 'should receive node objects', ->
assert.equal nodes.length, 2
for object,index in headerObjects
assert.deepEqual nodes[index], object.header
it 'should close', -> assert.equal closed, true
|
[
{
"context": "e='password'] | //textarea | //select\"\n keys: 'abcdefghijklmnopqrstuvwxyz'\n hintClass: 'ushints_hint'\n selectedHintCl",
"end": 279,
"score": 0.9996112585067749,
"start": 253,
"tag": "KEY",
"value": "abcdefghijklmnopqrstuvwxyz"
}
] | chrome/userScripts/hints.us.coffee | sloonz/utils | 1 | config =
defaultExpr: "//a[@href or @onclick or @oncommand] | //input[@type='button' or @type='submit' or @type='radio' or @type='checkbox']"
formExpr: "//input[@type='text' or not(@type) or @type='password'] | //textarea | //select"
keys: 'abcdefghijklmnopqrstuvwxyz'
hintClass: 'ushints_hint'
selectedHintClass: 'ushints_hint ushints_sel_hint'
stopOnEmptyMatch: false
pow = (base, n)->
return 1 if n == 0
return base if n == 1
return base * pow(base, n-1)
min = (x,y)->
return x if x < y
return y
max = (x,y)->
return x if x > y
return y
innerRect = (elem)->
top = elem.offset().top
left = elem.offset().left
return [top, top + elem.innerHeight(),
left, left + elem.innerWidth()]
rectIntersect = (ecoords, vcoords)->
top = max ecoords[0], vcoords[0]
bottom = min ecoords[1], vcoords[1]
left = max ecoords[2], vcoords[2]
right = min ecoords[3], vcoords[3]
if left > right or top > bottom
return false
else
return [top, bottom, left, right]
nextNumber = (number, base)->
ret = []
inc = true
for chiffer in number
if inc
chiffer = (chiffer + 1) % base
inc = (chiffer == 0)
ret.push chiffer
return ret
numberToString = (number)->
res = ""
(res = config.keys[digit] + res) for digit in number
return res.toUpperCase()
isVisible = (element, vcoords)->
return rectIntersect(innerRect(element), vcoords) and element.css('visibility') != 'hidden'
xpath = (expr, document)->
elems = []
res = document.evaluate(expr, document, null, XPathResult.ANY_TYPE, null)
elems.push elem while elem = res.iterateNext()
return elems
class UsHints
constructor: (@document)->
@container = null
@curHints = null
@callback = null
stop: ->
@curHints = null
if @container?
$(@container).remove()
@container = null
start: (expr, @callback)->
# Find visible elements
win = @document.defaultView
vcoords = [win.pageYOffset, win.pageYOffset + win.innerHeight,
win.pageXOffset, win.pageXOffset + win.innerWidth]
elems = (elem for elem in $(xpath(expr, @document)).filter(':visible') when isVisible($(elem), vcoords))
# How many letters do we need ?
curHint = [0]
curHint.push(0) while elems.length >= pow(config.keys.length, curHint.length)
# Create hints
@container = @document.body.appendChild @document.createElement 'div'
@curHints = []
for elem in elems
rect = innerRect $(elem)
hint = @document.createElement 'div'
label = numberToString curHint
$(hint).html label
$(hint).css
top: rect[0] + "px"
left: rect[2] + "px"
hint.className = config.selectedHintClass
@curHints.push
elem: elem
hint: hint
label: label
@container.appendChild hint
curHint = nextNumber curHint, config.keys.length
updateSelectedLinks: (letters)->
hints = []
for hint in @curHints
if hint.label.indexOf(letters) == 0
hints.push hint
hint.hint.className = config.selectedHintClass
else
hint.hint.className = config.hintClass
return hints
hit: (hint)->
ret = this.updateSelectedLinks hint.toUpperCase()
if ret.length == 0
if config.stopOnEmptyMatch
this.stop()
return false
else if ret.length == 1
if @callback and not @callback(this, ret[0].elem)
this.stop()
return false
return true
callbacks =
simulateClick: (ush, element)->
element.focus()
element.dispatchEvent(userScripts.utils.createClickEvent(document))
return false
simulateCtrlClick: (ush, element)->
element.focus()
if navigator.appVersion.indexOf("Mac") != -1
[ctrl, meta] = [false, true]
else
[ctrl, meta] = [true, false]
element.dispatchEvent(userScripts.utils.createClickEvent(document, ctrl, false, false, meta))
return false
activate: (ush, element)->
element.focus()
return false
userScripts.register
include: "*"
callback: (document)->
ush = new UsHints(document)
bindCommand = (command, expr, callback)->
userScripts.bindCommand document, command, (document, event, hint)->
if not hint?
ush.stop()
return false
hint = hint.split(':')[1]
if hint == ""
ush.start expr, callback
return true
else
return ush.hit hint
bindCommand "f:", config.defaultExpr, callbacks.simulateClick
bindCommand "F:", config.defaultExpr, callbacks.simulateCtrlClick
bindCommand "a:", "#{config.defaultExpr}|#{config.formExpr}", callbacks.activate
| 94804 | config =
defaultExpr: "//a[@href or @onclick or @oncommand] | //input[@type='button' or @type='submit' or @type='radio' or @type='checkbox']"
formExpr: "//input[@type='text' or not(@type) or @type='password'] | //textarea | //select"
keys: '<KEY>'
hintClass: 'ushints_hint'
selectedHintClass: 'ushints_hint ushints_sel_hint'
stopOnEmptyMatch: false
pow = (base, n)->
return 1 if n == 0
return base if n == 1
return base * pow(base, n-1)
min = (x,y)->
return x if x < y
return y
max = (x,y)->
return x if x > y
return y
innerRect = (elem)->
top = elem.offset().top
left = elem.offset().left
return [top, top + elem.innerHeight(),
left, left + elem.innerWidth()]
rectIntersect = (ecoords, vcoords)->
top = max ecoords[0], vcoords[0]
bottom = min ecoords[1], vcoords[1]
left = max ecoords[2], vcoords[2]
right = min ecoords[3], vcoords[3]
if left > right or top > bottom
return false
else
return [top, bottom, left, right]
nextNumber = (number, base)->
ret = []
inc = true
for chiffer in number
if inc
chiffer = (chiffer + 1) % base
inc = (chiffer == 0)
ret.push chiffer
return ret
numberToString = (number)->
res = ""
(res = config.keys[digit] + res) for digit in number
return res.toUpperCase()
isVisible = (element, vcoords)->
return rectIntersect(innerRect(element), vcoords) and element.css('visibility') != 'hidden'
xpath = (expr, document)->
elems = []
res = document.evaluate(expr, document, null, XPathResult.ANY_TYPE, null)
elems.push elem while elem = res.iterateNext()
return elems
class UsHints
constructor: (@document)->
@container = null
@curHints = null
@callback = null
stop: ->
@curHints = null
if @container?
$(@container).remove()
@container = null
start: (expr, @callback)->
# Find visible elements
win = @document.defaultView
vcoords = [win.pageYOffset, win.pageYOffset + win.innerHeight,
win.pageXOffset, win.pageXOffset + win.innerWidth]
elems = (elem for elem in $(xpath(expr, @document)).filter(':visible') when isVisible($(elem), vcoords))
# How many letters do we need ?
curHint = [0]
curHint.push(0) while elems.length >= pow(config.keys.length, curHint.length)
# Create hints
@container = @document.body.appendChild @document.createElement 'div'
@curHints = []
for elem in elems
rect = innerRect $(elem)
hint = @document.createElement 'div'
label = numberToString curHint
$(hint).html label
$(hint).css
top: rect[0] + "px"
left: rect[2] + "px"
hint.className = config.selectedHintClass
@curHints.push
elem: elem
hint: hint
label: label
@container.appendChild hint
curHint = nextNumber curHint, config.keys.length
updateSelectedLinks: (letters)->
hints = []
for hint in @curHints
if hint.label.indexOf(letters) == 0
hints.push hint
hint.hint.className = config.selectedHintClass
else
hint.hint.className = config.hintClass
return hints
hit: (hint)->
ret = this.updateSelectedLinks hint.toUpperCase()
if ret.length == 0
if config.stopOnEmptyMatch
this.stop()
return false
else if ret.length == 1
if @callback and not @callback(this, ret[0].elem)
this.stop()
return false
return true
callbacks =
simulateClick: (ush, element)->
element.focus()
element.dispatchEvent(userScripts.utils.createClickEvent(document))
return false
simulateCtrlClick: (ush, element)->
element.focus()
if navigator.appVersion.indexOf("Mac") != -1
[ctrl, meta] = [false, true]
else
[ctrl, meta] = [true, false]
element.dispatchEvent(userScripts.utils.createClickEvent(document, ctrl, false, false, meta))
return false
activate: (ush, element)->
element.focus()
return false
userScripts.register
include: "*"
callback: (document)->
ush = new UsHints(document)
bindCommand = (command, expr, callback)->
userScripts.bindCommand document, command, (document, event, hint)->
if not hint?
ush.stop()
return false
hint = hint.split(':')[1]
if hint == ""
ush.start expr, callback
return true
else
return ush.hit hint
bindCommand "f:", config.defaultExpr, callbacks.simulateClick
bindCommand "F:", config.defaultExpr, callbacks.simulateCtrlClick
bindCommand "a:", "#{config.defaultExpr}|#{config.formExpr}", callbacks.activate
| true | config =
defaultExpr: "//a[@href or @onclick or @oncommand] | //input[@type='button' or @type='submit' or @type='radio' or @type='checkbox']"
formExpr: "//input[@type='text' or not(@type) or @type='password'] | //textarea | //select"
keys: 'PI:KEY:<KEY>END_PI'
hintClass: 'ushints_hint'
selectedHintClass: 'ushints_hint ushints_sel_hint'
stopOnEmptyMatch: false
pow = (base, n)->
return 1 if n == 0
return base if n == 1
return base * pow(base, n-1)
min = (x,y)->
return x if x < y
return y
max = (x,y)->
return x if x > y
return y
innerRect = (elem)->
top = elem.offset().top
left = elem.offset().left
return [top, top + elem.innerHeight(),
left, left + elem.innerWidth()]
rectIntersect = (ecoords, vcoords)->
top = max ecoords[0], vcoords[0]
bottom = min ecoords[1], vcoords[1]
left = max ecoords[2], vcoords[2]
right = min ecoords[3], vcoords[3]
if left > right or top > bottom
return false
else
return [top, bottom, left, right]
nextNumber = (number, base)->
ret = []
inc = true
for chiffer in number
if inc
chiffer = (chiffer + 1) % base
inc = (chiffer == 0)
ret.push chiffer
return ret
numberToString = (number)->
res = ""
(res = config.keys[digit] + res) for digit in number
return res.toUpperCase()
isVisible = (element, vcoords)->
return rectIntersect(innerRect(element), vcoords) and element.css('visibility') != 'hidden'
xpath = (expr, document)->
elems = []
res = document.evaluate(expr, document, null, XPathResult.ANY_TYPE, null)
elems.push elem while elem = res.iterateNext()
return elems
class UsHints
constructor: (@document)->
@container = null
@curHints = null
@callback = null
stop: ->
@curHints = null
if @container?
$(@container).remove()
@container = null
start: (expr, @callback)->
# Find visible elements
win = @document.defaultView
vcoords = [win.pageYOffset, win.pageYOffset + win.innerHeight,
win.pageXOffset, win.pageXOffset + win.innerWidth]
elems = (elem for elem in $(xpath(expr, @document)).filter(':visible') when isVisible($(elem), vcoords))
# How many letters do we need ?
curHint = [0]
curHint.push(0) while elems.length >= pow(config.keys.length, curHint.length)
# Create hints
@container = @document.body.appendChild @document.createElement 'div'
@curHints = []
for elem in elems
rect = innerRect $(elem)
hint = @document.createElement 'div'
label = numberToString curHint
$(hint).html label
$(hint).css
top: rect[0] + "px"
left: rect[2] + "px"
hint.className = config.selectedHintClass
@curHints.push
elem: elem
hint: hint
label: label
@container.appendChild hint
curHint = nextNumber curHint, config.keys.length
updateSelectedLinks: (letters)->
hints = []
for hint in @curHints
if hint.label.indexOf(letters) == 0
hints.push hint
hint.hint.className = config.selectedHintClass
else
hint.hint.className = config.hintClass
return hints
hit: (hint)->
ret = this.updateSelectedLinks hint.toUpperCase()
if ret.length == 0
if config.stopOnEmptyMatch
this.stop()
return false
else if ret.length == 1
if @callback and not @callback(this, ret[0].elem)
this.stop()
return false
return true
callbacks =
simulateClick: (ush, element)->
element.focus()
element.dispatchEvent(userScripts.utils.createClickEvent(document))
return false
simulateCtrlClick: (ush, element)->
element.focus()
if navigator.appVersion.indexOf("Mac") != -1
[ctrl, meta] = [false, true]
else
[ctrl, meta] = [true, false]
element.dispatchEvent(userScripts.utils.createClickEvent(document, ctrl, false, false, meta))
return false
activate: (ush, element)->
element.focus()
return false
userScripts.register
include: "*"
callback: (document)->
ush = new UsHints(document)
bindCommand = (command, expr, callback)->
userScripts.bindCommand document, command, (document, event, hint)->
if not hint?
ush.stop()
return false
hint = hint.split(':')[1]
if hint == ""
ush.start expr, callback
return true
else
return ush.hit hint
bindCommand "f:", config.defaultExpr, callbacks.simulateClick
bindCommand "F:", config.defaultExpr, callbacks.simulateCtrlClick
bindCommand "a:", "#{config.defaultExpr}|#{config.formExpr}", callbacks.activate
|
[
{
"context": "-----------------------------\n# Copyright (c) 2012 Patrick Mueller\n#\n# Licensed under the Apache License, Version 2.",
"end": 117,
"score": 0.9998325109481812,
"start": 102,
"tag": "NAME",
"value": "Patrick Mueller"
}
] | lib/offl-shell/Processor.coffee | pmuellr/offl-site | 1 | #-------------------------------------------------------------------------------
# Copyright (c) 2012 Patrick Mueller
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
fs = require 'fs'
path = require 'path'
events = require 'events'
util = require 'util'
crypto = require 'crypto'
utils = require '../utils'
FileSet = require '../FileSet'
Properties = require '../Properties'
#-------------------------------------------------------------------------------
module.exports = class Processor extends events.EventEmitter
#---------------------------------------------------------------------------
constructor: (@iDir, @oDir, @options) ->
@records = []
#---------------------------------------------------------------------------
process: ->
@iDirFull = path.resolve @iDir
@oDirFull = path.resolve @oDir
# console.log "iDir: #{@iDir} #{@iDirFull}"
# console.log "oDir: #{@oDir} #{@oDirFull}"
# console.log "options: #{JSON.stringify(@options)}"
# console.log ""
if !path.existsSync @iDirFull
@emitErrorMessage "input directory '#{@iDir}' does not exist"
return
if !path.existsSync @oDirFull
@emitErrorMessage "output directory '#{@oDir}' does not exist"
return
iStats = fs.statSync @iDirFull
oStats = fs.statSync @oDirFull
if !iStats.isDirectory()
@emitErrorMessage "input directory '#{@iDir}' is not a directory"
return
if !oStats.isDirectory()
@emitErrorMessage "output directory '#{@oDir}' is not a directory"
return
if @iDirFull == @oDirFull
@emitErrorMessage "the input and output directory cannot be the same"
return
config = @readConfig()
config.name ||= 'no title provided'
mainHtml = config['main.html'] || 'main.html'
mainCss = config['main.css'] || 'main.css'
mainJs = config['main.js'] || 'main.js'
mainHtml = utils.readFile(path.join(@iDir, mainHtml))
mainHtml ||= '<!-- no HTML provided -->'
mainCss = utils.readFile(path.join(@iDir, mainCss))
mainCss ||= '/* no CSS provided */'
mainJs = utils.readFile(path.join(@iDir, mainJs))
mainJs ||= '/* no JavaScript provided */'
config['main.html'] = mainHtml
config['main.css'] = mainCss
config['main.js'] = mainJs
@emptyDir(@oDirFull)
@writeIndexHtml(config)
@writeIndexManifest(config)
@writeHtAccess(config)
# @checkForMain @iDirFull
# iFileSet = FileSet.fromDir(@iDirFull)
# oFileSet = FileSet.fromDir(@oDirFull)
# console.log "iDir files:"
# iFileSet.dump()
# console.log ""
# console.log "oDir files:"
# oFileSet.dump()
# @copyFiles(@oDirFull, @iDirFull, @)
# manifest = path.join(@oDirFull, 'offl-site.manifest.txt')
# contents = JSON.stringify(@records, null, 4)
# fs.writeFileSync manifest, contents
# manifest = path.join(@oDir, path.basename(manifest))
# utils.logVerbose "created: #{manifest}"
utils.log "shell created in: #{@oDir}"
@emit 'done'
#---------------------------------------------------------------------------
writeIndexHtml: (config) ->
# <link rel="apple-touch-icon" sizes="72x72" href="touch-icon-ipad.png" />
# <meta name="apple-mobile-web-app-capable" content="yes" />
# <meta name="apple-mobile-web-app-status-bar-style" content="black" />
# <meta name="viewport" content="user-scalable=no, initial-scale=1.0, width=device-width">
# <meta name="format-detection" content="telephone=no">
index = path.join(@oDirFull, 'index.html')
return if path.existsSync(index)
contents = []
contents.push '<html manifest="index.manifest">'
contents.push '<head>'
contents.push "<title>#{config.name}</title>"
if config['status-bar-style']
contents.push '<meta name="apple-mobile-web-app-status-bar-style" content="' + config['status-bar-style'] + '" />'
contents.push '<meta name="apple-mobile-web-app-capable" content="yes" />'
vpUserScalable = config['viewport-user-scalable']
vpInitialScale = config['viewport-initial-scale']
vpDeviceWidth = config['viewport-device-width']
viewPortParts = []
viewPortParts.push "user-scalable=#{vpUserScalable}" if vpUserScalable
viewPortParts.push "initial-scale=#{vpInitialScale}" if vpInitialScale
viewPortParts.push "width=#{vpDeviceWidth}" if vpDeviceWidth
if viewPortParts.length
vpContent = viewPortParts.join(', ')
contents.push '<meta name="viewport" content="' + vpContent + '"" />'
contents.push '<style>'
contents.push config['main.css']
contents.push '</style>'
contents.push '<script>'
contents.push config['main.js']
contents.push '</script>'
contents.push '</head>'
contents.push '<body>'
contents.push config['main.html']
contents.push '</body>'
contents.push '</html>'
contents = contents.join '\n'
fs.writeFileSync index, contents
utils.logVerbose "created: #{index}"
#---------------------------------------------------------------------------
writeIndexManifest: () ->
manifest = path.join(@oDirFull, 'index.manifest')
return if path.existsSync(manifest)
contents = """
CACHE MANIFEST
#----------------------------------------------------------
CACHE:
#----------------------------------------------------------
NETWORK:
*
#----------------------------------------------------------
# updated: #{new Date().toString()}
#----------------------------------------------------------
"""
fs.writeFileSync manifest, contents
utils.logVerbose "created: #{manifest}"
#---------------------------------------------------------------------------
writeHtAccess: ->
htAccess = path.join(@oDirFull, '.htaccess')
return if path.existsSync(htAccess)
contents = """
# set content type for manifest
AddType text/cache-manifest .manifest
"""
fs.writeFileSync htAccess, contents
utils.logVerbose "created: #{htAccess}"
#---------------------------------------------------------------------------
readConfig: () ->
configFile = path.join(@iDir, 'config.properties')
if !path.existsSync configFile
@emitErrorMessage "a config.properties file was not found in #{@iDir}"
return
# console.log "config: #{JSON.stringify(@config,null,4)}"
Properties.fromFile configFile
#---------------------------------------------------------------------------
emptyDir: (dir) ->
fileSet = FileSet.fromDir(dir)
for file in fileSet.fullFiles()
utils.logVerbose "erased: #{file}"
fs.unlinkSync file
for dir in fileSet.fullDirs().reverse()
utils.logVerbose "rmdir: #{dir}"
fs.rmdirSync dir
#---------------------------------------------------------------------------
emitErrorMessage: (message) ->
@emit 'error', new Error(message)
| 121725 | #-------------------------------------------------------------------------------
# Copyright (c) 2012 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
fs = require 'fs'
path = require 'path'
events = require 'events'
util = require 'util'
crypto = require 'crypto'
utils = require '../utils'
FileSet = require '../FileSet'
Properties = require '../Properties'
#-------------------------------------------------------------------------------
module.exports = class Processor extends events.EventEmitter
#---------------------------------------------------------------------------
constructor: (@iDir, @oDir, @options) ->
@records = []
#---------------------------------------------------------------------------
process: ->
@iDirFull = path.resolve @iDir
@oDirFull = path.resolve @oDir
# console.log "iDir: #{@iDir} #{@iDirFull}"
# console.log "oDir: #{@oDir} #{@oDirFull}"
# console.log "options: #{JSON.stringify(@options)}"
# console.log ""
if !path.existsSync @iDirFull
@emitErrorMessage "input directory '#{@iDir}' does not exist"
return
if !path.existsSync @oDirFull
@emitErrorMessage "output directory '#{@oDir}' does not exist"
return
iStats = fs.statSync @iDirFull
oStats = fs.statSync @oDirFull
if !iStats.isDirectory()
@emitErrorMessage "input directory '#{@iDir}' is not a directory"
return
if !oStats.isDirectory()
@emitErrorMessage "output directory '#{@oDir}' is not a directory"
return
if @iDirFull == @oDirFull
@emitErrorMessage "the input and output directory cannot be the same"
return
config = @readConfig()
config.name ||= 'no title provided'
mainHtml = config['main.html'] || 'main.html'
mainCss = config['main.css'] || 'main.css'
mainJs = config['main.js'] || 'main.js'
mainHtml = utils.readFile(path.join(@iDir, mainHtml))
mainHtml ||= '<!-- no HTML provided -->'
mainCss = utils.readFile(path.join(@iDir, mainCss))
mainCss ||= '/* no CSS provided */'
mainJs = utils.readFile(path.join(@iDir, mainJs))
mainJs ||= '/* no JavaScript provided */'
config['main.html'] = mainHtml
config['main.css'] = mainCss
config['main.js'] = mainJs
@emptyDir(@oDirFull)
@writeIndexHtml(config)
@writeIndexManifest(config)
@writeHtAccess(config)
# @checkForMain @iDirFull
# iFileSet = FileSet.fromDir(@iDirFull)
# oFileSet = FileSet.fromDir(@oDirFull)
# console.log "iDir files:"
# iFileSet.dump()
# console.log ""
# console.log "oDir files:"
# oFileSet.dump()
# @copyFiles(@oDirFull, @iDirFull, @)
# manifest = path.join(@oDirFull, 'offl-site.manifest.txt')
# contents = JSON.stringify(@records, null, 4)
# fs.writeFileSync manifest, contents
# manifest = path.join(@oDir, path.basename(manifest))
# utils.logVerbose "created: #{manifest}"
utils.log "shell created in: #{@oDir}"
@emit 'done'
#---------------------------------------------------------------------------
writeIndexHtml: (config) ->
# <link rel="apple-touch-icon" sizes="72x72" href="touch-icon-ipad.png" />
# <meta name="apple-mobile-web-app-capable" content="yes" />
# <meta name="apple-mobile-web-app-status-bar-style" content="black" />
# <meta name="viewport" content="user-scalable=no, initial-scale=1.0, width=device-width">
# <meta name="format-detection" content="telephone=no">
index = path.join(@oDirFull, 'index.html')
return if path.existsSync(index)
contents = []
contents.push '<html manifest="index.manifest">'
contents.push '<head>'
contents.push "<title>#{config.name}</title>"
if config['status-bar-style']
contents.push '<meta name="apple-mobile-web-app-status-bar-style" content="' + config['status-bar-style'] + '" />'
contents.push '<meta name="apple-mobile-web-app-capable" content="yes" />'
vpUserScalable = config['viewport-user-scalable']
vpInitialScale = config['viewport-initial-scale']
vpDeviceWidth = config['viewport-device-width']
viewPortParts = []
viewPortParts.push "user-scalable=#{vpUserScalable}" if vpUserScalable
viewPortParts.push "initial-scale=#{vpInitialScale}" if vpInitialScale
viewPortParts.push "width=#{vpDeviceWidth}" if vpDeviceWidth
if viewPortParts.length
vpContent = viewPortParts.join(', ')
contents.push '<meta name="viewport" content="' + vpContent + '"" />'
contents.push '<style>'
contents.push config['main.css']
contents.push '</style>'
contents.push '<script>'
contents.push config['main.js']
contents.push '</script>'
contents.push '</head>'
contents.push '<body>'
contents.push config['main.html']
contents.push '</body>'
contents.push '</html>'
contents = contents.join '\n'
fs.writeFileSync index, contents
utils.logVerbose "created: #{index}"
#---------------------------------------------------------------------------
writeIndexManifest: () ->
manifest = path.join(@oDirFull, 'index.manifest')
return if path.existsSync(manifest)
contents = """
CACHE MANIFEST
#----------------------------------------------------------
CACHE:
#----------------------------------------------------------
NETWORK:
*
#----------------------------------------------------------
# updated: #{new Date().toString()}
#----------------------------------------------------------
"""
fs.writeFileSync manifest, contents
utils.logVerbose "created: #{manifest}"
#---------------------------------------------------------------------------
writeHtAccess: ->
htAccess = path.join(@oDirFull, '.htaccess')
return if path.existsSync(htAccess)
contents = """
# set content type for manifest
AddType text/cache-manifest .manifest
"""
fs.writeFileSync htAccess, contents
utils.logVerbose "created: #{htAccess}"
#---------------------------------------------------------------------------
readConfig: () ->
configFile = path.join(@iDir, 'config.properties')
if !path.existsSync configFile
@emitErrorMessage "a config.properties file was not found in #{@iDir}"
return
# console.log "config: #{JSON.stringify(@config,null,4)}"
Properties.fromFile configFile
#---------------------------------------------------------------------------
emptyDir: (dir) ->
fileSet = FileSet.fromDir(dir)
for file in fileSet.fullFiles()
utils.logVerbose "erased: #{file}"
fs.unlinkSync file
for dir in fileSet.fullDirs().reverse()
utils.logVerbose "rmdir: #{dir}"
fs.rmdirSync dir
#---------------------------------------------------------------------------
emitErrorMessage: (message) ->
@emit 'error', new Error(message)
| true | #-------------------------------------------------------------------------------
# Copyright (c) 2012 PI:NAME:<NAME>END_PI
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
fs = require 'fs'
path = require 'path'
events = require 'events'
util = require 'util'
crypto = require 'crypto'
utils = require '../utils'
FileSet = require '../FileSet'
Properties = require '../Properties'
#-------------------------------------------------------------------------------
module.exports = class Processor extends events.EventEmitter
  #---------------------------------------------------------------------------
  # Generates an offline-capable "shell" site in the output directory from a
  # config.properties plus main.html / main.css / main.js in the input
  # directory.  Emits 'done' on success and 'error' (with an Error) on failure.
  #---------------------------------------------------------------------------
  # iDir    - input directory (must exist and contain config.properties)
  # oDir    - output directory (must exist; its contents are erased)
  # options - command-line options object (kept for callers; unused here)
  constructor: (@iDir, @oDir, @options) ->
    @records = []
  #---------------------------------------------------------------------------
  # Validates the directories, loads the config and source files, then writes
  # index.html, index.manifest and .htaccess into the output directory.
  process: ->
    @iDirFull = path.resolve @iDir
    @oDirFull = path.resolve @oDir
    # fs.existsSync replaces the deprecated path.existsSync
    if !fs.existsSync @iDirFull
      @emitErrorMessage "input directory '#{@iDir}' does not exist"
      return
    if !fs.existsSync @oDirFull
      @emitErrorMessage "output directory '#{@oDir}' does not exist"
      return
    iStats = fs.statSync @iDirFull
    oStats = fs.statSync @oDirFull
    if !iStats.isDirectory()
      @emitErrorMessage "input directory '#{@iDir}' is not a directory"
      return
    if !oStats.isDirectory()
      @emitErrorMessage "output directory '#{@oDir}' is not a directory"
      return
    # writing into the input directory would clobber the sources
    if @iDirFull == @oDirFull
      @emitErrorMessage "the input and output directory cannot be the same"
      return
    config = @readConfig()
    # readConfig() emits 'error' and returns undefined when the file is missing
    return if !config
    config.name ||= 'no title provided'
    # resolve the main source files (names overridable via the config) and
    # fall back to placeholder content when a file cannot be read
    mainHtml = config['main.html'] || 'main.html'
    mainCss = config['main.css'] || 'main.css'
    mainJs = config['main.js'] || 'main.js'
    mainHtml = utils.readFile(path.join(@iDir, mainHtml))
    mainHtml ||= '<!-- no HTML provided -->'
    mainCss = utils.readFile(path.join(@iDir, mainCss))
    mainCss ||= '/* no CSS provided */'
    mainJs = utils.readFile(path.join(@iDir, mainJs))
    mainJs ||= '/* no JavaScript provided */'
    config['main.html'] = mainHtml
    config['main.css'] = mainCss
    config['main.js'] = mainJs
    @emptyDir(@oDirFull)
    @writeIndexHtml(config)
    @writeIndexManifest(config)
    @writeHtAccess(config)
    utils.log "shell created in: #{@oDir}"
    @emit 'done'
  #---------------------------------------------------------------------------
  # Writes index.html with the CSS/JS/HTML inlined; an existing index.html is
  # never overwritten.
  writeIndexHtml: (config) ->
    index = path.join(@oDirFull, 'index.html')
    return if fs.existsSync(index)
    contents = []
    contents.push '<html manifest="index.manifest">'
    contents.push '<head>'
    contents.push "<title>#{config.name}</title>"
    if config['status-bar-style']
      contents.push '<meta name="apple-mobile-web-app-status-bar-style" content="' + config['status-bar-style'] + '" />'
    contents.push '<meta name="apple-mobile-web-app-capable" content="yes" />'
    # assemble the iOS viewport meta tag from the individual config settings
    vpUserScalable = config['viewport-user-scalable']
    vpInitialScale = config['viewport-initial-scale']
    vpDeviceWidth = config['viewport-device-width']
    viewPortParts = []
    viewPortParts.push "user-scalable=#{vpUserScalable}" if vpUserScalable
    viewPortParts.push "initial-scale=#{vpInitialScale}" if vpInitialScale
    viewPortParts.push "width=#{vpDeviceWidth}" if vpDeviceWidth
    if viewPortParts.length
      vpContent = viewPortParts.join(', ')
      # fixed: a stray extra double quote used to be emitted before ' />'
      contents.push '<meta name="viewport" content="' + vpContent + '" />'
    contents.push '<style>'
    contents.push config['main.css']
    contents.push '</style>'
    contents.push '<script>'
    contents.push config['main.js']
    contents.push '</script>'
    contents.push '</head>'
    contents.push '<body>'
    contents.push config['main.html']
    contents.push '</body>'
    contents.push '</html>'
    contents = contents.join '\n'
    fs.writeFileSync index, contents
    utils.logVerbose "created: #{index}"
  #---------------------------------------------------------------------------
  # Writes an HTML5 appcache manifest; never overwrites an existing one.
  # config is accepted for call-site symmetry but currently unused.
  writeIndexManifest: (config) ->
    manifest = path.join(@oDirFull, 'index.manifest')
    return if fs.existsSync(manifest)
    contents = """
      CACHE MANIFEST
      #----------------------------------------------------------
      CACHE:
      #----------------------------------------------------------
      NETWORK:
      *
      #----------------------------------------------------------
      # updated: #{new Date().toString()}
      #----------------------------------------------------------
    """
    fs.writeFileSync manifest, contents
    utils.logVerbose "created: #{manifest}"
  #---------------------------------------------------------------------------
  # Writes an .htaccess that serves *.manifest with the appcache MIME type;
  # never overwrites an existing one.  config is accepted but unused.
  writeHtAccess: (config) ->
    htAccess = path.join(@oDirFull, '.htaccess')
    return if fs.existsSync(htAccess)
    contents = """
      # set content type for manifest
      AddType text/cache-manifest .manifest
    """
    fs.writeFileSync htAccess, contents
    utils.logVerbose "created: #{htAccess}"
  #---------------------------------------------------------------------------
  # Reads config.properties from the input directory.  Returns a Properties
  # object, or undefined after emitting 'error' when the file is absent.
  readConfig: () ->
    configFile = path.join(@iDir, 'config.properties')
    if !fs.existsSync configFile
      @emitErrorMessage "a config.properties file was not found in #{@iDir}"
      return
    Properties.fromFile configFile
  #---------------------------------------------------------------------------
  # Erases every file and subdirectory under dir (dir itself is kept).
  emptyDir: (dir) ->
    fileSet = FileSet.fromDir(dir)
    for file in fileSet.fullFiles()
      utils.logVerbose "erased: #{file}"
      fs.unlinkSync file
    # deepest directories first so rmdir always sees an empty directory
    for subDir in fileSet.fullDirs().reverse()
      utils.logVerbose "rmdir: #{subDir}"
      fs.rmdirSync subDir
  #---------------------------------------------------------------------------
  # Emits an 'error' event wrapping message in an Error object.
  emitErrorMessage: (message) ->
    @emit 'error', new Error(message)
class filter
	# --- shared (closure) state for the filter page ---
	biom = null                          # parsed BIOM JSON being filtered (kept pristine)
	phinch = null                        # second parse of the same data; mutated for export
	filename = null                      # name of the loaded BIOM file
	attr_length = null                   # biom.shape[1]-1, i.e. last sample-column index
	format = d3.format(',d')             # thousands-separated integer formatter
	date_array = []                      # metadata keys whose name contains "date"
	no_data_attributes_array = []        # descriptive (non-numeric) metadata keys
	unknown_array = []                   # keys that matched no classification
	attributes_array = []                # numeric metadata keys
	attributes_array_units = []          # unit string parsed from each numeric key's value
	groupable_array = []                 # string-valued, groupable metadata keys
	groupable_array_content = []         # flattened distinct values per groupable key
	columns_sample_name_array = [] # All sample names
	columns_sample_count_list = [] # Each sample count
	columns_non_empty_sample_count = [] # indices of columns whose total count > 0
	phinchID_array = []                  # editable Phinch IDs, one per sample column
	constructor: () ->
		# Loads the most recently imported BIOM file from IndexedDB, parses it,
		# classifies the sample metadata, then builds the whole filter UI.
		db.open(
			server: "BiomData", version: 1,
			schema:
				"biom": key: keyPath: 'id', autoIncrement: true,
		).done (s) =>
			@server = s
			s.biom.query().all().execute().done (results) =>
				# the newest record is the file the user just loaded
				currentData = results[results.length-1]
				filename = currentData.name
				# two independent parses: `biom` stays pristine for filtering,
				# `phinch` is mutated later by downloadPhinch() for export
				biom = JSON.parse(currentData.data)
				phinch = JSON.parse(currentData.data)
				# Parse
				attr_length = biom.shape[1]-1
				@generateColumns()
				@generateColumnsSummary()
				@generateColumnsValues()
				@generateDate()
				# Build
				$("#file_details").append( "ANALYZING <span>" + filename.substring(0,52) + "</span> " + (parseFloat(currentData.size.valueOf() / 1000000)).toFixed(1) + " MB <br/><br />OBSERVATION <span>" + format(biom.shape[0]) + "</span> SELECTED SAMPLES <span>" + format(biom.shape[1]) + "</span>")
				$('#goExport').click( () => @downloadPhinch(0) )
				$('#goGallery').click () =>
					# show a spinner while the (potentially slow) export runs
					$('#right_live_panel').html('<i class="icon-spinner icon-spin icon-large icon-4x" style="float:right;"></i>')
					setTimeout(() =>
						@downloadPhinch(1)
					, 200)
				@generateLeftDates()
				@generateLeftNumeric()
				@generateLeftNonNumeric()
				@generateLeftGroupable()
				# remove the numbers and leave the string values
				# (generateColumns() pushed integer start markers between groups)
				if groupable_array_content.length > 0
					for i in [0..groupable_array_content.length-1]
						if typeof groupable_array_content[i] == 'number'
							groupable_array_content.splice( groupable_array_content.indexOf(groupable_array_content[i]),1 )
				@generateThumbnails()
				@livePreview()
# 0 Jump to Gallery
	jumpToGallery: () ->
		# Persists the current selection (sample ids, groupables, attributes,
		# units) into the BiomSample IndexedDB store, then navigates to viz.html.
		that = this
		db.open(
			server: "BiomSample", version: 1,
			schema:
				"biomSample": key: keyPath: 'id', autoIncrement: true,
		).done (s) =>
			sampleToStore = {}
			sampleToStore.name = filename
			sampleToStore.type = 'sampleIDs'
			sampleToStore.selected_sample = @selected_sample
			sampleToStore.groupable = groupable_array
			sampleToStore.selected_groupable_array = @selected_groupable_array
			sampleToStore.selected_attributes_array = @selected_attributes_array
			selected_phinchID_array = [] # last step to store the selected ones
			for i in [0..@selected_sample.length-1]
				selected_phinchID_array.push(phinchID_array[@selected_sample[i]])
			sampleToStore.selected_phinchID_array = selected_phinchID_array
			# NOTE: this is an alias (same array object), so the per-index
			# updates below also mutate @selected_attributes_units_array
			selected_attributes_units_array = @selected_attributes_units_array # store the units in case of changes
			if @selected_attributes_units_array.length > 0
				for i in [0..@selected_attributes_units_array.length-1]
					# user-edited unit fields override the parsed units
					if $('#unit_' + (i+1) ).val() != @selected_attributes_units_array[i] and $('#unit_' + (i+1) ).val() != ''
						selected_attributes_units_array[i] = $('#unit_' + (i+1) ).val()
			sampleToStore.selected_attributes_units_array = @selected_attributes_units_array
			s.biomSample.add( sampleToStore ).done (item) ->
				# string form of setTimeout with no delay: runs on next tick
				setTimeout( "window.location.href = 'viz.html'" )
# 1 Parse Data
generateColumns: () ->
for key of biom.columns[0].metadata
if key.toLowerCase().indexOf("date") != -1
date_array.push(key)
else if key == 'phinchID'
console.log 'PhinchID does exsit!'
else if (key.toLowerCase().indexOf("barcode") != -1) || (key.toLowerCase().indexOf("sequence") != -1) || (key.toLowerCase().indexOf("reverse") != -1) || (key.toLowerCase() == "internalcode") || (key.toLowerCase() == "description") || (key.toLowerCase().indexOf("adapter") !=-1)
no_data_attributes_array.push(key)
else if !isNaN(biom.columns[0].metadata[key].split(" ")[0].replace(",","")) || biom.columns[0].metadata[key] == "no_data"
idential_elements_in_array_flag = false
for i in [0..attr_length]
if biom.columns[i].metadata[key] != 'no_data'
idential_elements_in_array = biom.columns[i].metadata[key]
break
for i in [0..attr_length]
if biom.columns[i].metadata[key] != idential_elements_in_array and biom.columns[i].metadata[key] != 'no_data'
idential_elements_in_array_flag = true
unitsFlag = false
if idential_elements_in_array_flag
attributes_array.push(key)
for i in [0..attr_length] # in case 'no_data'
if biom.columns[i].metadata[key] != 'no_data' and unitsFlag is false
attributes_array_units.push(biom.columns[i].metadata[key].split(" ")[1])
unitsFlag = true
else
no_data_attributes_array.push(key)
else if typeof key == 'string'
groupable_array.push(key)
starting_flag = groupable_array_content.length
groupable_array_content.push(starting_flag)
for i in [0..attr_length]
flag = true
if groupable_array_content.length > 0
for j in [(starting_flag+1)..groupable_array_content.length-1]
if biom.columns[i].metadata[key] == groupable_array_content[j]
flag = false
break
if flag
groupable_array_content.push(biom.columns[i].metadata[key])
if groupable_array_content.length - starting_flag == 2
no_data_attributes_array.push(key)
groupable_array.splice(groupable_array.length-1,1)
groupable_array_content.splice(groupable_array_content.length-2, 2)
else
unknown_array.push(key)
	generateColumnsSummary: () ->
		# Totals sequence counts per sample column, records which columns are
		# non-empty, and assigns a phinchID to every column (reusing an
		# existing metadata phinchID when the file already carries one).
		columns_sample_total_count = 0 # Non empty sample ids, for new phinch file
		for i in [0..attr_length]
			columns_sample_count_list[i] = 0
			columns_sample_name_array.push(biom.columns[i].id)
		for i in [0..attr_length]
			if biom.columns[i].metadata['phinchID']?
				phinchID_array.push(biom.columns[i].metadata['phinchID'])
			else
				phinchID_array.push(i)
		# sparse data triples are [rowIndex, columnIndex, count]
		for i in [0..biom.data.length-1]
			columns_sample_total_count += biom.data[i][2]
			columns_sample_count_list[biom.data[i][1]] += biom.data[i][2]
		for i in [0..attr_length]
			if columns_sample_count_list[i] > 0
				columns_non_empty_sample_count.push(i)
	generateColumnsValues: () ->
		# Builds @columns_metadata_array[attribute][column] = numeric value of
		# that attribute for that sample; unparseable values become -99999.
		@columns_metadata_array = [] # All column data values
		@columns_metadata_array = new Array(attributes_array.length)
		if attributes_array.length > 0
			for i in [0..attributes_array.length-1]
				@columns_metadata_array[i] = new Array(attr_length+1)
			for i in [0..attr_length]
				for key of biom.columns[i].metadata
					for j in [0..attributes_array.length-1]
						if key == attributes_array[j]
							@columns_metadata_array[j][i] = parseFloat(biom.columns[i].metadata[key].split(" ")[0].replace(",","")) # in case there is between thousands
							if isNaN(@columns_metadata_array[j][i])
								# sentinel for 'no_data' / unparseable entries
								@columns_metadata_array[j][i] = -99999
generateDate: () ->
@formatted_date_array = new Array(date_array.length)
@sorted_number_date_array_d = new Array(date_array.length)
@sorted_number_date_array_freq = new Array(date_array.length)
number_date_array = new Array(date_array.length)
if date_array.length > 0
for m in [0..date_array.length-1]
@formatted_date_array[m] = []
@sorted_number_date_array_d[m] = []
@sorted_number_date_array_freq[m] = []
date_meta_key = date_array[m]
number_date_array[m] = []
for i in [0..attr_length]
ori_timestamp = biom.columns[i].metadata[date_meta_key]
if ori_timestamp.length < 11 && ori_timestamp.indexOf(":") == -1 # No Hour Min Sec
@formatted_date_array[m].push(moment(ori_timestamp).format("YYYY-MM-DD"))
number_date_array[m].push(moment(ori_timestamp).format("YYYYMMDD"))
else
@formatted_date_array[m].push(moment(ori_timestamp, "YYYY-MM-DDTHH:mm:ss Z").utc().format())
number_date_array[m].push( moment(ori_timestamp, "YYYY-MM-DDTHH:mm:ss Z").utc().format("YYYYMMDDHHmmss") )
@sorted_number_date_array_d[m] = @sortByFrequency(number_date_array[m])[0]
@sorted_number_date_array_freq[m] = @sortByFrequency(number_date_array[m])[1]
# 2 Build Panels
	generateLeftDates: () ->
		# Builds the "dates" section of the left panel: constant date columns
		# become static labels; varying ones get an expandable histogram with a
		# two-handled range slider feeding @range_dates_array (unix seconds).
		content = ""
		@range_dates_array = []
		if date_array.length == 0
			$('#att_head_dates').hide()
		else
			if date_array.length > 0
				for m in [0..date_array.length-1]
					if @check_unique(@formatted_date_array[m])
						# single constant date: no slider, range collapses to it
						$('#dates').append("<div class = 'biom_valid_attr'><p>" + date_array[m] + ": " + @formatted_date_array[m][0] + "</p></div>")
						@range_dates_array[m] = new Array(2)
						@range_dates_array[m][0] = moment(@formatted_date_array[m][0]).utc().format("X")
						@range_dates_array[m][1] = moment(@formatted_date_array[m][0]).utc().format("X")
					else
						content += "<div class = 'biom_valid_attr_dates'>"
						content += date_array[m]
						content += "<div class = 'icon-expand-collapse-c' id= 'expend_collapse_dates_icon_" + (m + 1) + "'><i class='icon-expand-alt'></i></div>"
						# display smaller dates
						# 8-digit values are day precision, 14-digit are seconds
						if @sorted_number_date_array_d[m][0].length < 9
							content += "<p class='range_new_dates' id='range_dates_" + (m+1) + "_new'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDD").format("MM/DD/YY") + " - " + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDD").format("MM/DD/YY") + "</p>"
						else
							content += "<p class='range_new_dates' id='range_dates_" + (m+1) + "_new'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDDHHmmss").format("MM/DD/YY") + " - " + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDDHHmmss").format("MM/DD/YY") + "</p>"
						content += "<div style='display: none;' id = 'expend_collapse_dates_" + (m+1) + "'>" + "<div class= 'biom_valid_att_thumbnail_dates' id='thumb_dates_" + (m+1) + "'></div>"
						content += "<div class='biom_valid_att_slider' id='slider_dates_" + (m+1) + "'></div>"
						if @sorted_number_date_array_d[m][0].length < 9
							content += "<div class='range range_left_dates' id='range_dates_" + (m+1) + "_left'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDD").format("YYYY-MM-DD") + "</div>"
							content += "<div class='range range_right_dates' id='range_dates_" + (m+1) + "_right'>" + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDD").format("YYYY-MM-DD") + "</div>"
							min_timestamp = moment(@sorted_number_date_array_d[m][0], "YYYYMMDD").utc().format("X")
							max_timestamp = moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDD").utc().format("X")
						else
							content += "<div class='range range_left_dates' id='range_dates_" + (m+1) + "_left'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDDHHmmss").format("YYYY-MM-DD<br/>HH:mm:ss") + "</div>"
							content += "<div class='range range_right_dates' id='range_dates_" + (m+1) + "_right'>" + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDDHHmmss").format("YYYY-MM-DD<br/>HH:mm:ss") + "</div>"
							min_timestamp = moment(@sorted_number_date_array_d[m][0], "YYYYMMDDHHmmss Z").utc().format("X")
							max_timestamp = moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDDHHmmss Z").utc().format("X")
						content += "</div></div>"
						$('#dates').append(content)
						# toggle the expandable histogram/slider section
						$('#expend_collapse_dates_icon_' + (m + 1) ).click (event) =>
							id = event.currentTarget.id.replace('expend_collapse_dates_icon_','')
							if $('#expend_collapse_dates_' + id).attr('style') == 'display: none;'
								$('#expend_collapse_dates_' + id).show()
								$('#expend_collapse_dates_icon_' + id).html('<i class="icon-collapse-alt"></i>')
							else
								$('#expend_collapse_dates_' + id).hide()
								$('#expend_collapse_dates_icon_' + id).html('<i class="icon-expand-alt"></i>')
						@drawBasicBars( '#thumb_dates_' + (m+1), null, @sorted_number_date_array_freq[m], null, [250, 50] )
						$('#slider_dates_' + (m+1)).width( $('#thumb_dates_' + (m+1) + ' svg').attr('width') - 2 )
						@range_dates_array[m] = new Array(2)
						@range_dates_array[m][0] = min_timestamp
						@range_dates_array[m][1] = max_timestamp
						# slider positions are indices into the sorted date list
						$( "#slider_dates_" + (m+1)).slider({
							range: true,
							min: 0,
							max: @sorted_number_date_array_freq[m].length-1,
							step: 1,
							values: [ 0, @sorted_number_date_array_freq[m].length-1 ],
							slide: ( event, ui ) =>
								id = event.target.id.replace("slider_dates_","")
								$("#range_dates_" + id + "_new").text( "[" + moment(@sorted_number_date_array_d[id-1][ui.values[0]], "YYYYMMDD").format("MM/DD/YY") + " — " + moment(@sorted_number_date_array_d[id-1][ui.values[1]], "YYYYMMDD").format("MM/DD/YY") + "]")
								if @sorted_number_date_array_d[id-1][ui.values[0]].length < 9
									@range_dates_array[id-1][0] = moment(@sorted_number_date_array_d[id-1][ui.values[0]],"YYYYMMDD").utc().format("X")
									@range_dates_array[id-1][1] = moment(@sorted_number_date_array_d[id-1][ui.values[1]],"YYYYMMDD").utc().format("X")
								else
									@range_dates_array[id-1][0] = moment(@sorted_number_date_array_d[id-1][ui.values[0]], "YYYYMMDDHHmmss").utc().format("X")
									@range_dates_array[id-1][1] = moment(@sorted_number_date_array_d[id-1][ui.values[1]], "YYYYMMDDHHmmss").utc().format("X")
								@livePreview()
						})
# find the attributes with numeric values
	# Builds the left-panel rows for the numeric attributes: a checkbox, an
	# editable unit field, a small preview histogram and an expandable section
	# with the full histogram + range slider (wired up in generateThumbnails).
	generateLeftNumeric: () ->
		if attributes_array.length == 0
			$('#att_head_numeric').hide()
		else
			if attributes_array.length > 0
				for i in [0..attributes_array.length-1]
					content = ""
					content += "<input type='checkbox' name='numeric_check_group' id='numeric_check_" + (i+1) + "' checked='checked' /><label for='numeric_check_" + (i+1) + "'></label>"
					content += "<span class = 'biom_valid_attr' id='att_" + (i+1) + "'>" + attributes_array[i] + "</span>"
					# show the parsed unit as the placeholder when one exists
					if (typeof(attributes_array_units[i]) != 'undefined' && attributes_array_units[i] != null)
						content += "<input type='text' class='biom_valid_attr_units' id='unit_" + (i+1) + "' placeholder='" + attributes_array_units[i] + "'>"
					else
						content += "<input type='text' class='biom_valid_attr_units' id='unit_" + (i+1) + "' placeholder='unit'>"
					content += "<div class = 'icon-expand-collapse-c' id= 'expend_collapse_icon_" + (i+1) + "'><i class='icon-expand-alt'></i></div>"
					content += "<div class='biom_valid_att_thumbnail_sm' id='thumb_sm_" + (i+1) + "'></div>"
					content += "<p class='range range_new' id='range_" + (i+1) + "_new'></p>"
					content += "<div style='display: none;' id = 'expend_collapse_" + (i+1) + "'>" + "<div class='biom_valid_att_thumbnail' id='thumb_" + (i+1) + "'></div>"
					content += "<div class='biom_valid_att_slider' id='slider_" + (i+1) + "'></div>"
					content += "<div class='blackSticks'></div>"
					content += "<p class='range range_left' id='range_" + (i+1) + "_left'></p>"
					content += "<p class='range range_right' id='range_" + (i+1) + "_right'></p>"
					content += "<p class='biom_valid_notes' id='att_note_" + (i+1) + "'></p></div>"
					$('#numeric_att').append("<div>" + content + "</div>")
					# toggle between the compact preview and the expanded editor
					$('#expend_collapse_icon_' + (i+1) ).click (event) =>
						id = event.currentTarget.id.replace('expend_collapse_icon_','')
						if $('#expend_collapse_' + id).attr('style') == 'display: none;'
							$('#expend_collapse_' + id).show()
							$('#att_' + id).css('font-weight', 'bold')
							$('#unit_' + id).show()
							$('#range_' + id + '_new').show()
							$('#thumb_sm_' + id).hide()
							$('#expend_collapse_icon_' + id).html('<i class="icon-collapse-alt"></i>')
						else
							$('#expend_collapse_' + id).hide()
							$('#att_' + id).css('font-weight', 'normal')
							$('#unit_' + id).hide()
							$('#range_' + id + '_new').hide()
							$('#thumb_sm_' + id).show()
							$('#expend_collapse_icon_' + id).html('<i class="icon-expand-alt"></i>')
					$('#numeric_check_' + (i+1) ).click () => @livePreview()
# deals with the non-numeric attributes
	# Builds the left-panel rows for the descriptive (non-numeric) attributes:
	# a plain checkbox per attribute, unchecked by default.
	generateLeftNonNumeric: () ->
		if no_data_attributes_array.length == 0
			$('#att_head_descriptive').hide()
		else
			if no_data_attributes_array.length > 0
				for i in [0..no_data_attributes_array.length-1]
					content = ""
					content += "<input type='checkbox' name='non_numeric_check_group' id='non_numeric_check_" + (i+1) + "' /><label for='non_numeric_check_" + (i+1) + "'></label><span class = 'biom_valid_attr'>" + no_data_attributes_array[i] + "</span>"
					$('#non_numeric_att').append("<div>" + content + "</div>")
					$('#non_numeric_check_' + (i+1)).click () => @livePreview()
# generate the groupable attributes panel
	# Builds the groupable-attributes panel.  groupable_array_content is a
	# flat array where each group starts with an integer marker equal to its
	# own index; the pointers below slice the flat array back into per-key
	# value lists.
	generateLeftGroupable: () ->
		pointer_left = 1
		pointer_right = groupable_array_content.length-1
		check_count = 1
		if groupable_array.length == 0
			$('#att_head_groupable').hide()
		else
			if groupable_array.length > 0
				for i in [0..groupable_array.length-1]
					flag = true
					toprocess = []
					content = ""
					content += "<span class = 'biom_valid_attr'>" + groupable_array[i] + "</span><br/>"
					if groupable_array_content.length > 0
						# find the next integer marker: it ends this key's slice
						for j in [pointer_left..groupable_array_content.length-1]
							if groupable_array_content[j] == j
								pointer_right = j
								flag = false
								break
						if flag
							# no further marker: this is the last group
							toprocess = groupable_array_content.slice(pointer_left, groupable_array_content.length)
						else
							toprocess = groupable_array_content.slice(pointer_left, pointer_right)
						pointer_left = pointer_right + 1
						pointer_right = groupable_array_content.length-1
					if toprocess.length > 0
						for k in [0..toprocess.length-1]
							content += "<input type='checkbox' name='groupable_check_group' id='groupable_check_" + check_count + "' class='groupable_check' checked='checked' /><label for='groupable_check_" + check_count + "'></label><span class = 'biom_valid_attr_grp'>" + toprocess[k] + "</span><br/>"
							check_count++
					$('#groupable_att').append("<div class='overflowControl'>" + content + "</div>")
			$('.groupable_check').click () => @livePreview()
# generate the thumbnails for users to filter
	# Draws the value-frequency histograms for every numeric attribute and
	# wires up the jQuery UI range sliders that update @range_array and
	# re-trigger livePreview().
	generateThumbnails: () ->
		@range_array = []
		@lines_array = new Array(@columns_metadata_array.length)
		if @columns_metadata_array.length > 0
			step = new Array(@columns_metadata_array.length) # keeps the step value between each bar
			for i in [0..@columns_metadata_array.length-1]
				nan_values = 0
				each_numeric_linechart = @sortByFrequency(@columns_metadata_array[i])
				# -99999 is the "empty value" sentinel; it sorts first, so
				# strip it and surface a note instead
				if each_numeric_linechart[0][0] == -99999
					nan_values = each_numeric_linechart[1][0]
					each_numeric_linechart[0].shift()
					each_numeric_linechart[1].shift()
				if nan_values > 0
					$("#att_note_" + (i+1)).text("* This column has " + nan_values + " empty values.")
				@lines_array[i] = new Array(2)
				@lines_array[i][0] = each_numeric_linechart[0]
				@lines_array[i][1] = each_numeric_linechart[1]
				each_numeric_linechart_min = Math.min.apply(Math, each_numeric_linechart[0])
				each_numeric_linechart_max = Math.max.apply(Math, each_numeric_linechart[0])
				# large expanded histogram + small collapsed preview
				@drawBasicBars( '#thumb_' + (i+1), each_numeric_linechart[0], each_numeric_linechart[1], null, [250, 50] )
				@drawBasicBars( '#thumb_sm_' + (i+1), each_numeric_linechart[0], each_numeric_linechart[1], null, [130, 15])
				@range_array[i] = new Array(2)
				@range_array[i][0] = each_numeric_linechart_min
				@range_array[i][1] = each_numeric_linechart_max
				step[i] = (each_numeric_linechart_max - each_numeric_linechart_min) / each_numeric_linechart[1].length
				$('#slider_' + (i+1)).width( $('#thumb_' + (i+1) + ' svg').attr('width') - 2 )
				$( "#slider_" + (i+1)).slider({
					range: true,
					min: each_numeric_linechart_min,
					max: each_numeric_linechart_max,
					step: (each_numeric_linechart_max - each_numeric_linechart_min) / each_numeric_linechart[1].length, # step for adjustment, get the min between unit & 1
					values: [ each_numeric_linechart_min, each_numeric_linechart_max ],
					slide: ( event, ui ) =>
						id = event.target.id.replace("slider_","")
						# snap the dragged handle to the nearest actual value
						if ui.value == ui.values[0]
							order = Math.round( (ui.values[ 0 ] - @lines_array[id-1][0][0]) / step[id-1] )
							leftValue = @lines_array[id-1][0][order]
							@range_array[id-1][0] = leftValue # ui.values[0]
							$("#range_" + id + "_left").text( leftValue ).css('margin-left', Math.max( event.clientX - 40, 20) )
							$("#range_" + id + "_new").text( "range: [" + leftValue + " — " + @range_array[id-1][1] + "]")
						else
							order = Math.round( ( ui.values[ 1 ] - @lines_array[id-1][0][0]) / step[id-1] ) - 1
							rightValue = @lines_array[id-1][0][order]
							@range_array[id-1][1] = rightValue # ui.values[1]
							$("#range_" + id + "_right").text( rightValue ).css('margin-left', Math.min( event.clientX - 40, 270) )
							$("#range_" + id + "_new").text( "range: [" + @range_array[id-1][0] + " — " + rightValue + "]")
						# moving a slider implicitly re-enables the attribute
						$('#numeric_check_' + id).prop('checked', true)
						@drawBasicBars( '#thumb_sm_' + id, @lines_array[id-1][0], @lines_array[id-1][1], @range_array[id-1], [130, 15]) # values - ui.values
						@livePreview()
				})
				$( "#range_" + (i+1) + "_left").text( each_numeric_linechart_min )
				$( "#range_" + (i+1) + "_right").text(each_numeric_linechart_max )
				$( "#range_" + (i+1) + "_new").text("range: [" + each_numeric_linechart_min + " — " + each_numeric_linechart_max + "]" )
# 3 Live Preview, the right section
	# Recomputes the selection from every active filter (numeric ranges, date
	# ranges, groupable checkboxes, empty-column removal) and redraws the
	# sample table on the right-hand side.
	livePreview: () ->
		@selected_sample = []
		@selected_groupable_array = []
		@selected_attributes_array = []
		@selected_attributes_units_array = []
		@selected_no_data_attributes_array = []
		selected_range_array = []
		# collect the checked attributes/values from the left panel
		if attributes_array.length > 0
			for i in [1..attributes_array.length]
				if $('#numeric_check_' + i).is(':checked')
					@selected_attributes_array.push(attributes_array[i-1])
					@selected_attributes_units_array.push(attributes_array_units[i-1])
		if no_data_attributes_array.length > 0
			for i in [1..no_data_attributes_array.length]
				if $('#non_numeric_check_' + i).is(':checked')
					@selected_no_data_attributes_array.push(no_data_attributes_array[i-1])
		if groupable_array_content.length > 0
			for i in [1..groupable_array_content.length]
				if $('#groupable_check_' + i).is(':checked')
					@selected_groupable_array.push(groupable_array_content[i-1])
		if @range_array.length > 0
			for i in [1..@range_array.length]
				if $('#numeric_check_' + i).is(':checked')
					selected_range_array.push(@range_array[i-1])
		$('#right_live_panel').html('')
		# Step 1: start from all samples, then drop those outside any active
		# numeric or date range
		for i in [0..biom.shape[1]-1]
			@selected_sample.push(i)
		if selected_range_array.length > 0
			for i in [0..selected_range_array.length-1]
				key = @selected_attributes_array[i]
				for r in [0..biom.shape[1]-1]
					if biom.columns[r].metadata[key].split(" ")[0] < selected_range_array[i][0] || biom.columns[r].metadata[key].split(" ")[0] > selected_range_array[i][1]
						delete_index = @selected_sample.indexOf(r)
						if delete_index != -1 then @selected_sample.splice(delete_index,1)
		if date_array.length > 0
			for i in [0..date_array.length-1]
				key = date_array[i]
				for r in [0..biom.shape[1]-1]
					current_timeStamp = biom.columns[r].metadata[key]
					if current_timeStamp.length < 11 # and current_timeStamp.indexOf(":") != -1
						formatted_timeStamp = moment(current_timeStamp).utc().format("X")
					else
						formatted_timeStamp = moment(current_timeStamp, "YYYY-MM-DDTHH:mm:ss Z").utc().format("X")
					if formatted_timeStamp < @range_dates_array[i][0] || formatted_timeStamp > @range_dates_array[i][1]
						delete_index = @selected_sample.indexOf(r)
						if delete_index != -1
							@selected_sample.splice(delete_index,1)
							# (debug) this sample falls outside the date range
		# Step 2: drop samples whose groupable values are all unchecked
		if groupable_array.length > 0
			for i in [0..groupable_array.length-1]
				for k in [0..biom.shape[1]-1]
					flag = true
					if @selected_groupable_array.length > 0
						for r in [0..@selected_groupable_array.length-1]
							if biom.columns[k].metadata[ groupable_array[i] ] == @selected_groupable_array[r]
								flag = false
								break
						if flag
							delete_index = @selected_sample.indexOf(k)
							if delete_index != -1 then @selected_sample.splice(delete_index,1)
					else if @selected_groupable_array.length == 0
						# nothing checked at all means nothing selected
						@selected_sample = []
		# Add one more step here: get rid of _empty_sample_count, leave only the valid samples
		delete_index = []
		if @selected_sample.length > 0
			for i in [0..@selected_sample.length-1]
				flag = true
				if columns_non_empty_sample_count.length > 1
					for j in [0..columns_non_empty_sample_count.length-1]
						if columns_non_empty_sample_count[j] == @selected_sample[i]
							flag = false
							break
				if flag
					delete_index.push(@selected_sample[i])
					console.log 'Sample ' + (i+1) + ' has 0 count'
		if delete_index.length > 0
			for i in [0..delete_index.length-1]
				@selected_sample.splice(@selected_sample.indexOf(delete_index[i]), 1)
		# Step 3: @selected_sample now holds the surviving column indices;
		# render them as an editable DataTable
		content = "<table id='myTable'><thead><tr><th class = 'headerID myTableHeader'>PHINCH NAME</th><th class = 'headerID myTableHeader'>BIOM SAMPLE ID" + "</th><th class='myTableHeader'>SAMPLE NAME</th><th class='headerCount myTableHeader'>SEQUENCE READS</th></thead>"
		if @selected_sample.length > 0
			for i in [0..@selected_sample.length-1]
				content += '<tr><td contenteditable="true" id="phinchID_' + @selected_sample[i] + '">' + phinchID_array[@selected_sample[i]] + '</td><td>' + (@selected_sample[i] + 1) + '</td><td>' + columns_sample_name_array[@selected_sample[i]] + '</td><td>' + columns_sample_count_list[@selected_sample[i]] + '</td></tr>'
		content += "</table>"
		$("#right_live_panel").html(content)
		$('#myTable').dataTable({
			"iDisplayLength": @selected_sample.length, # 50
			"aaSorting": [[ 1, "asc" ]],
			"oLanguage": {
				# "sLengthMenu": "_MENU_ samples per page",
				"sLengthMenu": "",
				"sZeroRecords": "Nothing found - sorry",
				"sInfo": "Showing _TOTAL_ Samples", # _START_ to _END_ of
				"sInfoEmpty": "Showing 0 Samples", # 0 to 0 of
				"sInfoFiltered": "(filtered from _MAX_ total samples)"
			}
		})
		# in-place editing of the Phinch name column
		$('#myTable').on('input', 'td[contenteditable]', @editPhinchID );
		$('tr td:first-child').on('mouseover', () -> $(this).addClass('phinchCol') ).on('mouseout', () -> $(this).removeClass('phinchCol') )
		console.log 'selected_sample: ' + @selected_sample.length
# 4 Download button
	# Builds the filtered BIOM file from the current selection, stores it in
	# IndexedDB and then either downloads it (param == 0) or jumps to the
	# gallery (param == 1).
	downloadPhinch: (param) ->
		# `that` is needed inside the thin-arrow .done callback below
		that = this
		phinch.generated_by = 'Phinch 1.0'
		phinch.date = new Date()
		# Step 1 - get data matrix ready: keep only triples whose column is
		# selected, remapping the column index to its position j in the
		# selection (the loop variable j survives the `break`)
		phinch_data_matrix = []
		sum_rows = new Array(biom.shape[0])
		for i in [0..biom.shape[0]-1]
			sum_rows[i] = 0
		index = 0
		for i in [0..biom.data.length-1]
			flag = false
			for j in [0..@selected_sample.length-1]
				if biom.data[i][1] == @selected_sample[j] # is selected
					flag = true
					break
			if flag
				phinch_data_matrix[index] = new Array(3)
				phinch_data_matrix[index] = [biom.data[i][0], j ,biom.data[i][2]]
				sum_rows[biom.data[i][0]] += biom.data[i][2]
				index++
		phinch.data = phinch_data_matrix
		# Step 2 - get columns ready
		for i in [0..biom.shape[1]-1]
			# If this is a not selected descriptive attribute, delete it
			for j in [0..no_data_attributes_array.length-1]
				if @selected_no_data_attributes_array.indexOf(no_data_attributes_array[j]) == -1
					@removeFromObjectByKey(phinch.columns[i].metadata, no_data_attributes_array[j])
			# If this is not a selected attributes, delete it
			for k in [0..attributes_array.length-1]
				if @selected_attributes_array.indexOf(attributes_array[k]) == -1
					@removeFromObjectByKey(phinch.columns[i].metadata, attributes_array[k])
			# Add the new phinch Id column back in the file
			phinch.columns[i].metadata['phinchID'] = phinchID_array[i]
		# Step 2'2 - get rid of the deleted columns & also save the units
		# (user-edited unit strings replace the parsed ones in each value)
		tempCol = new Array(@selected_sample.length)
		for i in [0..@selected_sample.length-1]
			tempCol[i] = phinch.columns[@selected_sample[i]]
			if @selected_attributes_units_array.length > 0
				for j in [0..@selected_attributes_array.length-1]
					if $('#unit_' + (j+1) ).val() != ""
						tStr = String(tempCol[i].metadata[@selected_attributes_array[j]]).replace( String(@selected_attributes_units_array[j]), $('#unit_' + (j+1) ).val() )
						tempCol[i].metadata[@selected_attributes_array[j]] = tStr
		phinch.columns = tempCol
		# Step 3 - get rows ready, if sum == 0, get rid of that row
		# (rows are kept but their taxonomy is blanked so shape[0] and the row
		# indices in the data triples stay valid)
		valid_rows_count = 0
		for i in [0..sum_rows.length-1]
			if parseInt(sum_rows[i]) > 0
				valid_rows_count++
			else
				phinch.rows[i].metadata.taxonomy = ["k__", "p__", "c__", "o__", "f__", "g__", "s__"]
		phinch.shape[1] = @selected_sample.length
		obj = JSON.stringify(phinch)
		blob = new Blob([obj], {type: "text/plain;charset=utf-8"})
		biomToStore = {}
		biomToStore.name = filename
		biomToStore.size = blob.size
		biomToStore.data = obj
		d = new Date();
		biomToStore.date = d.getUTCFullYear() + "-" + (d.getUTCMonth() + 1) + "-" + d.getUTCDate() + "T" + d.getUTCHours() + ":" + d.getUTCMinutes() + ":" + d.getUTCSeconds() + " UTC"
		@server.biom.add(biomToStore).done () ->
			# Step 4 - stringify
			if param == 0 # Download
				saveAs(blob, filename)
			# Step 5 - jump to gallery
			else if param == 1
				that.jumpToGallery()
# 5 Utilities & Control Parts
check_unique: (arr) ->
arr = $.grep arr, (v, k) ->
return $.inArray(v ,arr) is k
if arr.length == 1 then return true else return false
sortByFrequency: (arr) ->
a = []
b = []
arr.sort(@numberSort)
for i in [0..arr.length-1]
if arr[i] != prev
a.push(arr[i])
b.push(1)
else
b[b.length-1]++
prev = arr[i]
return [a,b]
numberSort: (a,b) -> return a - b
removeFromObjectByKey: (thisObject, key) -> delete thisObject[key]
editPhinchID: () -> # make the PhinchID column editable
changedID = parseInt( $(this)[0].id.replace('phinchID_','') )
phinchID_array[changedID] = $(this).html()
# draw basic bar chart on the canvas
drawBasicBars: (div, each_numeric_linechart0, each_numeric_linechart1, values, size) =>
d3.select(div + " svg").remove()
max_single = d3.max( each_numeric_linechart1 )
y = d3.scale.linear().domain([0, max_single]).range([1, size[1] ])
eachBarWidth = (size[0] + 2) / each_numeric_linechart1.length - 2
tooltipOverPanel = d3.select(div)
.append("div")
.attr('class', 'basicTooltip')
.style("visibility", "hidden")
tempViz = d3.select(div).append("svg")
.attr("width", size[0] )
.attr("height", size[1] )
tempBar = tempViz.selectAll('rect').data(each_numeric_linechart1)
.enter().append("rect")
.attr('height', (d) -> return y(d) )
.attr('width', Math.max(0.1, eachBarWidth) + 'px')
.attr('x', (d,i) -> return i * (eachBarWidth + 2) )
.attr('y', (d,i) -> return size[1] - y(d) )
.attr('fill', (d,i) ->
if values == null
return '#919396'
else if values != null and each_numeric_linechart0[i] >= values[0] and each_numeric_linechart0[i] <= values[1]
return '#919396'
else
return '#e5e6e7'
)
.on('mouseover', (d,i) ->
tooltipOverPanel.html( 'Value: ' + each_numeric_linechart0[i] + ', Freq: ' + d )
tooltipOverPanel.style( { "visibility": "visible", top: (d3.event.pageY ) + "px", left: (d3.event.pageX + 10) + "px" })
)
.on('mouseout', (d) ->
tooltipOverPanel.style("visibility", "hidden")
)
window.filter = filter
| 115092 | class filter
biom = null
phinch = null
filename = null
attr_length = null
format = d3.format(',d')
date_array = []
no_data_attributes_array = []
unknown_array = []
attributes_array = []
attributes_array_units = []
groupable_array = []
groupable_array_content = []
columns_sample_name_array = [] # All sample names
columns_sample_count_list = [] # Each sample count
columns_non_empty_sample_count = [] # Add up all columns
phinchID_array = []
constructor: () ->
db.open(
server: "BiomData", version: 1,
schema:
"biom": key: keyPath: 'id', autoIncrement: true,
).done (s) =>
@server = s
s.biom.query().all().execute().done (results) =>
currentData = results[results.length-1]
filename = currentData.name
biom = JSON.parse(currentData.data)
phinch = JSON.parse(currentData.data)
# Parse
attr_length = biom.shape[1]-1
@generateColumns()
@generateColumnsSummary()
@generateColumnsValues()
@generateDate()
# Build
$("#file_details").append( "ANALYZING <span>" + filename.substring(0,52) + "</span> " + (parseFloat(currentData.size.valueOf() / 1000000)).toFixed(1) + " MB <br/><br />OBSERVATION <span>" + format(biom.shape[0]) + "</span> SELECTED SAMPLES <span>" + format(biom.shape[1]) + "</span>")
$('#goExport').click( () => @downloadPhinch(0) )
$('#goGallery').click () =>
$('#right_live_panel').html('<i class="icon-spinner icon-spin icon-large icon-4x" style="float:right;"></i>')
setTimeout(() =>
@downloadPhinch(1)
, 200)
@generateLeftDates()
@generateLeftNumeric()
@generateLeftNonNumeric()
@generateLeftGroupable()
# remove the numbers and leave the string values
if groupable_array_content.length > 0
for i in [0..groupable_array_content.length-1]
if typeof groupable_array_content[i] == 'number'
groupable_array_content.splice( groupable_array_content.indexOf(groupable_array_content[i]),1 )
@generateThumbnails()
@livePreview()
# 0 Jump to Gallery
jumpToGallery: () ->
that = this
db.open(
server: "BiomSample", version: 1,
schema:
"biomSample": key: keyPath: 'id', autoIncrement: true,
).done (s) =>
sampleToStore = {}
sampleToStore.name = filename
sampleToStore.type = 'sampleIDs'
sampleToStore.selected_sample = @selected_sample
sampleToStore.groupable = groupable_array
sampleToStore.selected_groupable_array = @selected_groupable_array
sampleToStore.selected_attributes_array = @selected_attributes_array
selected_phinchID_array = [] # last step to store the selected ones
for i in [0..@selected_sample.length-1]
selected_phinchID_array.push(phinchID_array[@selected_sample[i]])
sampleToStore.selected_phinchID_array = selected_phinchID_array
selected_attributes_units_array = @selected_attributes_units_array # store the units in case of changes
if @selected_attributes_units_array.length > 0
for i in [0..@selected_attributes_units_array.length-1]
if $('#unit_' + (i+1) ).val() != @selected_attributes_units_array[i] and $('#unit_' + (i+1) ).val() != ''
selected_attributes_units_array[i] = $('#unit_' + (i+1) ).val()
sampleToStore.selected_attributes_units_array = @selected_attributes_units_array
s.biomSample.add( sampleToStore ).done (item) ->
setTimeout( "window.location.href = 'viz.html'" )
# 1 Parse Data
generateColumns: () ->
for key of biom.columns[0].metadata
if key.toLowerCase().indexOf("date") != -1
date_array.push(key)
else if key == '<KEY>'
console.log 'PhinchID does exsit!'
else if (key.toLowerCase().indexOf("barcode") != -1) || (key.toLowerCase().indexOf("sequence") != -1) || (key.toLowerCase().indexOf("reverse") != -1) || (key.toLowerCase() == "internalcode") || (key.toLowerCase() == "description") || (key.toLowerCase().indexOf("adapter") !=-1)
no_data_attributes_array.push(key)
else if !isNaN(biom.columns[0].metadata[key].split(" ")[0].replace(",","")) || biom.columns[0].metadata[key] == "no_data"
idential_elements_in_array_flag = false
for i in [0..attr_length]
if biom.columns[i].metadata[key] != 'no_data'
idential_elements_in_array = biom.columns[i].metadata[key]
break
for i in [0..attr_length]
if biom.columns[i].metadata[key] != idential_elements_in_array and biom.columns[i].metadata[key] != 'no_data'
idential_elements_in_array_flag = true
unitsFlag = false
if idential_elements_in_array_flag
attributes_array.push(key)
for i in [0..attr_length] # in case 'no_data'
if biom.columns[i].metadata[key] != 'no_data' and unitsFlag is false
attributes_array_units.push(biom.columns[i].metadata[key].split(" ")[1])
unitsFlag = true
else
no_data_attributes_array.push(key)
else if typeof key == 'string'
groupable_array.push(key)
starting_flag = groupable_array_content.length
groupable_array_content.push(starting_flag)
for i in [0..attr_length]
flag = true
if groupable_array_content.length > 0
for j in [(starting_flag+1)..groupable_array_content.length-1]
if biom.columns[i].metadata[key] == groupable_array_content[j]
flag = false
break
if flag
groupable_array_content.push(biom.columns[i].metadata[key])
if groupable_array_content.length - starting_flag == 2
no_data_attributes_array.push(key)
groupable_array.splice(groupable_array.length-1,1)
groupable_array_content.splice(groupable_array_content.length-2, 2)
else
unknown_array.push(key)
generateColumnsSummary: () ->
columns_sample_total_count = 0 # Non empty sample ids, for new phinch file
for i in [0..attr_length]
columns_sample_count_list[i] = 0
columns_sample_name_array.push(biom.columns[i].id)
for i in [0..attr_length]
if biom.columns[i].metadata['phinchID']?
phinchID_array.push(biom.columns[i].metadata['phinchID'])
else
phinchID_array.push(i)
for i in [0..biom.data.length-1]
columns_sample_total_count += biom.data[i][2]
columns_sample_count_list[biom.data[i][1]] += biom.data[i][2]
for i in [0..attr_length]
if columns_sample_count_list[i] > 0
columns_non_empty_sample_count.push(i)
generateColumnsValues: () ->
@columns_metadata_array = [] # All column data values
@columns_metadata_array = new Array(attributes_array.length)
if attributes_array.length > 0
for i in [0..attributes_array.length-1]
@columns_metadata_array[i] = new Array(attr_length+1)
for i in [0..attr_length]
for key of biom.columns[i].metadata
for j in [0..attributes_array.length-1]
if key == attributes_array[j]
@columns_metadata_array[j][i] = parseFloat(biom.columns[i].metadata[key].split(" ")[0].replace(",","")) # in case there is between thousands
if isNaN(@columns_metadata_array[j][i])
@columns_metadata_array[j][i] = -99999
generateDate: () ->
@formatted_date_array = new Array(date_array.length)
@sorted_number_date_array_d = new Array(date_array.length)
@sorted_number_date_array_freq = new Array(date_array.length)
number_date_array = new Array(date_array.length)
if date_array.length > 0
for m in [0..date_array.length-1]
@formatted_date_array[m] = []
@sorted_number_date_array_d[m] = []
@sorted_number_date_array_freq[m] = []
date_meta_key = date_array[m]
number_date_array[m] = []
for i in [0..attr_length]
ori_timestamp = biom.columns[i].metadata[date_meta_key]
if ori_timestamp.length < 11 && ori_timestamp.indexOf(":") == -1 # No Hour Min Sec
@formatted_date_array[m].push(moment(ori_timestamp).format("YYYY-MM-DD"))
number_date_array[m].push(moment(ori_timestamp).format("YYYYMMDD"))
else
@formatted_date_array[m].push(moment(ori_timestamp, "YYYY-MM-DDTHH:mm:ss Z").utc().format())
number_date_array[m].push( moment(ori_timestamp, "YYYY-MM-DDTHH:mm:ss Z").utc().format("YYYYMMDDHHmmss") )
@sorted_number_date_array_d[m] = @sortByFrequency(number_date_array[m])[0]
@sorted_number_date_array_freq[m] = @sortByFrequency(number_date_array[m])[1]
# 2 Build Panels
generateLeftDates: () ->
content = ""
@range_dates_array = []
if date_array.length == 0
$('#att_head_dates').hide()
else
if date_array.length > 0
for m in [0..date_array.length-1]
if @check_unique(@formatted_date_array[m])
$('#dates').append("<div class = 'biom_valid_attr'><p>" + date_array[m] + ": " + @formatted_date_array[m][0] + "</p></div>")
@range_dates_array[m] = new Array(2)
@range_dates_array[m][0] = moment(@formatted_date_array[m][0]).utc().format("X")
@range_dates_array[m][1] = moment(@formatted_date_array[m][0]).utc().format("X")
else
content += "<div class = 'biom_valid_attr_dates'>"
content += date_array[m]
content += "<div class = 'icon-expand-collapse-c' id= 'expend_collapse_dates_icon_" + (m + 1) + "'><i class='icon-expand-alt'></i></div>"
# display smaller dates
if @sorted_number_date_array_d[m][0].length < 9
content += "<p class='range_new_dates' id='range_dates_" + (m+1) + "_new'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDD").format("MM/DD/YY") + " - " + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDD").format("MM/DD/YY") + "</p>"
else
content += "<p class='range_new_dates' id='range_dates_" + (m+1) + "_new'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDDHHmmss").format("MM/DD/YY") + " - " + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDDHHmmss").format("MM/DD/YY") + "</p>"
content += "<div style='display: none;' id = 'expend_collapse_dates_" + (m+1) + "'>" + "<div class= 'biom_valid_att_thumbnail_dates' id='thumb_dates_" + (m+1) + "'></div>"
content += "<div class='biom_valid_att_slider' id='slider_dates_" + (m+1) + "'></div>"
if @sorted_number_date_array_d[m][0].length < 9
content += "<div class='range range_left_dates' id='range_dates_" + (m+1) + "_left'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDD").format("YYYY-MM-DD") + "</div>"
content += "<div class='range range_right_dates' id='range_dates_" + (m+1) + "_right'>" + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDD").format("YYYY-MM-DD") + "</div>"
min_timestamp = moment(@sorted_number_date_array_d[m][0], "YYYYMMDD").utc().format("X")
max_timestamp = moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDD").utc().format("X")
else
content += "<div class='range range_left_dates' id='range_dates_" + (m+1) + "_left'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDDHHmmss").format("YYYY-MM-DD<br/>HH:mm:ss") + "</div>"
content += "<div class='range range_right_dates' id='range_dates_" + (m+1) + "_right'>" + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDDHHmmss").format("YYYY-MM-DD<br/>HH:mm:ss") + "</div>"
min_timestamp = moment(@sorted_number_date_array_d[m][0], "YYYYMMDDHHmmss Z").utc().format("X")
max_timestamp = moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDDHHmmss Z").utc().format("X")
content += "</div></div>"
$('#dates').append(content)
$('#expend_collapse_dates_icon_' + (m + 1) ).click (event) =>
id = event.currentTarget.id.replace('expend_collapse_dates_icon_','')
if $('#expend_collapse_dates_' + id).attr('style') == 'display: none;'
$('#expend_collapse_dates_' + id).show()
$('#expend_collapse_dates_icon_' + id).html('<i class="icon-collapse-alt"></i>')
else
$('#expend_collapse_dates_' + id).hide()
$('#expend_collapse_dates_icon_' + id).html('<i class="icon-expand-alt"></i>')
@drawBasicBars( '#thumb_dates_' + (m+1), null, @sorted_number_date_array_freq[m], null, [250, 50] )
$('#slider_dates_' + (m+1)).width( $('#thumb_dates_' + (m+1) + ' svg').attr('width') - 2 )
@range_dates_array[m] = new Array(2)
@range_dates_array[m][0] = min_timestamp
@range_dates_array[m][1] = max_timestamp
$( "#slider_dates_" + (m+1)).slider({
range: true,
min: 0,
max: @sorted_number_date_array_freq[m].length-1,
step: 1,
values: [ 0, @sorted_number_date_array_freq[m].length-1 ],
slide: ( event, ui ) =>
id = event.target.id.replace("slider_dates_","")
$("#range_dates_" + id + "_new").text( "[" + moment(@sorted_number_date_array_d[id-1][ui.values[0]], "YYYYMMDD").format("MM/DD/YY") + " — " + moment(@sorted_number_date_array_d[id-1][ui.values[1]], "YYYYMMDD").format("MM/DD/YY") + "]")
if @sorted_number_date_array_d[id-1][ui.values[0]].length < 9
@range_dates_array[id-1][0] = moment(@sorted_number_date_array_d[id-1][ui.values[0]],"YYYYMMDD").utc().format("X")
@range_dates_array[id-1][1] = moment(@sorted_number_date_array_d[id-1][ui.values[1]],"YYYYMMDD").utc().format("X")
else
@range_dates_array[id-1][0] = moment(@sorted_number_date_array_d[id-1][ui.values[0]], "YYYYMMDDHHmmss").utc().format("X")
@range_dates_array[id-1][1] = moment(@sorted_number_date_array_d[id-1][ui.values[1]], "YYYYMMDDHHmmss").utc().format("X")
@livePreview()
})
# find the attributes with numeric values
generateLeftNumeric: () ->
if attributes_array.length == 0
$('#att_head_numeric').hide()
else
if attributes_array.length > 0
for i in [0..attributes_array.length-1]
content = ""
content += "<input type='checkbox' name='numeric_check_group' id='numeric_check_" + (i+1) + "' checked='checked' /><label for='numeric_check_" + (i+1) + "'></label>"
content += "<span class = 'biom_valid_attr' id='att_" + (i+1) + "'>" + attributes_array[i] + "</span>"
if (typeof(attributes_array_units[i]) != 'undefined' && attributes_array_units[i] != null)
content += "<input type='text' class='biom_valid_attr_units' id='unit_" + (i+1) + "' placeholder='" + attributes_array_units[i] + "'>"
else
content += "<input type='text' class='biom_valid_attr_units' id='unit_" + (i+1) + "' placeholder='unit'>"
content += "<div class = 'icon-expand-collapse-c' id= 'expend_collapse_icon_" + (i+1) + "'><i class='icon-expand-alt'></i></div>"
content += "<div class='biom_valid_att_thumbnail_sm' id='thumb_sm_" + (i+1) + "'></div>"
content += "<p class='range range_new' id='range_" + (i+1) + "_new'></p>"
content += "<div style='display: none;' id = 'expend_collapse_" + (i+1) + "'>" + "<div class='biom_valid_att_thumbnail' id='thumb_" + (i+1) + "'></div>"
content += "<div class='biom_valid_att_slider' id='slider_" + (i+1) + "'></div>"
content += "<div class='blackSticks'></div>"
content += "<p class='range range_left' id='range_" + (i+1) + "_left'></p>"
content += "<p class='range range_right' id='range_" + (i+1) + "_right'></p>"
content += "<p class='biom_valid_notes' id='att_note_" + (i+1) + "'></p></div>"
$('#numeric_att').append("<div>" + content + "</div>")
$('#expend_collapse_icon_' + (i+1) ).click (event) =>
id = event.currentTarget.id.replace('expend_collapse_icon_','')
if $('#expend_collapse_' + id).attr('style') == 'display: none;'
$('#expend_collapse_' + id).show()
$('#att_' + id).css('font-weight', 'bold')
$('#unit_' + id).show()
$('#range_' + id + '_new').show()
$('#thumb_sm_' + id).hide()
$('#expend_collapse_icon_' + id).html('<i class="icon-collapse-alt"></i>')
else
$('#expend_collapse_' + id).hide()
$('#att_' + id).css('font-weight', 'normal')
$('#unit_' + id).hide()
$('#range_' + id + '_new').hide()
$('#thumb_sm_' + id).show()
$('#expend_collapse_icon_' + id).html('<i class="icon-expand-alt"></i>')
$('#numeric_check_' + (i+1) ).click () => @livePreview()
# deals with the non-numeric attributes
generateLeftNonNumeric: () ->
if no_data_attributes_array.length == 0
$('#att_head_descriptive').hide()
else
if no_data_attributes_array.length > 0
for i in [0..no_data_attributes_array.length-1]
content = ""
content += "<input type='checkbox' name='non_numeric_check_group' id='non_numeric_check_" + (i+1) + "' /><label for='non_numeric_check_" + (i+1) + "'></label><span class = 'biom_valid_attr'>" + no_data_attributes_array[i] + "</span>"
$('#non_numeric_att').append("<div>" + content + "</div>")
$('#non_numeric_check_' + (i+1)).click () => @livePreview()
# generate the groupable attributes panel
generateLeftGroupable: () ->
pointer_left = 1
pointer_right = groupable_array_content.length-1
check_count = 1
if groupable_array.length == 0
$('#att_head_groupable').hide()
else
if groupable_array.length > 0
for i in [0..groupable_array.length-1]
flag = true
toprocess = []
content = ""
content += "<span class = 'biom_valid_attr'>" + groupable_array[i] + "</span><br/>"
if groupable_array_content.length > 0
for j in [pointer_left..groupable_array_content.length-1]
if groupable_array_content[j] == j
pointer_right = j
flag = false
break
if flag
toprocess = groupable_array_content.slice(pointer_left, groupable_array_content.length)
else
toprocess = groupable_array_content.slice(pointer_left, pointer_right)
pointer_left = pointer_right + 1
pointer_right = groupable_array_content.length-1
if toprocess.length > 0
for k in [0..toprocess.length-1]
content += "<input type='checkbox' name='groupable_check_group' id='groupable_check_" + check_count + "' class='groupable_check' checked='checked' /><label for='groupable_check_" + check_count + "'></label><span class = 'biom_valid_attr_grp'>" + toprocess[k] + "</span><br/>"
check_count++
$('#groupable_att').append("<div class='overflowControl'>" + content + "</div>")
$('.groupable_check').click () => @livePreview()
# generate the thumbnails for users to filter
generateThumbnails: () ->
@range_array = []
@lines_array = new Array(@columns_metadata_array.length)
if @columns_metadata_array.length > 0
step = new Array(@columns_metadata_array.length) # keeps the step value between each bar
for i in [0..@columns_metadata_array.length-1]
nan_values = 0
each_numeric_linechart = @sortByFrequency(@columns_metadata_array[i])
if each_numeric_linechart[0][0] == -99999
nan_values = each_numeric_linechart[1][0]
each_numeric_linechart[0].shift()
each_numeric_linechart[1].shift()
if nan_values > 0
$("#att_note_" + (i+1)).text("* This column has " + nan_values + " empty values.")
@lines_array[i] = new Array(2)
@lines_array[i][0] = each_numeric_linechart[0]
@lines_array[i][1] = each_numeric_linechart[1]
each_numeric_linechart_min = Math.min.apply(Math, each_numeric_linechart[0])
each_numeric_linechart_max = Math.max.apply(Math, each_numeric_linechart[0])
@drawBasicBars( '#thumb_' + (i+1), each_numeric_linechart[0], each_numeric_linechart[1], null, [250, 50] )
@drawBasicBars( '#thumb_sm_' + (i+1), each_numeric_linechart[0], each_numeric_linechart[1], null, [130, 15])
@range_array[i] = new Array(2)
@range_array[i][0] = each_numeric_linechart_min
@range_array[i][1] = each_numeric_linechart_max
step[i] = (each_numeric_linechart_max - each_numeric_linechart_min) / each_numeric_linechart[1].length
$('#slider_' + (i+1)).width( $('#thumb_' + (i+1) + ' svg').attr('width') - 2 )
$( "#slider_" + (i+1)).slider({
range: true,
min: each_numeric_linechart_min,
max: each_numeric_linechart_max,
step: (each_numeric_linechart_max - each_numeric_linechart_min) / each_numeric_linechart[1].length, # step for adjustment, get the min between unit & 1
values: [ each_numeric_linechart_min, each_numeric_linechart_max ],
slide: ( event, ui ) =>
id = event.target.id.replace("slider_","")
if ui.value == ui.values[0]
order = Math.round( (ui.values[ 0 ] - @lines_array[id-1][0][0]) / step[id-1] )
leftValue = @lines_array[id-1][0][order]
@range_array[id-1][0] = leftValue # ui.values[0]
$("#range_" + id + "_left").text( leftValue ).css('margin-left', Math.max( event.clientX - 40, 20) )
$("#range_" + id + "_new").text( "range: [" + leftValue + " — " + @range_array[id-1][1] + "]")
else
order = Math.round( ( ui.values[ 1 ] - @lines_array[id-1][0][0]) / step[id-1] ) - 1
rightValue = @lines_array[id-1][0][order]
@range_array[id-1][1] = rightValue # ui.values[1]
$("#range_" + id + "_right").text( rightValue ).css('margin-left', Math.min( event.clientX - 40, 270) )
$("#range_" + id + "_new").text( "range: [" + @range_array[id-1][0] + " — " + rightValue + "]")
$('#numeric_check_' + id).prop('checked', true)
@drawBasicBars( '#thumb_sm_' + id, @lines_array[id-1][0], @lines_array[id-1][1], @range_array[id-1], [130, 15]) # values - ui.values
@livePreview()
})
$( "#range_" + (i+1) + "_left").text( each_numeric_linechart_min )
$( "#range_" + (i+1) + "_right").text(each_numeric_linechart_max )
$( "#range_" + (i+1) + "_new").text("range: [" + each_numeric_linechart_min + " — " + each_numeric_linechart_max + "]" )
# 3 Live Preview, the right section
livePreview: () ->
@selected_sample = []
@selected_groupable_array = []
@selected_attributes_array = []
@selected_attributes_units_array = []
@selected_no_data_attributes_array = []
selected_range_array = []
if attributes_array.length > 0
for i in [1..attributes_array.length]
if $('#numeric_check_' + i).is(':checked')
@selected_attributes_array.push(attributes_array[i-1])
@selected_attributes_units_array.push(attributes_array_units[i-1])
if no_data_attributes_array.length > 0
for i in [1..no_data_attributes_array.length]
if $('#non_numeric_check_' + i).is(':checked')
@selected_no_data_attributes_array.push(no_data_attributes_array[i-1])
if groupable_array_content.length > 0
for i in [1..groupable_array_content.length]
if $('#groupable_check_' + i).is(':checked')
@selected_groupable_array.push(groupable_array_content[i-1])
if @range_array.length > 0
for i in [1..@range_array.length]
if $('#numeric_check_' + i).is(':checked')
selected_range_array.push(@range_array[i-1])
$('#right_live_panel').html('')
# Step 1
for i in [0..biom.shape[1]-1]
@selected_sample.push(i)
if selected_range_array.length > 0
for i in [0..selected_range_array.length-1]
key = @selected_attributes_array[i]
for r in [0..biom.shape[1]-1]
if biom.columns[r].metadata[key].split(" ")[0] < selected_range_array[i][0] || biom.columns[r].metadata[key].split(" ")[0] > selected_range_array[i][1]
delete_index = @selected_sample.indexOf(r)
if delete_index != -1 then @selected_sample.splice(delete_index,1)
if date_array.length > 0
for i in [0..date_array.length-1]
key = date_array[i]
for r in [0..biom.shape[1]-1]
current_timeStamp = biom.columns[r].metadata[key]
if current_timeStamp.length < 11 # and current_timeStamp.indexOf(":") != -1
formatted_timeStamp = moment(current_timeStamp).utc().format("X")
else
formatted_timeStamp = moment(current_timeStamp, "YYYY-MM-DDTHH:mm:ss Z").utc().format("X")
if formatted_timeStamp < @range_dates_array[i][0] || formatted_timeStamp > @range_dates_array[i][1]
delete_index = @selected_sample.indexOf(r)
if delete_index != -1
@selected_sample.splice(delete_index,1)
# console.log 'sample #' + delete_index + ' doesn't meet date range
# Step 2
if groupable_array.length > 0
for i in [0..groupable_array.length-1]
for k in [0..biom.shape[1]-1]
flag = true
if @selected_groupable_array.length > 0
for r in [0..@selected_groupable_array.length-1]
if biom.columns[k].metadata[ groupable_array[i] ] == @selected_groupable_array[r]
flag = false
break
if flag
delete_index = @selected_sample.indexOf(k)
if delete_index != -1 then @selected_sample.splice(delete_index,1)
else if @selected_groupable_array.length == 0
@selected_sample = []
# Add one more step here: get rid of _empty_sample_count, leave only the valid samples
delete_index = []
if @selected_sample.length > 0
for i in [0..@selected_sample.length-1]
flag = true
if columns_non_empty_sample_count.length > 1
for j in [0..columns_non_empty_sample_count.length-1]
if columns_non_empty_sample_count[j] == @selected_sample[i]
flag = false
break
if flag
delete_index.push(@selected_sample[i])
console.log 'Sample ' + (i+1) + ' has 0 count'
if delete_index.length > 0
for i in [0..delete_index.length-1]
@selected_sample.splice(@selected_sample.indexOf(delete_index[i]), 1)
# Step 3 Now based on the filters, selected sample now contains all the right sample # within that range.
content = "<table id='myTable'><thead><tr><th class = 'headerID myTableHeader'>PHINCH NAME</th><th class = 'headerID myTableHeader'>BIOM SAMPLE ID" + "</th><th class='myTableHeader'>SAMPLE NAME</th><th class='headerCount myTableHeader'>SEQUENCE READS</th></thead>"
if @selected_sample.length > 0
for i in [0..@selected_sample.length-1]
content += '<tr><td contenteditable="true" id="phinchID_' + @selected_sample[i] + '">' + phinchID_array[@selected_sample[i]] + '</td><td>' + (@selected_sample[i] + 1) + '</td><td>' + columns_sample_name_array[@selected_sample[i]] + '</td><td>' + columns_sample_count_list[@selected_sample[i]] + '</td></tr>'
content += "</table>"
$("#right_live_panel").html(content)
$('#myTable').dataTable({
"iDisplayLength": @selected_sample.length, # 50
"aaSorting": [[ 1, "asc" ]],
"oLanguage": {
# "sLengthMenu": "_MENU_ samples per page",
"sLengthMenu": "",
"sZeroRecords": "Nothing found - sorry",
"sInfo": "Showing _TOTAL_ Samples", # _START_ to _END_ of
"sInfoEmpty": "Showing 0 Samples", # 0 to 0 of
"sInfoFiltered": "(filtered from _MAX_ total samples)"
}
})
$('#myTable').on('input', 'td[contenteditable]', @editPhinchID );
$('tr td:first-child').on('mouseover', () -> $(this).addClass('phinchCol') ).on('mouseout', () -> $(this).removeClass('phinchCol') )
console.log 'selected_sample: ' + @selected_sample.length
# 4 Download button
downloadPhinch: (param) ->
that = this
phinch.generated_by = '<NAME>'
phinch.date = new Date()
# Step 1 - get data matrix ready
phinch_data_matrix = []
sum_rows = new Array(biom.shape[0])
for i in [0..biom.shape[0]-1]
sum_rows[i] = 0
index = 0
for i in [0..biom.data.length-1]
flag = false
for j in [0..@selected_sample.length-1]
if biom.data[i][1] == @selected_sample[j] # is selected
flag = true
break
if flag
phinch_data_matrix[index] = new Array(3)
phinch_data_matrix[index] = [biom.data[i][0], j ,biom.data[i][2]]
sum_rows[biom.data[i][0]] += biom.data[i][2]
index++
phinch.data = phinch_data_matrix
# Step 2 - get columns ready
for i in [0..biom.shape[1]-1]
# If this is a not selected descriptive attribute, delete it
for j in [0..no_data_attributes_array.length-1]
if @selected_no_data_attributes_array.indexOf(no_data_attributes_array[j]) == -1
@removeFromObjectByKey(phinch.columns[i].metadata, no_data_attributes_array[j])
# If this is not a selected attributes, delete it
for k in [0..attributes_array.length-1]
if @selected_attributes_array.indexOf(attributes_array[k]) == -1
@removeFromObjectByKey(phinch.columns[i].metadata, attributes_array[k])
# Add the new phinch Id column back in the file
phinch.columns[i].metadata['phinchID'] = phinchID_array[i]
# Step 2'2 - get rid of the deleted columns & also save the units
tempCol = new Array(@selected_sample.length)
for i in [0..@selected_sample.length-1]
tempCol[i] = phinch.columns[@selected_sample[i]]
if @selected_attributes_units_array.length > 0
for j in [0..@selected_attributes_array.length-1]
if $('#unit_' + (j+1) ).val() != ""
tStr = String(tempCol[i].metadata[@selected_attributes_array[j]]).replace( String(@selected_attributes_units_array[j]), $('#unit_' + (j+1) ).val() )
tempCol[i].metadata[@selected_attributes_array[j]] = tStr
phinch.columns = tempCol
# Step 3 - get rows ready, if sum == 0, get rid of that row
valid_rows_count = 0
for i in [0..sum_rows.length-1]
if parseInt(sum_rows[i]) > 0
valid_rows_count++
else
phinch.rows[i].metadata.taxonomy = ["k__", "p__", "c__", "o__", "f__", "g__", "s__"]
# console.log valid_rows_count # not change the shape[0], cuz otherwise we have to change all the row numbers
# phinch.shape[0] = valid_rows_count
phinch.shape[1] = @selected_sample.length
obj = JSON.stringify(phinch)
blob = new Blob([obj], {type: "text/plain;charset=utf-8"})
biomToStore = {}
biomToStore.name = filename
biomToStore.size = blob.size
biomToStore.data = obj
d = new Date();
biomToStore.date = d.getUTCFullYear() + "-" + (d.getUTCMonth() + 1) + "-" + d.getUTCDate() + "T" + d.getUTCHours() + ":" + d.getUTCMinutes() + ":" + d.getUTCSeconds() + " UTC"
@server.biom.add(biomToStore).done () ->
# Step 4 - stringify
if param == 0 # Download
saveAs(blob, filename)
# Step 5 - jump to gallery
else if param == 1
that.jumpToGallery()
# 5 Utilities & Control Parts
check_unique: (arr) ->
arr = $.grep arr, (v, k) ->
return $.inArray(v ,arr) is k
if arr.length == 1 then return true else return false
sortByFrequency: (arr) ->
a = []
b = []
arr.sort(@numberSort)
for i in [0..arr.length-1]
if arr[i] != prev
a.push(arr[i])
b.push(1)
else
b[b.length-1]++
prev = arr[i]
return [a,b]
numberSort: (a,b) -> return a - b
removeFromObjectByKey: (thisObject, key) -> delete thisObject[key]
editPhinchID: () -> # make the PhinchID column editable
changedID = parseInt( $(this)[0].id.replace('phinchID_','') )
phinchID_array[changedID] = $(this).html()
# draw basic bar chart on the canvas
drawBasicBars: (div, each_numeric_linechart0, each_numeric_linechart1, values, size) =>
d3.select(div + " svg").remove()
max_single = d3.max( each_numeric_linechart1 )
y = d3.scale.linear().domain([0, max_single]).range([1, size[1] ])
eachBarWidth = (size[0] + 2) / each_numeric_linechart1.length - 2
tooltipOverPanel = d3.select(div)
.append("div")
.attr('class', 'basicTooltip')
.style("visibility", "hidden")
tempViz = d3.select(div).append("svg")
.attr("width", size[0] )
.attr("height", size[1] )
tempBar = tempViz.selectAll('rect').data(each_numeric_linechart1)
.enter().append("rect")
.attr('height', (d) -> return y(d) )
.attr('width', Math.max(0.1, eachBarWidth) + 'px')
.attr('x', (d,i) -> return i * (eachBarWidth + 2) )
.attr('y', (d,i) -> return size[1] - y(d) )
.attr('fill', (d,i) ->
if values == null
return '#919396'
else if values != null and each_numeric_linechart0[i] >= values[0] and each_numeric_linechart0[i] <= values[1]
return '#919396'
else
return '#e5e6e7'
)
.on('mouseover', (d,i) ->
tooltipOverPanel.html( 'Value: ' + each_numeric_linechart0[i] + ', Freq: ' + d )
tooltipOverPanel.style( { "visibility": "visible", top: (d3.event.pageY ) + "px", left: (d3.event.pageX + 10) + "px" })
)
.on('mouseout', (d) ->
tooltipOverPanel.style("visibility", "hidden")
)
window.filter = filter
| true | class filter
biom = null
phinch = null
filename = null
attr_length = null
format = d3.format(',d')
date_array = []
no_data_attributes_array = []
unknown_array = []
attributes_array = []
attributes_array_units = []
groupable_array = []
groupable_array_content = []
columns_sample_name_array = [] # All sample names
columns_sample_count_list = [] # Each sample count
columns_non_empty_sample_count = [] # Add up all columns
phinchID_array = []
  constructor: () ->
    # Load the most recently stored BIOM file from IndexedDB (db.js wrapper),
    # parse it, classify its metadata columns, build all left-hand filter
    # panels, and render the initial live preview table.
    db.open(
      server: "BiomData", version: 1,
      schema:
        "biom": key: keyPath: 'id', autoIncrement: true,
    ).done (s) =>
      @server = s
      s.biom.query().all().execute().done (results) =>
        # the last record is the file the user most recently loaded
        currentData = results[results.length-1]
        filename = currentData.name
        # parse twice to get two independent copies: `biom` is only read,
        # `phinch` is destructively filtered on export
        biom = JSON.parse(currentData.data)
        phinch = JSON.parse(currentData.data)
        # Parse
        attr_length = biom.shape[1]-1
        @generateColumns()
        @generateColumnsSummary()
        @generateColumnsValues()
        @generateDate()
        # Build
        $("#file_details").append( "ANALYZING <span>" + filename.substring(0,52) + "</span> " + (parseFloat(currentData.size.valueOf() / 1000000)).toFixed(1) + " MB <br/><br />OBSERVATION <span>" + format(biom.shape[0]) + "</span> SELECTED SAMPLES <span>" + format(biom.shape[1]) + "</span>")
        $('#goExport').click( () => @downloadPhinch(0) )
        $('#goGallery').click () =>
          $('#right_live_panel').html('<i class="icon-spinner icon-spin icon-large icon-4x" style="float:right;"></i>')
          # small delay so the spinner paints before the heavy export work
          setTimeout(() =>
            @downloadPhinch(1)
          , 200)
        @generateLeftDates()
        @generateLeftNumeric()
        @generateLeftNonNumeric()
        @generateLeftGroupable()
        # remove the numbers and leave the string values
        if groupable_array_content.length > 0
          for i in [0..groupable_array_content.length-1]
            if typeof groupable_array_content[i] == 'number'
              groupable_array_content.splice( groupable_array_content.indexOf(groupable_array_content[i]),1 )
        @generateThumbnails()
        @livePreview()
# 0 Jump to Gallery
  jumpToGallery: () ->
    # Persist the current selection (samples, attributes, units, groups) into
    # the "BiomSample" IndexedDB store, then navigate to the gallery page.
    that = this
    db.open(
      server: "BiomSample", version: 1,
      schema:
        "biomSample": key: keyPath: 'id', autoIncrement: true,
    ).done (s) =>
      sampleToStore = {}
      sampleToStore.name = filename
      sampleToStore.type = 'sampleIDs'
      sampleToStore.selected_sample = @selected_sample
      sampleToStore.groupable = groupable_array
      sampleToStore.selected_groupable_array = @selected_groupable_array
      sampleToStore.selected_attributes_array = @selected_attributes_array
      selected_phinchID_array = [] # last step to store the selected ones
      for i in [0..@selected_sample.length-1]
        selected_phinchID_array.push(phinchID_array[@selected_sample[i]])
      sampleToStore.selected_phinchID_array = selected_phinchID_array
      # pick up any unit strings the user edited in the numeric panel;
      # note this is an alias of @selected_attributes_units_array, so the
      # edits below are visible through either name
      selected_attributes_units_array = @selected_attributes_units_array # store the units in case of changes
      if @selected_attributes_units_array.length > 0
        for i in [0..@selected_attributes_units_array.length-1]
          if $('#unit_' + (i+1) ).val() != @selected_attributes_units_array[i] and $('#unit_' + (i+1) ).val() != ''
            selected_attributes_units_array[i] = $('#unit_' + (i+1) ).val()
      sampleToStore.selected_attributes_units_array = @selected_attributes_units_array
      s.biomSample.add( sampleToStore ).done (item) ->
        setTimeout( "window.location.href = 'viz.html'" )
# 1 Parse Data
  generateColumns: () ->
    # Classify every sample-metadata key (taken from the first column) into:
    # date_array, no_data_attributes_array (descriptive), attributes_array
    # (numeric, varying, with units), groupable_array (categorical), or
    # unknown_array.
    for key of biom.columns[0].metadata
      if key.toLowerCase().indexOf("date") != -1
        date_array.push(key)
      else if key == 'PI:KEY:<KEY>END_PI'
        # NOTE(review): the literal above looks like an anonymization
        # artifact; presumably it compared against 'phinchID' — confirm.
        console.log 'PhinchID does exsit!'
      else if (key.toLowerCase().indexOf("barcode") != -1) || (key.toLowerCase().indexOf("sequence") != -1) || (key.toLowerCase().indexOf("reverse") != -1) || (key.toLowerCase() == "internalcode") || (key.toLowerCase() == "description") || (key.toLowerCase().indexOf("adapter") !=-1)
        # well-known sequencing/bookkeeping columns are always descriptive
        no_data_attributes_array.push(key)
      else if !isNaN(biom.columns[0].metadata[key].split(" ")[0].replace(",","")) || biom.columns[0].metadata[key] == "no_data"
        # numeric-looking value: keep as a numeric attribute only when the
        # column actually varies across samples (ignoring 'no_data' entries)
        idential_elements_in_array_flag = false
        for i in [0..attr_length]
          if biom.columns[i].metadata[key] != 'no_data'
            idential_elements_in_array = biom.columns[i].metadata[key]
            break
        for i in [0..attr_length]
          if biom.columns[i].metadata[key] != idential_elements_in_array and biom.columns[i].metadata[key] != 'no_data'
            idential_elements_in_array_flag = true
        unitsFlag = false
        if idential_elements_in_array_flag
          attributes_array.push(key)
          # record the unit (second whitespace token) from the first usable value
          for i in [0..attr_length] # in case 'no_data'
            if biom.columns[i].metadata[key] != 'no_data' and unitsFlag is false
              attributes_array_units.push(biom.columns[i].metadata[key].split(" ")[1])
              unitsFlag = true
        else
          no_data_attributes_array.push(key)
      else if typeof key == 'string'
        # categorical column: collect its distinct values into the flat
        # groupable_array_content list, preceded by an integer sentinel equal
        # to the start index (used later by generateLeftGroupable to slice)
        groupable_array.push(key)
        starting_flag = groupable_array_content.length
        groupable_array_content.push(starting_flag)
        for i in [0..attr_length]
          flag = true
          if groupable_array_content.length > 0
            for j in [(starting_flag+1)..groupable_array_content.length-1]
              if biom.columns[i].metadata[key] == groupable_array_content[j]
                flag = false
                break
          if flag
            groupable_array_content.push(biom.columns[i].metadata[key])
        # only one distinct value -> not groupable, demote to descriptive
        if groupable_array_content.length - starting_flag == 2
          no_data_attributes_array.push(key)
          groupable_array.splice(groupable_array.length-1,1)
          groupable_array_content.splice(groupable_array_content.length-2, 2)
      else
        unknown_array.push(key)
  generateColumnsSummary: () ->
    # Collect per-sample names, read counts, phinch IDs, and the index list
    # of samples whose total count is non-zero.
    columns_sample_total_count = 0 # Non empty sample ids, for new phinch file
    for i in [0..attr_length]
      columns_sample_count_list[i] = 0
      columns_sample_name_array.push(biom.columns[i].id)
    for i in [0..attr_length]
      # reuse a stored phinchID when the file already has one, else default
      # to the column index
      if biom.columns[i].metadata['phinchID']?
        phinchID_array.push(biom.columns[i].metadata['phinchID'])
      else
        phinchID_array.push(i)
    # biom.data rows are [rowIdx, colIdx, count] sparse triplets
    for i in [0..biom.data.length-1]
      columns_sample_total_count += biom.data[i][2]
      columns_sample_count_list[biom.data[i][1]] += biom.data[i][2]
    for i in [0..attr_length]
      if columns_sample_count_list[i] > 0
        columns_non_empty_sample_count.push(i)
  generateColumnsValues: () ->
    # Build @columns_metadata_array[j][i] = numeric value of attribute j for
    # sample i; unparseable values become the sentinel -99999.
    @columns_metadata_array = [] # All column data values
    @columns_metadata_array = new Array(attributes_array.length)
    if attributes_array.length > 0
      for i in [0..attributes_array.length-1]
        @columns_metadata_array[i] = new Array(attr_length+1)
      for i in [0..attr_length]
        for key of biom.columns[i].metadata
          for j in [0..attributes_array.length-1]
            if key == attributes_array[j]
              @columns_metadata_array[j][i] = parseFloat(biom.columns[i].metadata[key].split(" ")[0].replace(",","")) # in case there is between thousands
              if isNaN(@columns_metadata_array[j][i])
                @columns_metadata_array[j][i] = -99999
generateDate: () ->
@formatted_date_array = new Array(date_array.length)
@sorted_number_date_array_d = new Array(date_array.length)
@sorted_number_date_array_freq = new Array(date_array.length)
number_date_array = new Array(date_array.length)
if date_array.length > 0
for m in [0..date_array.length-1]
@formatted_date_array[m] = []
@sorted_number_date_array_d[m] = []
@sorted_number_date_array_freq[m] = []
date_meta_key = date_array[m]
number_date_array[m] = []
for i in [0..attr_length]
ori_timestamp = biom.columns[i].metadata[date_meta_key]
if ori_timestamp.length < 11 && ori_timestamp.indexOf(":") == -1 # No Hour Min Sec
@formatted_date_array[m].push(moment(ori_timestamp).format("YYYY-MM-DD"))
number_date_array[m].push(moment(ori_timestamp).format("YYYYMMDD"))
else
@formatted_date_array[m].push(moment(ori_timestamp, "YYYY-MM-DDTHH:mm:ss Z").utc().format())
number_date_array[m].push( moment(ori_timestamp, "YYYY-MM-DDTHH:mm:ss Z").utc().format("YYYYMMDDHHmmss") )
@sorted_number_date_array_d[m] = @sortByFrequency(number_date_array[m])[0]
@sorted_number_date_array_freq[m] = @sortByFrequency(number_date_array[m])[1]
# 2 Build Panels
  generateLeftDates: () ->
    # Build the date-filter panel. Columns where every sample shares one date
    # get a static label; varying columns get an expandable bar chart plus a
    # jQuery UI range slider. Selected bounds are stored as unix "X"
    # timestamps in @range_dates_array[m] = [min, max].
    content = ""
    @range_dates_array = []
    if date_array.length == 0
      $('#att_head_dates').hide()
    else
      if date_array.length > 0
        for m in [0..date_array.length-1]
          if @check_unique(@formatted_date_array[m])
            # single shared date: show it and pin the range to that instant
            $('#dates').append("<div class = 'biom_valid_attr'><p>" + date_array[m] + ": " + @formatted_date_array[m][0] + "</p></div>")
            @range_dates_array[m] = new Array(2)
            @range_dates_array[m][0] = moment(@formatted_date_array[m][0]).utc().format("X")
            @range_dates_array[m][1] = moment(@formatted_date_array[m][0]).utc().format("X")
          else
            content += "<div class = 'biom_valid_attr_dates'>"
            content += date_array[m]
            content += "<div class = 'icon-expand-collapse-c' id= 'expend_collapse_dates_icon_" + (m + 1) + "'><i class='icon-expand-alt'></i></div>"
            # display smaller dates
            # (8-char "YYYYMMDD" strings are date-only; longer ones carry time)
            if @sorted_number_date_array_d[m][0].length < 9
              content += "<p class='range_new_dates' id='range_dates_" + (m+1) + "_new'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDD").format("MM/DD/YY") + " - " + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDD").format("MM/DD/YY") + "</p>"
            else
              content += "<p class='range_new_dates' id='range_dates_" + (m+1) + "_new'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDDHHmmss").format("MM/DD/YY") + " - " + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDDHHmmss").format("MM/DD/YY") + "</p>"
            content += "<div style='display: none;' id = 'expend_collapse_dates_" + (m+1) + "'>" + "<div class= 'biom_valid_att_thumbnail_dates' id='thumb_dates_" + (m+1) + "'></div>"
            content += "<div class='biom_valid_att_slider' id='slider_dates_" + (m+1) + "'></div>"
            if @sorted_number_date_array_d[m][0].length < 9
              content += "<div class='range range_left_dates' id='range_dates_" + (m+1) + "_left'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDD").format("YYYY-MM-DD") + "</div>"
              content += "<div class='range range_right_dates' id='range_dates_" + (m+1) + "_right'>" + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDD").format("YYYY-MM-DD") + "</div>"
              min_timestamp = moment(@sorted_number_date_array_d[m][0], "YYYYMMDD").utc().format("X")
              max_timestamp = moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDD").utc().format("X")
            else
              content += "<div class='range range_left_dates' id='range_dates_" + (m+1) + "_left'>" + moment(@sorted_number_date_array_d[m][0], "YYYYMMDDHHmmss").format("YYYY-MM-DD<br/>HH:mm:ss") + "</div>"
              content += "<div class='range range_right_dates' id='range_dates_" + (m+1) + "_right'>" + moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDDHHmmss").format("YYYY-MM-DD<br/>HH:mm:ss") + "</div>"
              min_timestamp = moment(@sorted_number_date_array_d[m][0], "YYYYMMDDHHmmss Z").utc().format("X")
              max_timestamp = moment(@sorted_number_date_array_d[m][@sorted_number_date_array_d[m].length-1], "YYYYMMDDHHmmss Z").utc().format("X")
            content += "</div></div>"
            $('#dates').append(content)
            # expand / collapse the chart + slider block
            $('#expend_collapse_dates_icon_' + (m + 1) ).click (event) =>
              id = event.currentTarget.id.replace('expend_collapse_dates_icon_','')
              if $('#expend_collapse_dates_' + id).attr('style') == 'display: none;'
                $('#expend_collapse_dates_' + id).show()
                $('#expend_collapse_dates_icon_' + id).html('<i class="icon-collapse-alt"></i>')
              else
                $('#expend_collapse_dates_' + id).hide()
                $('#expend_collapse_dates_icon_' + id).html('<i class="icon-expand-alt"></i>')
            @drawBasicBars( '#thumb_dates_' + (m+1), null, @sorted_number_date_array_freq[m], null, [250, 50] )
            $('#slider_dates_' + (m+1)).width( $('#thumb_dates_' + (m+1) + ' svg').attr('width') - 2 )
            @range_dates_array[m] = new Array(2)
            @range_dates_array[m][0] = min_timestamp
            @range_dates_array[m][1] = max_timestamp
            # slider positions are indices into the sorted unique date list
            $( "#slider_dates_" + (m+1)).slider({
              range: true,
              min: 0,
              max: @sorted_number_date_array_freq[m].length-1,
              step: 1,
              values: [ 0, @sorted_number_date_array_freq[m].length-1 ],
              slide: ( event, ui ) =>
                id = event.target.id.replace("slider_dates_","")
                $("#range_dates_" + id + "_new").text( "[" + moment(@sorted_number_date_array_d[id-1][ui.values[0]], "YYYYMMDD").format("MM/DD/YY") + " — " + moment(@sorted_number_date_array_d[id-1][ui.values[1]], "YYYYMMDD").format("MM/DD/YY") + "]")
                if @sorted_number_date_array_d[id-1][ui.values[0]].length < 9
                  @range_dates_array[id-1][0] = moment(@sorted_number_date_array_d[id-1][ui.values[0]],"YYYYMMDD").utc().format("X")
                  @range_dates_array[id-1][1] = moment(@sorted_number_date_array_d[id-1][ui.values[1]],"YYYYMMDD").utc().format("X")
                else
                  @range_dates_array[id-1][0] = moment(@sorted_number_date_array_d[id-1][ui.values[0]], "YYYYMMDDHHmmss").utc().format("X")
                  @range_dates_array[id-1][1] = moment(@sorted_number_date_array_d[id-1][ui.values[1]], "YYYYMMDDHHmmss").utc().format("X")
                @livePreview()
            })
# find the attributes with numeric values
  generateLeftNumeric: () ->
    # Build one row per numeric attribute: a checkbox, a unit input, small
    # and large chart containers, range labels, and an expand/collapse icon.
    if attributes_array.length == 0
      $('#att_head_numeric').hide()
    else
      if attributes_array.length > 0
        for i in [0..attributes_array.length-1]
          content = ""
          content += "<input type='checkbox' name='numeric_check_group' id='numeric_check_" + (i+1) + "' checked='checked' /><label for='numeric_check_" + (i+1) + "'></label>"
          content += "<span class = 'biom_valid_attr' id='att_" + (i+1) + "'>" + attributes_array[i] + "</span>"
          # unit input pre-filled with the parsed unit when one exists
          if (typeof(attributes_array_units[i]) != 'undefined' && attributes_array_units[i] != null)
            content += "<input type='text' class='biom_valid_attr_units' id='unit_" + (i+1) + "' placeholder='" + attributes_array_units[i] + "'>"
          else
            content += "<input type='text' class='biom_valid_attr_units' id='unit_" + (i+1) + "' placeholder='unit'>"
          content += "<div class = 'icon-expand-collapse-c' id= 'expend_collapse_icon_" + (i+1) + "'><i class='icon-expand-alt'></i></div>"
          content += "<div class='biom_valid_att_thumbnail_sm' id='thumb_sm_" + (i+1) + "'></div>"
          content += "<p class='range range_new' id='range_" + (i+1) + "_new'></p>"
          content += "<div style='display: none;' id = 'expend_collapse_" + (i+1) + "'>" + "<div class='biom_valid_att_thumbnail' id='thumb_" + (i+1) + "'></div>"
          content += "<div class='biom_valid_att_slider' id='slider_" + (i+1) + "'></div>"
          content += "<div class='blackSticks'></div>"
          content += "<p class='range range_left' id='range_" + (i+1) + "_left'></p>"
          content += "<p class='range range_right' id='range_" + (i+1) + "_right'></p>"
          content += "<p class='biom_valid_notes' id='att_note_" + (i+1) + "'></p></div>"
          $('#numeric_att').append("<div>" + content + "</div>")
          # toggle between collapsed (small thumbnail) and expanded (slider) view
          $('#expend_collapse_icon_' + (i+1) ).click (event) =>
            id = event.currentTarget.id.replace('expend_collapse_icon_','')
            if $('#expend_collapse_' + id).attr('style') == 'display: none;'
              $('#expend_collapse_' + id).show()
              $('#att_' + id).css('font-weight', 'bold')
              $('#unit_' + id).show()
              $('#range_' + id + '_new').show()
              $('#thumb_sm_' + id).hide()
              $('#expend_collapse_icon_' + id).html('<i class="icon-collapse-alt"></i>')
            else
              $('#expend_collapse_' + id).hide()
              $('#att_' + id).css('font-weight', 'normal')
              $('#unit_' + id).hide()
              $('#range_' + id + '_new').hide()
              $('#thumb_sm_' + id).show()
              $('#expend_collapse_icon_' + id).html('<i class="icon-expand-alt"></i>')
          $('#numeric_check_' + (i+1) ).click () => @livePreview()
# deals with the non-numeric attributes
generateLeftNonNumeric: () ->
if no_data_attributes_array.length == 0
$('#att_head_descriptive').hide()
else
if no_data_attributes_array.length > 0
for i in [0..no_data_attributes_array.length-1]
content = ""
content += "<input type='checkbox' name='non_numeric_check_group' id='non_numeric_check_" + (i+1) + "' /><label for='non_numeric_check_" + (i+1) + "'></label><span class = 'biom_valid_attr'>" + no_data_attributes_array[i] + "</span>"
$('#non_numeric_att').append("<div>" + content + "</div>")
$('#non_numeric_check_' + (i+1)).click () => @livePreview()
# generate the groupable attributes panel
  generateLeftGroupable: () ->
    # Build the categorical ("groupable") panel. groupable_array_content is a
    # flat list where each column's values are preceded by an integer
    # sentinel equal to its own index, so scanning for content[j] == j finds
    # the start of the next column; pointer_left/pointer_right slice out the
    # values belonging to the current column.
    pointer_left = 1
    pointer_right = groupable_array_content.length-1
    check_count = 1
    if groupable_array.length == 0
      $('#att_head_groupable').hide()
    else
      if groupable_array.length > 0
        for i in [0..groupable_array.length-1]
          flag = true
          toprocess = []
          content = ""
          content += "<span class = 'biom_valid_attr'>" + groupable_array[i] + "</span><br/>"
          # look for the next sentinel; if none is found this is the last column
          if groupable_array_content.length > 0
            for j in [pointer_left..groupable_array_content.length-1]
              if groupable_array_content[j] == j
                pointer_right = j
                flag = false
                break
          if flag
            toprocess = groupable_array_content.slice(pointer_left, groupable_array_content.length)
          else
            toprocess = groupable_array_content.slice(pointer_left, pointer_right)
            pointer_left = pointer_right + 1
            pointer_right = groupable_array_content.length-1
          # one checkbox per category value, all checked by default
          if toprocess.length > 0
            for k in [0..toprocess.length-1]
              content += "<input type='checkbox' name='groupable_check_group' id='groupable_check_" + check_count + "' class='groupable_check' checked='checked' /><label for='groupable_check_" + check_count + "'></label><span class = 'biom_valid_attr_grp'>" + toprocess[k] + "</span><br/>"
              check_count++
          $('#groupable_att').append("<div class='overflowControl'>" + content + "</div>")
        $('.groupable_check').click () => @livePreview()
# generate the thumbnails for users to filter
  generateThumbnails: () ->
    # Draw the per-attribute frequency charts (large + small) and wire up a
    # range slider for each; the chosen bounds live in @range_array[i].
    @range_array = []
    @lines_array = new Array(@columns_metadata_array.length)
    if @columns_metadata_array.length > 0
      step = new Array(@columns_metadata_array.length) # keeps the step value between each bar
      for i in [0..@columns_metadata_array.length-1]
        nan_values = 0
        each_numeric_linechart = @sortByFrequency(@columns_metadata_array[i])
        # -99999 is the "empty value" sentinel; drop it from the chart and
        # surface its count as a note instead
        if each_numeric_linechart[0][0] == -99999
          nan_values = each_numeric_linechart[1][0]
          each_numeric_linechart[0].shift()
          each_numeric_linechart[1].shift()
        if nan_values > 0
          $("#att_note_" + (i+1)).text("* This column has " + nan_values + " empty values.")
        @lines_array[i] = new Array(2)
        @lines_array[i][0] = each_numeric_linechart[0]
        @lines_array[i][1] = each_numeric_linechart[1]
        each_numeric_linechart_min = Math.min.apply(Math, each_numeric_linechart[0])
        each_numeric_linechart_max = Math.max.apply(Math, each_numeric_linechart[0])
        # expanded (250x50) and collapsed (130x15) versions of the same chart
        @drawBasicBars( '#thumb_' + (i+1), each_numeric_linechart[0], each_numeric_linechart[1], null, [250, 50] )
        @drawBasicBars( '#thumb_sm_' + (i+1), each_numeric_linechart[0], each_numeric_linechart[1], null, [130, 15])
        @range_array[i] = new Array(2)
        @range_array[i][0] = each_numeric_linechart_min
        @range_array[i][1] = each_numeric_linechart_max
        step[i] = (each_numeric_linechart_max - each_numeric_linechart_min) / each_numeric_linechart[1].length
        $('#slider_' + (i+1)).width( $('#thumb_' + (i+1) + ' svg').attr('width') - 2 )
        $( "#slider_" + (i+1)).slider({
          range: true,
          min: each_numeric_linechart_min,
          max: each_numeric_linechart_max,
          step: (each_numeric_linechart_max - each_numeric_linechart_min) / each_numeric_linechart[1].length, # step for adjustment, get the min between unit & 1
          values: [ each_numeric_linechart_min, each_numeric_linechart_max ],
          slide: ( event, ui ) =>
            # which handle moved decides whether the left or right bound changes;
            # the raw slider value is snapped to the nearest actual data value
            id = event.target.id.replace("slider_","")
            if ui.value == ui.values[0]
              order = Math.round( (ui.values[ 0 ] - @lines_array[id-1][0][0]) / step[id-1] )
              leftValue = @lines_array[id-1][0][order]
              @range_array[id-1][0] = leftValue # ui.values[0]
              $("#range_" + id + "_left").text( leftValue ).css('margin-left', Math.max( event.clientX - 40, 20) )
              $("#range_" + id + "_new").text( "range: [" + leftValue + " — " + @range_array[id-1][1] + "]")
            else
              order = Math.round( ( ui.values[ 1 ] - @lines_array[id-1][0][0]) / step[id-1] ) - 1
              rightValue = @lines_array[id-1][0][order]
              @range_array[id-1][1] = rightValue # ui.values[1]
              $("#range_" + id + "_right").text( rightValue ).css('margin-left', Math.min( event.clientX - 40, 270) )
              $("#range_" + id + "_new").text( "range: [" + @range_array[id-1][0] + " — " + rightValue + "]")
            $('#numeric_check_' + id).prop('checked', true)
            @drawBasicBars( '#thumb_sm_' + id, @lines_array[id-1][0], @lines_array[id-1][1], @range_array[id-1], [130, 15]) # values - ui.values
            @livePreview()
        })
        $( "#range_" + (i+1) + "_left").text( each_numeric_linechart_min )
        $( "#range_" + (i+1) + "_right").text(each_numeric_linechart_max )
        $( "#range_" + (i+1) + "_new").text("range: [" + each_numeric_linechart_min + " — " + each_numeric_linechart_max + "]" )
# 3 Live Preview, the right section
  livePreview: () ->
    # Recompute @selected_sample from every active filter (numeric ranges,
    # date ranges, groupable categories, zero-count removal) and rebuild the
    # sample table on the right.
    @selected_sample = []
    @selected_groupable_array = []
    @selected_attributes_array = []
    @selected_attributes_units_array = []
    @selected_no_data_attributes_array = []
    selected_range_array = []
    # collect which checkboxes are ticked in each left-hand panel
    if attributes_array.length > 0
      for i in [1..attributes_array.length]
        if $('#numeric_check_' + i).is(':checked')
          @selected_attributes_array.push(attributes_array[i-1])
          @selected_attributes_units_array.push(attributes_array_units[i-1])
    if no_data_attributes_array.length > 0
      for i in [1..no_data_attributes_array.length]
        if $('#non_numeric_check_' + i).is(':checked')
          @selected_no_data_attributes_array.push(no_data_attributes_array[i-1])
    if groupable_array_content.length > 0
      for i in [1..groupable_array_content.length]
        if $('#groupable_check_' + i).is(':checked')
          @selected_groupable_array.push(groupable_array_content[i-1])
    if @range_array.length > 0
      for i in [1..@range_array.length]
        if $('#numeric_check_' + i).is(':checked')
          selected_range_array.push(@range_array[i-1])
    $('#right_live_panel').html('')
    # Step 1
    # start with every sample selected, then remove those that fail a filter
    for i in [0..biom.shape[1]-1]
      @selected_sample.push(i)
    if selected_range_array.length > 0
      for i in [0..selected_range_array.length-1]
        key = @selected_attributes_array[i]
        for r in [0..biom.shape[1]-1]
          if biom.columns[r].metadata[key].split(" ")[0] < selected_range_array[i][0] || biom.columns[r].metadata[key].split(" ")[0] > selected_range_array[i][1]
            delete_index = @selected_sample.indexOf(r)
            if delete_index != -1 then @selected_sample.splice(delete_index,1)
    if date_array.length > 0
      for i in [0..date_array.length-1]
        key = date_array[i]
        for r in [0..biom.shape[1]-1]
          current_timeStamp = biom.columns[r].metadata[key]
          if current_timeStamp.length < 11 # and current_timeStamp.indexOf(":") != -1
            formatted_timeStamp = moment(current_timeStamp).utc().format("X")
          else
            formatted_timeStamp = moment(current_timeStamp, "YYYY-MM-DDTHH:mm:ss Z").utc().format("X")
          if formatted_timeStamp < @range_dates_array[i][0] || formatted_timeStamp > @range_dates_array[i][1]
            delete_index = @selected_sample.indexOf(r)
            if delete_index != -1
              @selected_sample.splice(delete_index,1)
              # console.log 'sample #' + delete_index + ' doesn't meet date range
    # Step 2
    # drop samples whose category value is not among the checked ones;
    # nothing checked means nothing selected
    if groupable_array.length > 0
      for i in [0..groupable_array.length-1]
        for k in [0..biom.shape[1]-1]
          flag = true
          if @selected_groupable_array.length > 0
            for r in [0..@selected_groupable_array.length-1]
              if biom.columns[k].metadata[ groupable_array[i] ] == @selected_groupable_array[r]
                flag = false
                break
            if flag
              delete_index = @selected_sample.indexOf(k)
              if delete_index != -1 then @selected_sample.splice(delete_index,1)
          else if @selected_groupable_array.length == 0
            @selected_sample = []
    # Add one more step here: get rid of _empty_sample_count, leave only the valid samples
    delete_index = []
    if @selected_sample.length > 0
      for i in [0..@selected_sample.length-1]
        flag = true
        if columns_non_empty_sample_count.length > 1
          for j in [0..columns_non_empty_sample_count.length-1]
            if columns_non_empty_sample_count[j] == @selected_sample[i]
              flag = false
              break
        if flag
          delete_index.push(@selected_sample[i])
          console.log 'Sample ' + (i+1) + ' has 0 count'
    if delete_index.length > 0
      for i in [0..delete_index.length-1]
        @selected_sample.splice(@selected_sample.indexOf(delete_index[i]), 1)
    # Step 3 Now based on the filters, selected sample now contains all the right sample # within that range.
    content = "<table id='myTable'><thead><tr><th class = 'headerID myTableHeader'>PHINCH NAME</th><th class = 'headerID myTableHeader'>BIOM SAMPLE ID" + "</th><th class='myTableHeader'>SAMPLE NAME</th><th class='headerCount myTableHeader'>SEQUENCE READS</th></thead>"
    if @selected_sample.length > 0
      for i in [0..@selected_sample.length-1]
        content += '<tr><td contenteditable="true" id="phinchID_' + @selected_sample[i] + '">' + phinchID_array[@selected_sample[i]] + '</td><td>' + (@selected_sample[i] + 1) + '</td><td>' + columns_sample_name_array[@selected_sample[i]] + '</td><td>' + columns_sample_count_list[@selected_sample[i]] + '</td></tr>'
    content += "</table>"
    $("#right_live_panel").html(content)
    $('#myTable').dataTable({
      "iDisplayLength": @selected_sample.length, # 50
      "aaSorting": [[ 1, "asc" ]],
      "oLanguage": {
        # "sLengthMenu": "_MENU_ samples per page",
        "sLengthMenu": "",
        "sZeroRecords": "Nothing found - sorry",
        "sInfo": "Showing _TOTAL_ Samples", # _START_ to _END_ of
        "sInfoEmpty": "Showing 0 Samples", # 0 to 0 of
        "sInfoFiltered": "(filtered from _MAX_ total samples)"
      }
    })
    # keep phinchID_array in sync with inline edits of the first column
    $('#myTable').on('input', 'td[contenteditable]', @editPhinchID );
    $('tr td:first-child').on('mouseover', () -> $(this).addClass('phinchCol') ).on('mouseout', () -> $(this).removeClass('phinchCol') )
    console.log 'selected_sample: ' + @selected_sample.length
# 4 Download button
  downloadPhinch: (param) ->
    # Build a new BIOM object (`phinch`) restricted to the selected samples
    # and attributes, persist it to IndexedDB, then either download it
    # (param == 0) or continue to the gallery (param == 1).
    that = this
    # NOTE(review): this generated_by value looks like an anonymization
    # artifact; confirm the intended string before shipping.
    phinch.generated_by = 'PI:NAME:<NAME>END_PI'
    phinch.date = new Date()
    # Step 1 - get data matrix ready
    phinch_data_matrix = []
    sum_rows = new Array(biom.shape[0])
    for i in [0..biom.shape[0]-1]
      sum_rows[i] = 0
    index = 0
    for i in [0..biom.data.length-1]
      flag = false
      for j in [0..@selected_sample.length-1]
        if biom.data[i][1] == @selected_sample[j] # is selected
          flag = true
          break
      if flag
        # `j` (left by the break above) is the sample's renumbered column index
        phinch_data_matrix[index] = new Array(3)
        phinch_data_matrix[index] = [biom.data[i][0], j ,biom.data[i][2]]
        sum_rows[biom.data[i][0]] += biom.data[i][2]
        index++
    phinch.data = phinch_data_matrix
    # Step 2 - get columns ready
    for i in [0..biom.shape[1]-1]
      # If this is a not selected descriptive attribute, delete it
      for j in [0..no_data_attributes_array.length-1]
        if @selected_no_data_attributes_array.indexOf(no_data_attributes_array[j]) == -1
          @removeFromObjectByKey(phinch.columns[i].metadata, no_data_attributes_array[j])
      # If this is not a selected attributes, delete it
      for k in [0..attributes_array.length-1]
        if @selected_attributes_array.indexOf(attributes_array[k]) == -1
          @removeFromObjectByKey(phinch.columns[i].metadata, attributes_array[k])
      # Add the new phinch Id column back in the file
      phinch.columns[i].metadata['phinchID'] = phinchID_array[i]
    # Step 2'2 - get rid of the deleted columns & also save the units
    tempCol = new Array(@selected_sample.length)
    for i in [0..@selected_sample.length-1]
      tempCol[i] = phinch.columns[@selected_sample[i]]
      # rewrite each value's unit suffix with the user-edited unit text
      if @selected_attributes_units_array.length > 0
        for j in [0..@selected_attributes_array.length-1]
          if $('#unit_' + (j+1) ).val() != ""
            tStr = String(tempCol[i].metadata[@selected_attributes_array[j]]).replace( String(@selected_attributes_units_array[j]), $('#unit_' + (j+1) ).val() )
            tempCol[i].metadata[@selected_attributes_array[j]] = tStr
    phinch.columns = tempCol
    # Step 3 - get rows ready, if sum == 0, get rid of that row
    valid_rows_count = 0
    for i in [0..sum_rows.length-1]
      if parseInt(sum_rows[i]) > 0
        valid_rows_count++
      else
        # zero-count rows keep their slot but get a blank taxonomy
        phinch.rows[i].metadata.taxonomy = ["k__", "p__", "c__", "o__", "f__", "g__", "s__"]
    # console.log valid_rows_count # not change the shape[0], cuz otherwise we have to change all the row numbers
    # phinch.shape[0] = valid_rows_count
    phinch.shape[1] = @selected_sample.length
    obj = JSON.stringify(phinch)
    blob = new Blob([obj], {type: "text/plain;charset=utf-8"})
    biomToStore = {}
    biomToStore.name = filename
    biomToStore.size = blob.size
    biomToStore.data = obj
    d = new Date();
    biomToStore.date = d.getUTCFullYear() + "-" + (d.getUTCMonth() + 1) + "-" + d.getUTCDate() + "T" + d.getUTCHours() + ":" + d.getUTCMinutes() + ":" + d.getUTCSeconds() + " UTC"
    @server.biom.add(biomToStore).done () ->
      # Step 4 - stringify
      if param == 0 # Download
        saveAs(blob, filename)
      # Step 5 - jump to gallery
      else if param == 1
        that.jumpToGallery()
# 5 Utilities & Control Parts
check_unique: (arr) ->
arr = $.grep arr, (v, k) ->
return $.inArray(v ,arr) is k
if arr.length == 1 then return true else return false
sortByFrequency: (arr) ->
a = []
b = []
arr.sort(@numberSort)
for i in [0..arr.length-1]
if arr[i] != prev
a.push(arr[i])
b.push(1)
else
b[b.length-1]++
prev = arr[i]
return [a,b]
numberSort: (a,b) -> return a - b
removeFromObjectByKey: (thisObject, key) -> delete thisObject[key]
editPhinchID: () -> # make the PhinchID column editable
changedID = parseInt( $(this)[0].id.replace('phinchID_','') )
phinchID_array[changedID] = $(this).html()
# draw basic bar chart on the canvas
drawBasicBars: (div, each_numeric_linechart0, each_numeric_linechart1, values, size) =>
d3.select(div + " svg").remove()
max_single = d3.max( each_numeric_linechart1 )
y = d3.scale.linear().domain([0, max_single]).range([1, size[1] ])
eachBarWidth = (size[0] + 2) / each_numeric_linechart1.length - 2
tooltipOverPanel = d3.select(div)
.append("div")
.attr('class', 'basicTooltip')
.style("visibility", "hidden")
tempViz = d3.select(div).append("svg")
.attr("width", size[0] )
.attr("height", size[1] )
tempBar = tempViz.selectAll('rect').data(each_numeric_linechart1)
.enter().append("rect")
.attr('height', (d) -> return y(d) )
.attr('width', Math.max(0.1, eachBarWidth) + 'px')
.attr('x', (d,i) -> return i * (eachBarWidth + 2) )
.attr('y', (d,i) -> return size[1] - y(d) )
.attr('fill', (d,i) ->
if values == null
return '#919396'
else if values != null and each_numeric_linechart0[i] >= values[0] and each_numeric_linechart0[i] <= values[1]
return '#919396'
else
return '#e5e6e7'
)
.on('mouseover', (d,i) ->
tooltipOverPanel.html( 'Value: ' + each_numeric_linechart0[i] + ', Freq: ' + d )
tooltipOverPanel.style( { "visibility": "visible", top: (d3.event.pageY ) + "px", left: (d3.event.pageX + 10) + "px" })
)
.on('mouseout', (d) ->
tooltipOverPanel.style("visibility", "hidden")
)
window.filter = filter
|
[
{
"context": "tSettings, (err, client) ->\n client.addUser 'Fred', 'Flintstone', (err) ->\n unless err?.code",
"end": 334,
"score": 0.9858277440071106,
"start": 330,
"tag": "USERNAME",
"value": "Fred"
},
{
"context": "s, (err, client) ->\n client.addUser 'Fred', 'Fli... | test/connect.coffee | o2r-project/mongo-watch | 34 | should = require 'should'
_ = require 'lodash'
connect = require '../lib/connect'
testSettings = {host: 'localhost', port: 27017, db: 'admin', dbOpts: {w: 1, journal: true}}
describe 'connect', ->
before (done) ->
# add a user that we can connect to later
connect testSettings, (err, client) ->
client.addUser 'Fred', 'Flintstone', (err) ->
unless err?.code is 11000 # don't care about duplicate
should.not.exist err
done()
it 'should connect anonymous', (done) ->
connect testSettings, (err, client) ->
should.not.exist err
should.exist client
client.close()
done()
it 'should connect with username/password', (done) ->
credentials = {username: 'Fred', password: 'Flintstone'}
opts = _.merge {}, testSettings, credentials
connect opts, (err, client) ->
should.not.exist err
should.exist client
client.close()
done()
| 72781 | should = require 'should'
_ = require 'lodash'
connect = require '../lib/connect'
testSettings = {host: 'localhost', port: 27017, db: 'admin', dbOpts: {w: 1, journal: true}}
describe 'connect', ->
before (done) ->
# add a user that we can connect to later
connect testSettings, (err, client) ->
client.addUser 'Fred', 'Flintstone', (err) ->
unless err?.code is 11000 # don't care about duplicate
should.not.exist err
done()
it 'should connect anonymous', (done) ->
connect testSettings, (err, client) ->
should.not.exist err
should.exist client
client.close()
done()
it 'should connect with username/password', (done) ->
credentials = {username: 'Fred', password: '<PASSWORD>'}
opts = _.merge {}, testSettings, credentials
connect opts, (err, client) ->
should.not.exist err
should.exist client
client.close()
done()
| true | should = require 'should'
_ = require 'lodash'
connect = require '../lib/connect'
testSettings = {host: 'localhost', port: 27017, db: 'admin', dbOpts: {w: 1, journal: true}}
describe 'connect', ->
before (done) ->
# add a user that we can connect to later
connect testSettings, (err, client) ->
client.addUser 'Fred', 'Flintstone', (err) ->
unless err?.code is 11000 # don't care about duplicate
should.not.exist err
done()
it 'should connect anonymous', (done) ->
connect testSettings, (err, client) ->
should.not.exist err
should.exist client
client.close()
done()
it 'should connect with username/password', (done) ->
credentials = {username: 'Fred', password: 'PI:PASSWORD:<PASSWORD>END_PI'}
opts = _.merge {}, testSettings, credentials
connect opts, (err, client) ->
should.not.exist err
should.exist client
client.close()
done()
|
[
{
"context": "# Try POH\n# author: Leonardone @ NEETSDKASU\n\nprocess.stdin.resume()\nprocess.stdi",
"end": 30,
"score": 0.9996099472045898,
"start": 20,
"tag": "NAME",
"value": "Leonardone"
},
{
"context": "# Try POH\n# author: Leonardone @ NEETSDKASU\n\nprocess.stdin.resume()\nproc... | POH7/Megane/Main.coffee | neetsdkasu/Paiza-POH-MyAnswers | 3 | # Try POH
# author: Leonardone @ NEETSDKASU
process.stdin.resume()
process.stdin.setEncoding 'utf8'
process.stdin.on 'data', (data) ->
lines = data.toString().trim().split('\n').map (s) -> s.split(' ').map (x) -> parseInt x
[n] = lines.shift()
q = lines[0...n]
p = lines[n..]
[m] = p.shift()
for i in [0..n-m]
for j in [0..n-m]
flag = true
for y in [0...m]
for x in [0...m]
flag &= q[i+y][j+x] == p[y][x]
if flag
console.log i + ' ' + j
| 30052 | # Try POH
# author: <NAME> @ NEETSDKASU
process.stdin.resume()
process.stdin.setEncoding 'utf8'
process.stdin.on 'data', (data) ->
lines = data.toString().trim().split('\n').map (s) -> s.split(' ').map (x) -> parseInt x
[n] = lines.shift()
q = lines[0...n]
p = lines[n..]
[m] = p.shift()
for i in [0..n-m]
for j in [0..n-m]
flag = true
for y in [0...m]
for x in [0...m]
flag &= q[i+y][j+x] == p[y][x]
if flag
console.log i + ' ' + j
| true | # Try POH
# author: PI:NAME:<NAME>END_PI @ NEETSDKASU
process.stdin.resume()
process.stdin.setEncoding 'utf8'
process.stdin.on 'data', (data) ->
lines = data.toString().trim().split('\n').map (s) -> s.split(' ').map (x) -> parseInt x
[n] = lines.shift()
q = lines[0...n]
p = lines[n..]
[m] = p.shift()
for i in [0..n-m]
for j in [0..n-m]
flag = true
for y in [0...m]
for x in [0...m]
flag &= q[i+y][j+x] == p[y][x]
if flag
console.log i + ' ' + j
|
[
{
"context": "-dev-test'\n\n SettingsHelper.setCredentials 'foo@bar.baz', '0123456789abcdef0123456789abcdef'\n\n ",
"end": 755,
"score": 0.7673155665397644,
"start": 755,
"tag": "EMAIL",
"value": ""
},
{
"context": "-test'\n\n SettingsHelper.setCredentials 'foo@bar.baz', '01... | spec/views/listening-mode-view-spec.coffee | lukehoban/spark-dev | 1 | {WorkspaceView} = require 'atom'
$ = require('atom').$
SettingsHelper = require '../../lib/utils/settings-helper'
SerialHelper = require '../../lib/utils/serial-helper'
require 'serialport'
describe 'Listening Mode View', ->
activationPromise = null
sparkIde = null
listeningModeView = null
originalProfile = null
beforeEach ->
atom.workspaceView = new WorkspaceView
activationPromise = atom.packages.activatePackage('spark-dev').then ({mainModule}) ->
sparkIde = mainModule
sparkIde.listeningModeView = null
originalProfile = SettingsHelper.getProfile()
# For tests not to mess up our profile, we have to switch to test one...
SettingsHelper.setProfile 'spark-dev-test'
SettingsHelper.setCredentials 'foo@bar.baz', '0123456789abcdef0123456789abcdef'
# Mock serial
require.cache[require.resolve('serialport')].exports = require('spark-dev-spec-stubs').serialportNoPorts
waitsForPromise ->
activationPromise
afterEach ->
SettingsHelper.clearCredentials()
SettingsHelper.setProfile originalProfile
describe '', ->
it 'tests hiding and showing', ->
# Test core:cancel
sparkIde.identifyCore()
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).toExist()
atom.workspaceView.trigger 'core:cancel'
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).not.toExist()
# Test core:close
sparkIde.identifyCore()
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).toExist()
atom.workspaceView.trigger 'core:close'
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).not.toExist()
# Test cancel button
sparkIde.identifyCore()
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).toExist()
listeningModeView = sparkIde.listeningModeView
listeningModeView.find('button').click()
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).not.toExist()
it 'tests interval for dialog dismissal', ->
jasmine.Clock.useMock()
sparkIde.identifyCore()
spyOn SerialHelper, 'listPorts'
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(SerialHelper.listPorts).not.toHaveBeenCalled()
jasmine.Clock.tick(1001)
expect(SerialHelper.listPorts).toHaveBeenCalled()
jasmine.unspy SerialHelper, 'listPorts'
atom.workspaceView.trigger 'core:cancel'
| 157623 | {WorkspaceView} = require 'atom'
$ = require('atom').$
SettingsHelper = require '../../lib/utils/settings-helper'
SerialHelper = require '../../lib/utils/serial-helper'
require 'serialport'
describe 'Listening Mode View', ->
activationPromise = null
sparkIde = null
listeningModeView = null
originalProfile = null
beforeEach ->
atom.workspaceView = new WorkspaceView
activationPromise = atom.packages.activatePackage('spark-dev').then ({mainModule}) ->
sparkIde = mainModule
sparkIde.listeningModeView = null
originalProfile = SettingsHelper.getProfile()
# For tests not to mess up our profile, we have to switch to test one...
SettingsHelper.setProfile 'spark-dev-test'
SettingsHelper.setCredentials 'foo<EMAIL>@bar.<EMAIL>', '0123456789abcdef0123456789abcdef'
# Mock serial
require.cache[require.resolve('serialport')].exports = require('spark-dev-spec-stubs').serialportNoPorts
waitsForPromise ->
activationPromise
afterEach ->
SettingsHelper.clearCredentials()
SettingsHelper.setProfile originalProfile
describe '', ->
it 'tests hiding and showing', ->
# Test core:cancel
sparkIde.identifyCore()
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).toExist()
atom.workspaceView.trigger 'core:cancel'
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).not.toExist()
# Test core:close
sparkIde.identifyCore()
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).toExist()
atom.workspaceView.trigger 'core:close'
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).not.toExist()
# Test cancel button
sparkIde.identifyCore()
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).toExist()
listeningModeView = sparkIde.listeningModeView
listeningModeView.find('button').click()
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).not.toExist()
it 'tests interval for dialog dismissal', ->
jasmine.Clock.useMock()
sparkIde.identifyCore()
spyOn SerialHelper, 'listPorts'
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(SerialHelper.listPorts).not.toHaveBeenCalled()
jasmine.Clock.tick(1001)
expect(SerialHelper.listPorts).toHaveBeenCalled()
jasmine.unspy SerialHelper, 'listPorts'
atom.workspaceView.trigger 'core:cancel'
| true | {WorkspaceView} = require 'atom'
$ = require('atom').$
SettingsHelper = require '../../lib/utils/settings-helper'
SerialHelper = require '../../lib/utils/serial-helper'
require 'serialport'
describe 'Listening Mode View', ->
activationPromise = null
sparkIde = null
listeningModeView = null
originalProfile = null
beforeEach ->
atom.workspaceView = new WorkspaceView
activationPromise = atom.packages.activatePackage('spark-dev').then ({mainModule}) ->
sparkIde = mainModule
sparkIde.listeningModeView = null
originalProfile = SettingsHelper.getProfile()
# For tests not to mess up our profile, we have to switch to test one...
SettingsHelper.setProfile 'spark-dev-test'
SettingsHelper.setCredentials 'fooPI:EMAIL:<EMAIL>END_PI@bar.PI:EMAIL:<EMAIL>END_PI', '0123456789abcdef0123456789abcdef'
# Mock serial
require.cache[require.resolve('serialport')].exports = require('spark-dev-spec-stubs').serialportNoPorts
waitsForPromise ->
activationPromise
afterEach ->
SettingsHelper.clearCredentials()
SettingsHelper.setProfile originalProfile
describe '', ->
it 'tests hiding and showing', ->
# Test core:cancel
sparkIde.identifyCore()
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).toExist()
atom.workspaceView.trigger 'core:cancel'
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).not.toExist()
# Test core:close
sparkIde.identifyCore()
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).toExist()
atom.workspaceView.trigger 'core:close'
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).not.toExist()
# Test cancel button
sparkIde.identifyCore()
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).toExist()
listeningModeView = sparkIde.listeningModeView
listeningModeView.find('button').click()
expect(atom.workspaceView.find('#spark-dev-listening-mode-view')).not.toExist()
it 'tests interval for dialog dismissal', ->
jasmine.Clock.useMock()
sparkIde.identifyCore()
spyOn SerialHelper, 'listPorts'
waitsFor ->
!sparkIde.listPortsPromise
runs ->
expect(SerialHelper.listPorts).not.toHaveBeenCalled()
jasmine.Clock.tick(1001)
expect(SerialHelper.listPorts).toHaveBeenCalled()
jasmine.unspy SerialHelper, 'listPorts'
atom.workspaceView.trigger 'core:cancel'
|
[
{
"context": "ort: 0xd00d\n uuid: 'peter'\n token: 'i-could-eat'\n privateKey: @privateKey\n appOctoblu",
"end": 1614,
"score": 0.9713614583015442,
"start": 1603,
"tag": "PASSWORD",
"value": "i-could-eat"
}
] | test/integration/form-schema-spec.coffee | octoblu/endo-lib | 0 | {afterEach, beforeEach, describe, it} = global
{expect} = require 'chai'
sinon = require 'sinon'
fs = require 'fs'
Encryption = require 'meshblu-encryption'
request = require 'request'
enableDestroy = require 'server-destroy'
shmock = require 'shmock'
MockStrategy = require '../mock-strategy'
Server = require '../..'
describe 'form schema', ->
beforeEach (done) ->
@privateKey = fs.readFileSync "#{__dirname}/../data/private-key.pem", 'utf8'
@encryption = Encryption.fromPem @privateKey
encrypted =
secrets:
credentials:
secret: 'this is secret'
@encrypted = @encryption.encrypt encrypted
@publicKey = @encryption.key.exportKey 'public'
@meshblu = shmock 0xd00d
enableDestroy @meshblu
@apiStrategy = new MockStrategy name: 'api'
@octobluStrategy = new MockStrategy name: 'octoblu'
@messageHandler = formSchema: sinon.stub()
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic cGV0ZXI6aS1jb3VsZC1lYXQ="
.reply 200, {
options:
imageUrl: "http://this-is-an-image.exe"
}
@meshblu
.get '/publickey'
.reply 200, {@publicKey}
serverOptions =
logFn: ->
port: undefined,
disableLogging: true
apiStrategy: @apiStrategy
octobluStrategy: @octobluStrategy
messageHandler: @messageHandler
serviceUrl: 'http://octoblu.xxx'
deviceType: 'endo-endor'
meshbluConfig:
hostname: 'localhost'
protocol: 'http'
port: 0xd00d
uuid: 'peter'
token: 'i-could-eat'
privateKey: @privateKey
appOctobluHost: 'http://app.octoblu.bikes'
userDeviceManagerUrl: 'http://manage-my.endo'
meshbluPublicKeyUri: 'http://localhost:53261/publickey'
healthcheckService: healthcheck: =>
@server = new Server serverOptions
@server.run (error) =>
return done error if error?
@serverPort = @server.address().port
done()
afterEach (done) ->
@server.stop done
afterEach (done) ->
@meshblu.destroy done
describe 'On GET /v1/form-schema', ->
describe 'when the message-handler yields an empty object', ->
beforeEach (done) ->
@messageHandler.formSchema.yields null, {}
options =
baseUrl: "http://localhost:#{@serverPort}"
json: true
request.get '/v1/form-schema', options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200, JSON.stringify @body
it 'should return the empty object', ->
expect(@body).to.deep.equal {}
describe 'when the message-handler yields a larger schema', ->
beforeEach (done) ->
@messageHandler.formSchema.yields null, {
doSomething:
type: 'object'
required: ['name', 'color']
properties:
name:
type: 'string'
color:
type: 'string'
}
options =
baseUrl: "http://localhost:#{@serverPort}"
json: true
request.get '/v1/form-schema', options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200, JSON.stringify @body
it 'should return the schema', ->
expect(@body).to.deep.equal {
doSomething:
type: 'object'
required: ['name', 'color']
properties:
name:
type: 'string'
color:
type: 'string'
}
describe 'when the message-handler yields an error', ->
beforeEach (done) ->
error = new Error 'Something is awry'
error.code = 418
@messageHandler.formSchema.yields error
options =
baseUrl: "http://localhost:#{@serverPort}"
json: true
request.get '/v1/form-schema', options, (error, @response, @body) =>
done error
it 'should return a 418', ->
expect(@response.statusCode).to.equal 418, JSON.stringify @body
it 'should return the schema', ->
expect(@body).to.deep.equal error: 'Something is awry'
| 136713 | {afterEach, beforeEach, describe, it} = global
{expect} = require 'chai'
sinon = require 'sinon'
fs = require 'fs'
Encryption = require 'meshblu-encryption'
request = require 'request'
enableDestroy = require 'server-destroy'
shmock = require 'shmock'
MockStrategy = require '../mock-strategy'
Server = require '../..'
describe 'form schema', ->
beforeEach (done) ->
@privateKey = fs.readFileSync "#{__dirname}/../data/private-key.pem", 'utf8'
@encryption = Encryption.fromPem @privateKey
encrypted =
secrets:
credentials:
secret: 'this is secret'
@encrypted = @encryption.encrypt encrypted
@publicKey = @encryption.key.exportKey 'public'
@meshblu = shmock 0xd00d
enableDestroy @meshblu
@apiStrategy = new MockStrategy name: 'api'
@octobluStrategy = new MockStrategy name: 'octoblu'
@messageHandler = formSchema: sinon.stub()
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic cGV0ZXI6aS1jb3VsZC1lYXQ="
.reply 200, {
options:
imageUrl: "http://this-is-an-image.exe"
}
@meshblu
.get '/publickey'
.reply 200, {@publicKey}
serverOptions =
logFn: ->
port: undefined,
disableLogging: true
apiStrategy: @apiStrategy
octobluStrategy: @octobluStrategy
messageHandler: @messageHandler
serviceUrl: 'http://octoblu.xxx'
deviceType: 'endo-endor'
meshbluConfig:
hostname: 'localhost'
protocol: 'http'
port: 0xd00d
uuid: 'peter'
token: '<PASSWORD>'
privateKey: @privateKey
appOctobluHost: 'http://app.octoblu.bikes'
userDeviceManagerUrl: 'http://manage-my.endo'
meshbluPublicKeyUri: 'http://localhost:53261/publickey'
healthcheckService: healthcheck: =>
@server = new Server serverOptions
@server.run (error) =>
return done error if error?
@serverPort = @server.address().port
done()
afterEach (done) ->
@server.stop done
afterEach (done) ->
@meshblu.destroy done
describe 'On GET /v1/form-schema', ->
describe 'when the message-handler yields an empty object', ->
beforeEach (done) ->
@messageHandler.formSchema.yields null, {}
options =
baseUrl: "http://localhost:#{@serverPort}"
json: true
request.get '/v1/form-schema', options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200, JSON.stringify @body
it 'should return the empty object', ->
expect(@body).to.deep.equal {}
describe 'when the message-handler yields a larger schema', ->
beforeEach (done) ->
@messageHandler.formSchema.yields null, {
doSomething:
type: 'object'
required: ['name', 'color']
properties:
name:
type: 'string'
color:
type: 'string'
}
options =
baseUrl: "http://localhost:#{@serverPort}"
json: true
request.get '/v1/form-schema', options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200, JSON.stringify @body
it 'should return the schema', ->
expect(@body).to.deep.equal {
doSomething:
type: 'object'
required: ['name', 'color']
properties:
name:
type: 'string'
color:
type: 'string'
}
describe 'when the message-handler yields an error', ->
beforeEach (done) ->
error = new Error 'Something is awry'
error.code = 418
@messageHandler.formSchema.yields error
options =
baseUrl: "http://localhost:#{@serverPort}"
json: true
request.get '/v1/form-schema', options, (error, @response, @body) =>
done error
it 'should return a 418', ->
expect(@response.statusCode).to.equal 418, JSON.stringify @body
it 'should return the schema', ->
expect(@body).to.deep.equal error: 'Something is awry'
| true | {afterEach, beforeEach, describe, it} = global
{expect} = require 'chai'
sinon = require 'sinon'
fs = require 'fs'
Encryption = require 'meshblu-encryption'
request = require 'request'
enableDestroy = require 'server-destroy'
shmock = require 'shmock'
MockStrategy = require '../mock-strategy'
Server = require '../..'
describe 'form schema', ->
beforeEach (done) ->
@privateKey = fs.readFileSync "#{__dirname}/../data/private-key.pem", 'utf8'
@encryption = Encryption.fromPem @privateKey
encrypted =
secrets:
credentials:
secret: 'this is secret'
@encrypted = @encryption.encrypt encrypted
@publicKey = @encryption.key.exportKey 'public'
@meshblu = shmock 0xd00d
enableDestroy @meshblu
@apiStrategy = new MockStrategy name: 'api'
@octobluStrategy = new MockStrategy name: 'octoblu'
@messageHandler = formSchema: sinon.stub()
@meshblu
.get '/v2/whoami'
.set 'Authorization', "Basic cGV0ZXI6aS1jb3VsZC1lYXQ="
.reply 200, {
options:
imageUrl: "http://this-is-an-image.exe"
}
@meshblu
.get '/publickey'
.reply 200, {@publicKey}
serverOptions =
logFn: ->
port: undefined,
disableLogging: true
apiStrategy: @apiStrategy
octobluStrategy: @octobluStrategy
messageHandler: @messageHandler
serviceUrl: 'http://octoblu.xxx'
deviceType: 'endo-endor'
meshbluConfig:
hostname: 'localhost'
protocol: 'http'
port: 0xd00d
uuid: 'peter'
token: 'PI:PASSWORD:<PASSWORD>END_PI'
privateKey: @privateKey
appOctobluHost: 'http://app.octoblu.bikes'
userDeviceManagerUrl: 'http://manage-my.endo'
meshbluPublicKeyUri: 'http://localhost:53261/publickey'
healthcheckService: healthcheck: =>
@server = new Server serverOptions
@server.run (error) =>
return done error if error?
@serverPort = @server.address().port
done()
afterEach (done) ->
@server.stop done
afterEach (done) ->
@meshblu.destroy done
describe 'On GET /v1/form-schema', ->
describe 'when the message-handler yields an empty object', ->
beforeEach (done) ->
@messageHandler.formSchema.yields null, {}
options =
baseUrl: "http://localhost:#{@serverPort}"
json: true
request.get '/v1/form-schema', options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200, JSON.stringify @body
it 'should return the empty object', ->
expect(@body).to.deep.equal {}
describe 'when the message-handler yields a larger schema', ->
beforeEach (done) ->
@messageHandler.formSchema.yields null, {
doSomething:
type: 'object'
required: ['name', 'color']
properties:
name:
type: 'string'
color:
type: 'string'
}
options =
baseUrl: "http://localhost:#{@serverPort}"
json: true
request.get '/v1/form-schema', options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200, JSON.stringify @body
it 'should return the schema', ->
expect(@body).to.deep.equal {
doSomething:
type: 'object'
required: ['name', 'color']
properties:
name:
type: 'string'
color:
type: 'string'
}
describe 'when the message-handler yields an error', ->
beforeEach (done) ->
error = new Error 'Something is awry'
error.code = 418
@messageHandler.formSchema.yields error
options =
baseUrl: "http://localhost:#{@serverPort}"
json: true
request.get '/v1/form-schema', options, (error, @response, @body) =>
done error
it 'should return a 418', ->
expect(@response.statusCode).to.equal 418, JSON.stringify @body
it 'should return the schema', ->
expect(@body).to.deep.equal error: 'Something is awry'
|
[
{
"context": "+ 'signer' + config.secret)\n\n hmacKey = shasum.digest()\n\n hmac = crypto.createHmac('sha1', hmacKey)\n",
"end": 662,
"score": 0.5737859010696411,
"start": 656,
"tag": "KEY",
"value": "digest"
}
] | signing.coffee | threefoldtech/Threefold-Circles-events | 0 | crypto = require('crypto')
base64url = require('base64-url')
config = require('./events-config').config
salt = 'django.core.signing'
rsplit = (token, sep, maxsplit) ->
split = token.split(sep)
if maxsplit
return [ split.slice(0, -maxsplit).join(sep) ].concat(split.slice(-maxsplit))
return split
exports.getUserId = (token) ->
value = token.split(':')[0]
value = JSON.parse(base64url.decode(value))
return value?.user_authentication_id
exports.verify = (token) ->
[value, sig] = rsplit(token, ':', 1)
shasum = crypto.createHash('sha1')
shasum.update(salt + 'signer' + config.secret)
hmacKey = shasum.digest()
hmac = crypto.createHmac('sha1', hmacKey)
hmac.setEncoding('base64')
hmac.update(value)
key = base64url.escape(hmac.digest('base64'))
return key == sig
| 194055 | crypto = require('crypto')
base64url = require('base64-url')
config = require('./events-config').config
salt = 'django.core.signing'
rsplit = (token, sep, maxsplit) ->
split = token.split(sep)
if maxsplit
return [ split.slice(0, -maxsplit).join(sep) ].concat(split.slice(-maxsplit))
return split
exports.getUserId = (token) ->
value = token.split(':')[0]
value = JSON.parse(base64url.decode(value))
return value?.user_authentication_id
exports.verify = (token) ->
[value, sig] = rsplit(token, ':', 1)
shasum = crypto.createHash('sha1')
shasum.update(salt + 'signer' + config.secret)
hmacKey = shasum.<KEY>()
hmac = crypto.createHmac('sha1', hmacKey)
hmac.setEncoding('base64')
hmac.update(value)
key = base64url.escape(hmac.digest('base64'))
return key == sig
| true | crypto = require('crypto')
base64url = require('base64-url')
config = require('./events-config').config
salt = 'django.core.signing'
rsplit = (token, sep, maxsplit) ->
split = token.split(sep)
if maxsplit
return [ split.slice(0, -maxsplit).join(sep) ].concat(split.slice(-maxsplit))
return split
exports.getUserId = (token) ->
value = token.split(':')[0]
value = JSON.parse(base64url.decode(value))
return value?.user_authentication_id
exports.verify = (token) ->
[value, sig] = rsplit(token, ':', 1)
shasum = crypto.createHash('sha1')
shasum.update(salt + 'signer' + config.secret)
hmacKey = shasum.PI:KEY:<KEY>END_PI()
hmac = crypto.createHmac('sha1', hmacKey)
hmac.setEncoding('base64')
hmac.update(value)
key = base64url.escape(hmac.digest('base64'))
return key == sig
|
[
{
"context": "0,t:@screenSize.h,l:0,r:@screenSize.w}\n @name=\"plrrr\"\n\n @state = 0\n\n start: =>\n if @state == 0\n",
"end": 457,
"score": 0.9976305365562439,
"start": 452,
"tag": "USERNAME",
"value": "plrrr"
},
{
"context": "ver.sizey)},{x:0,y:0},\"plr0\")\n @sprit... | lib/universe_player.coffee | tomdionysus/1945 | 0 | _ = require 'underscore'
{Sprite} = require "../lib/universe_sprite"
{Bullet} = require "../lib/universe_bullet"
{CompGeo} = require "../lib/universe_compgeo"
exports.Player = class Player
constructor: (@socket, @server) ->
@img = 0
@screenSize = {w:800, h:600}
@score = 0
@health = 100
@bearing = 180
@speed = 50
@lastFire = 0
@fireLimit = 300
@lastBounds = {b:0,t:@screenSize.h,l:0,r:@screenSize.w}
@name="plrrr"
@state = 0
start: =>
if @state == 0
@state = 1
@sprite = new Sprite(@server,{x:Math.round(Math.random()*@server.sizex), y:Math.round(Math.random()*@server.sizey)},{x:0,y:0},"plr0")
@sprite.name = @name
@sprite.player = @
@sprite.shootable = true
@sprite.collisionIn = (sprite) =>
@hit() if sprite.bullet and sprite.owner != @sprite.id
@health = 100
@fireMode = 0
@lowerSpeedLimit = 40
@upperSpeedLimit = 200
@sprite.bearing = @bearing
@server.addSprite(@sprite)
die: =>
console.log("player died")
p = @sprite.getCentrePoint()
s = {x:@sprite.speed.x, y:@sprite.speed.y}
r = [100,200,300,400,500]
for y in r
_.delay( =>
ps = {x:p.x-16+Math.round(Math.random()*32),y:p.y-16+Math.round(Math.random()*32)}
@server.explosionAt(ps, s ,'expl', @sprite)
,y)
_.delay( =>
@sprite.destroy()
@state = 0
@lastBounds = @getScreenBounds()
,300)
getState: =>
x =
t:'ps'
id:@socket.id
sc:@score
hl:@health
st:@state
if @sprite?
x.pos = @sprite.pos
x.sp = @sprite.speed
x.br = @bearing
x.cs = @sprite.cls
x
getScreenBounds: =>
if @sprite?
w2 = (@screenSize.w/2)*1.1
h2 = (@screenSize.h/2)*1.1
{b:@sprite.pos.y-h2,t:@sprite.pos.y+h2,l:@sprite.pos.x-w2,r:@sprite.pos.x+w2}
else
@lastBounds
update: (t) =>
if @sprite?
@sprite.speed = CompGeo.speedAtBearing(@bearing, @speed)
hit: =>
s = @sprite
s.cls = "plr0-hit"
_.delay(->
s.cls = "plr0" if s?
,200)
@server.explosionAt(@sprite.getCentrePoint(), {x:@sprite.speed.x/2,y:@sprite.speed.y/2} ,'exps', null)
@health = Math.max(@health-10,0)
@die() if @health == 0
turnLeft: =>
if @sprite?
@sprite.bearing = @bearing = CompGeo.fixBearing(@bearing-2)
turnRight: =>
if @sprite?
@sprite.bearing = @bearing = CompGeo.fixBearing(@bearing+2)
slowDown: =>
@speed = Math.max(@speed-2,@lowerSpeedLimit)
speedUp: =>
@speed = Math.min(@speed+2,@upperSpeedLimit)
destroy: =>
if @sprite?
@sprite.destroy()
delete @['sprite']
fire: =>
t = new Date().getTime()
return if @lastFire? and @lastFire > t-@fireLimit
x = new Bullet(@server, @sprite.getCentrePoint(), CompGeo.speedAtBearing(@bearing,@speed+300), "blt0" )
x.bearing = @bearing
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
if @fireMode >0
s1 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing-90,@speed+300), @sprite.speed)
s2 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing+90,@speed+300), @sprite.speed)
x = new Bullet(@server, @sprite.getCentrePoint(), s1, "blt1" )
x.bearing = @bearing-90
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
x = new Bullet(@server, @sprite.getCentrePoint(), s2, "blt1" )
x.bearing = @bearing+90
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
if @fireMode >1
s1 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing-180,@speed+300), @sprite.speed)
x = new Bullet(@server, @sprite.getCentrePoint(), s1, "blt1" )
x.bearing = @bearing-180
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
if @fireMode >2
s1 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing-45,@speed+300), @sprite.speed)
s2 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing+45,@speed+300), @sprite.speed)
x = new Bullet(@server, @sprite.getCentrePoint(), s1, "blt0" )
x.bearing = @bearing-45
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
x = new Bullet(@server, @sprite.getCentrePoint(), s2, "blt0" )
x.bearing = @bearing+45
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
@lastFire = t
copyVector: (v) ->
{x:v.x,y:v.y}
| 120287 | _ = require 'underscore'
{Sprite} = require "../lib/universe_sprite"
{Bullet} = require "../lib/universe_bullet"
{CompGeo} = require "../lib/universe_compgeo"
exports.Player = class Player
constructor: (@socket, @server) ->
@img = 0
@screenSize = {w:800, h:600}
@score = 0
@health = 100
@bearing = 180
@speed = 50
@lastFire = 0
@fireLimit = 300
@lastBounds = {b:0,t:@screenSize.h,l:0,r:@screenSize.w}
@name="plrrr"
@state = 0
start: =>
if @state == 0
@state = 1
@sprite = new Sprite(@server,{x:Math.round(Math.random()*@server.sizex), y:Math.round(Math.random()*@server.sizey)},{x:0,y:0},"plr0")
@sprite.name =<NAME> @name
@sprite.player = @
@sprite.shootable = true
@sprite.collisionIn = (sprite) =>
@hit() if sprite.bullet and sprite.owner != @sprite.id
@health = 100
@fireMode = 0
@lowerSpeedLimit = 40
@upperSpeedLimit = 200
@sprite.bearing = @bearing
@server.addSprite(@sprite)
die: =>
console.log("player died")
p = @sprite.getCentrePoint()
s = {x:@sprite.speed.x, y:@sprite.speed.y}
r = [100,200,300,400,500]
for y in r
_.delay( =>
ps = {x:p.x-16+Math.round(Math.random()*32),y:p.y-16+Math.round(Math.random()*32)}
@server.explosionAt(ps, s ,'expl', @sprite)
,y)
_.delay( =>
@sprite.destroy()
@state = 0
@lastBounds = @getScreenBounds()
,300)
getState: =>
x =
t:'ps'
id:@socket.id
sc:@score
hl:@health
st:@state
if @sprite?
x.pos = @sprite.pos
x.sp = @sprite.speed
x.br = @bearing
x.cs = @sprite.cls
x
getScreenBounds: =>
if @sprite?
w2 = (@screenSize.w/2)*1.1
h2 = (@screenSize.h/2)*1.1
{b:@sprite.pos.y-h2,t:@sprite.pos.y+h2,l:@sprite.pos.x-w2,r:@sprite.pos.x+w2}
else
@lastBounds
update: (t) =>
if @sprite?
@sprite.speed = CompGeo.speedAtBearing(@bearing, @speed)
hit: =>
s = @sprite
s.cls = "plr0-hit"
_.delay(->
s.cls = "plr0" if s?
,200)
@server.explosionAt(@sprite.getCentrePoint(), {x:@sprite.speed.x/2,y:@sprite.speed.y/2} ,'exps', null)
@health = Math.max(@health-10,0)
@die() if @health == 0
turnLeft: =>
if @sprite?
@sprite.bearing = @bearing = CompGeo.fixBearing(@bearing-2)
turnRight: =>
if @sprite?
@sprite.bearing = @bearing = CompGeo.fixBearing(@bearing+2)
slowDown: =>
@speed = Math.max(@speed-2,@lowerSpeedLimit)
speedUp: =>
@speed = Math.min(@speed+2,@upperSpeedLimit)
destroy: =>
if @sprite?
@sprite.destroy()
delete @['sprite']
fire: =>
t = new Date().getTime()
return if @lastFire? and @lastFire > t-@fireLimit
x = new Bullet(@server, @sprite.getCentrePoint(), CompGeo.speedAtBearing(@bearing,@speed+300), "blt0" )
x.bearing = @bearing
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
if @fireMode >0
s1 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing-90,@speed+300), @sprite.speed)
s2 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing+90,@speed+300), @sprite.speed)
x = new Bullet(@server, @sprite.getCentrePoint(), s1, "blt1" )
x.bearing = @bearing-90
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
x = new Bullet(@server, @sprite.getCentrePoint(), s2, "blt1" )
x.bearing = @bearing+90
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
if @fireMode >1
s1 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing-180,@speed+300), @sprite.speed)
x = new Bullet(@server, @sprite.getCentrePoint(), s1, "blt1" )
x.bearing = @bearing-180
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
if @fireMode >2
s1 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing-45,@speed+300), @sprite.speed)
s2 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing+45,@speed+300), @sprite.speed)
x = new Bullet(@server, @sprite.getCentrePoint(), s1, "blt0" )
x.bearing = @bearing-45
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
x = new Bullet(@server, @sprite.getCentrePoint(), s2, "blt0" )
x.bearing = @bearing+45
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
@lastFire = t
copyVector: (v) ->
{x:v.x,y:v.y}
| true | _ = require 'underscore'
{Sprite} = require "../lib/universe_sprite"
{Bullet} = require "../lib/universe_bullet"
{CompGeo} = require "../lib/universe_compgeo"
exports.Player = class Player
constructor: (@socket, @server) ->
@img = 0
@screenSize = {w:800, h:600}
@score = 0
@health = 100
@bearing = 180
@speed = 50
@lastFire = 0
@fireLimit = 300
@lastBounds = {b:0,t:@screenSize.h,l:0,r:@screenSize.w}
@name="plrrr"
@state = 0
start: =>
if @state == 0
@state = 1
@sprite = new Sprite(@server,{x:Math.round(Math.random()*@server.sizex), y:Math.round(Math.random()*@server.sizey)},{x:0,y:0},"plr0")
@sprite.name =PI:NAME:<NAME>END_PI @name
@sprite.player = @
@sprite.shootable = true
@sprite.collisionIn = (sprite) =>
@hit() if sprite.bullet and sprite.owner != @sprite.id
@health = 100
@fireMode = 0
@lowerSpeedLimit = 40
@upperSpeedLimit = 200
@sprite.bearing = @bearing
@server.addSprite(@sprite)
die: =>
console.log("player died")
p = @sprite.getCentrePoint()
s = {x:@sprite.speed.x, y:@sprite.speed.y}
r = [100,200,300,400,500]
for y in r
_.delay( =>
ps = {x:p.x-16+Math.round(Math.random()*32),y:p.y-16+Math.round(Math.random()*32)}
@server.explosionAt(ps, s ,'expl', @sprite)
,y)
_.delay( =>
@sprite.destroy()
@state = 0
@lastBounds = @getScreenBounds()
,300)
getState: =>
x =
t:'ps'
id:@socket.id
sc:@score
hl:@health
st:@state
if @sprite?
x.pos = @sprite.pos
x.sp = @sprite.speed
x.br = @bearing
x.cs = @sprite.cls
x
getScreenBounds: =>
if @sprite?
w2 = (@screenSize.w/2)*1.1
h2 = (@screenSize.h/2)*1.1
{b:@sprite.pos.y-h2,t:@sprite.pos.y+h2,l:@sprite.pos.x-w2,r:@sprite.pos.x+w2}
else
@lastBounds
update: (t) =>
if @sprite?
@sprite.speed = CompGeo.speedAtBearing(@bearing, @speed)
hit: =>
s = @sprite
s.cls = "plr0-hit"
_.delay(->
s.cls = "plr0" if s?
,200)
@server.explosionAt(@sprite.getCentrePoint(), {x:@sprite.speed.x/2,y:@sprite.speed.y/2} ,'exps', null)
@health = Math.max(@health-10,0)
@die() if @health == 0
turnLeft: =>
if @sprite?
@sprite.bearing = @bearing = CompGeo.fixBearing(@bearing-2)
turnRight: =>
if @sprite?
@sprite.bearing = @bearing = CompGeo.fixBearing(@bearing+2)
slowDown: =>
@speed = Math.max(@speed-2,@lowerSpeedLimit)
speedUp: =>
@speed = Math.min(@speed+2,@upperSpeedLimit)
destroy: =>
if @sprite?
@sprite.destroy()
delete @['sprite']
fire: =>
t = new Date().getTime()
return if @lastFire? and @lastFire > t-@fireLimit
x = new Bullet(@server, @sprite.getCentrePoint(), CompGeo.speedAtBearing(@bearing,@speed+300), "blt0" )
x.bearing = @bearing
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
if @fireMode >0
s1 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing-90,@speed+300), @sprite.speed)
s2 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing+90,@speed+300), @sprite.speed)
x = new Bullet(@server, @sprite.getCentrePoint(), s1, "blt1" )
x.bearing = @bearing-90
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
x = new Bullet(@server, @sprite.getCentrePoint(), s2, "blt1" )
x.bearing = @bearing+90
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
if @fireMode >1
s1 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing-180,@speed+300), @sprite.speed)
x = new Bullet(@server, @sprite.getCentrePoint(), s1, "blt1" )
x.bearing = @bearing-180
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
if @fireMode >2
s1 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing-45,@speed+300), @sprite.speed)
s2 = CompGeo.addVector(CompGeo.speedAtBearing(@bearing+45,@speed+300), @sprite.speed)
x = new Bullet(@server, @sprite.getCentrePoint(), s1, "blt0" )
x.bearing = @bearing-45
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
x = new Bullet(@server, @sprite.getCentrePoint(), s2, "blt0" )
x.bearing = @bearing+45
x.expiry = t+3000
x.owner = @sprite.id
@server.addSprite(x)
@lastFire = t
copyVector: (v) ->
{x:v.x,y:v.y}
|
[
{
"context": "\": \"tmus\",\n \"account_id\": \"13874236\",\n \"product_orders\": [\n ",
"end": 512,
"score": 0.9977472424507141,
"start": 504,
"tag": "KEY",
"value": "13874236"
},
{
"context": " },\n \"acc... | models/shopping_cart/fixture/createaccount.coffee | signonsridhar/sridhar_hbs | 0 | define([ 'can_fixture', 'Faker'], (can, Faker)->
can.fixture('POST /bss/account?action=createaccount', (req, res)->
return {
"response": {
"service": "createaccount",
"response_code": 100,
"execution_time": 3151,
"timestamp": "2013-10-15T22:22:13+0000",
"response_data": {
"shopping_cart": {
"partner_code": "tmus",
"account_id": "13874236",
"product_orders": [
{
"quantity": 1,
"product_order_id": 10933233
},
{
"quantity": 1,
"product_order_id": 10933234
}
]
},
"account_id": "13874236"
}
}
}
)
)
| 636 | define([ 'can_fixture', 'Faker'], (can, Faker)->
can.fixture('POST /bss/account?action=createaccount', (req, res)->
return {
"response": {
"service": "createaccount",
"response_code": 100,
"execution_time": 3151,
"timestamp": "2013-10-15T22:22:13+0000",
"response_data": {
"shopping_cart": {
"partner_code": "tmus",
"account_id": "<KEY>",
"product_orders": [
{
"quantity": 1,
"product_order_id": 10933233
},
{
"quantity": 1,
"product_order_id": 10933234
}
]
},
"account_id": "<KEY>"
}
}
}
)
)
| true | define([ 'can_fixture', 'Faker'], (can, Faker)->
can.fixture('POST /bss/account?action=createaccount', (req, res)->
return {
"response": {
"service": "createaccount",
"response_code": 100,
"execution_time": 3151,
"timestamp": "2013-10-15T22:22:13+0000",
"response_data": {
"shopping_cart": {
"partner_code": "tmus",
"account_id": "PI:KEY:<KEY>END_PI",
"product_orders": [
{
"quantity": 1,
"product_order_id": 10933233
},
{
"quantity": 1,
"product_order_id": 10933234
}
]
},
"account_id": "PI:KEY:<KEY>END_PI"
}
}
}
)
)
|
[
{
"context": "wner: \"BitBucket Repository Owner\"\n username: \"BitBucket API Username\"\n password: \"BitBucket API Password\"\n\n @reaso",
"end": 832,
"score": 0.993333637714386,
"start": 810,
"tag": "USERNAME",
"value": "BitBucket API Username"
},
{
"context": "username: \... | lib/util/configuration.coffee | ExentriqLtd/mapr-preview | 0 | {app} = require 'remote'
CSON = require('cson')
path = require('path')
fs = require('fs')
log = require './logger'
moment = require('moment')
AWEConfiguration = require './configuration-adv-web-editor'
{File, Directory} = require 'atom'
FILE_PATH = path.join(app.getPath("userData"), "mapr-preview.cson")
# Project folders with contents allowed in preview
LOCALES = ['en', 'fr', 'ja', 'ko']
getRepoName = (uri) ->
tmp = uri.split('/')
name = tmp[tmp.length-1]
tmp = name.split('.')
[..., last] = tmp
if last is 'git'
name = tmp[...-1].join('.')
else
name
class Configuration
@labels:
repoUrl: "MapR.com Project Clone URL"
targetDir: "MapR.com Project Directory"
contentDir: "MapR.com-content Project Directory"
repoOwner: "BitBucket Repository Owner"
username: "BitBucket API Username"
password: "BitBucket API Password"
@reasons:
repoUrl: "MapR.com Project Clone URL must be a valid SSH repository"
contentDir: "MapR.com-content Project Directory must exist"
targetDir: "MapR.com Project Directory must be set"
repoOwner: "BitBucket Repository Owner must be set"
username: "BitBucket API Username must be set"
password: "BitBucket API Password must be set"
@validators:
isValidRepo: (value) ->
return Configuration.validators.isNotBlank(value) &&
(Configuration.validators.isValidHttp(value) || Configuration.validators.isValidSsh(value))
isNotBlank: (value) ->
return value?.trim?().length > 0
whatever: (value) ->
return true
isValidHttp: (value) ->
return value.startsWith("http")
isValidSsh: (value) ->
return !value.startsWith("http") && value.indexOf '@' >= 0
isEmail: (value) ->
re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
return re.test(value)
dirExists: (value) ->
dir = new Directory(value)
return dir.existsSync()
@validationRules:
repoUrl: @validators.isValidRepo
contentDir: @validators.dirExists
targetDir: @validators.isNotBlank
username: @validators.isNotBlank
password: @validators.isNotBlank
repoOwner: @validators.isNotBlank
constructor: () ->
@read()
exists: () ->
return @confFile.existsSync()
read: () ->
log.debug "MaprPreview::read", FILE_PATH
@confFile = new File(FILE_PATH)
if @exists()
try
@conf = CSON.parseCSONFile(FILE_PATH)
# log.debug "Read configuration: ", @conf
catch error
log.warn "Invalid configuration detected"
@conf = null
else
@confFile.create()
@conf = {
repoUrl: ''
}
return @conf
get: () ->
if !@conf
@conf = {
####
# Empty Object
# initialization
####
}
log.debug "configuration::get", @conf
return @conf
set: (c) ->
@conf = c
# log.debug "configuration::set", @conf
return this
setValues: (values) ->
Object.keys(values).forEach (key) => @conf[key] = values[key]
save: () ->
log.debug "MaprPreview::save", FILE_PATH
s = CSON.stringify(@conf)
#@confFile.create().then =>
@confFile.writeSync(s)
# log.debug "configuration::save", @conf
acquireFromAwe: () ->
aweConf = new AWEConfiguration()
if !(aweConf.exists() && aweConf.isValid())
return
innerAweConf = aweConf.get()
@conf.contentDir = path.join(innerAweConf.cloneDir, getRepoName(innerAweConf.repoUrl))
@conf.targetDir = innerAweConf.cloneDir
# @conf.repoOwner = innerAweConf.repoOwner
# @conf.username = innerAweConf.repoUsername
# @conf.password = innerAweConf.password
isAweConfValid: () ->
aweConf = new AWEConfiguration()
return aweConf.exists() && aweConf.isValid()
isValid: () ->
allKeys = @conf && Object.keys(@conf).filter (k) ->
keys.find (j) ->
k == j
.length == keys.length
return allKeys && @validateAll().length == 0
validateAll: () ->
return Object.keys(Configuration.validationRules).map (rule) =>
res = Configuration.validationRules[rule](@conf[rule])
return if res then null else rule
.filter (x) -> x
isStringEmpty: (s) ->
return !(s && s.trim && s.trim().length > 0)
getTargetDir: () ->
return path.join(@conf.targetDir, getRepoName(@conf.repoUrl))
getOutDir: () ->
return path.join(@conf.targetDir, "out")
getTempPreviewStorageDirectory: ->
return path.join(@getOutDir(), moment().format("YYYYMMDD-HHmmss"))
getRepoName: getRepoName
shouldClone: () ->
return !Configuration.validators.dirExists(@getTargetDir())
isPathFromProject: (path) ->
root = @conf.contentDir
return path.indexOf(root) >= 0
#strip down mapr.com-content path from the given path
relativePath: (path) ->
root = @conf.contentDir
if root.endsWith path.sep
root = root.substring(0, root.length-2)
return path.replace(root, '')
isPreviewAllowed: (filePath) ->
# log.debug 'Is preview allowed?', filePath
fromProject = @isPathFromProject(filePath)
if !fromProject
# log.debug 'No, path is not from project'
return false
if !filePath.endsWith '.md'
# log.debug 'No, it is not a markdown'
return false
folders = @relativePath(filePath).split(path.sep).filter (x) -> x
# log.debug folders
if folders[0] in LOCALES
# log.debug 'Yep.'
return true
else
# log.debug 'Nope.'
return false
deleteGitLock: () ->
theLock = path.join(@getTargetDir(), '.git', 'index.lock')
if fs.existsSync(theLock)
fs.unlinkSync(theLock)
keys = Object.keys(Configuration.labels)
module.exports = Configuration
| 176819 | {app} = require 'remote'
CSON = require('cson')
path = require('path')
fs = require('fs')
log = require './logger'
moment = require('moment')
AWEConfiguration = require './configuration-adv-web-editor'
{File, Directory} = require 'atom'
FILE_PATH = path.join(app.getPath("userData"), "mapr-preview.cson")
# Project folders with contents allowed in preview
LOCALES = ['en', 'fr', 'ja', 'ko']
getRepoName = (uri) ->
tmp = uri.split('/')
name = tmp[tmp.length-1]
tmp = name.split('.')
[..., last] = tmp
if last is 'git'
name = tmp[...-1].join('.')
else
name
class Configuration
@labels:
repoUrl: "MapR.com Project Clone URL"
targetDir: "MapR.com Project Directory"
contentDir: "MapR.com-content Project Directory"
repoOwner: "BitBucket Repository Owner"
username: "BitBucket API Username"
password: "<PASSWORD>"
@reasons:
repoUrl: "MapR.com Project Clone URL must be a valid SSH repository"
contentDir: "MapR.com-content Project Directory must exist"
targetDir: "MapR.com Project Directory must be set"
repoOwner: "BitBucket Repository Owner must be set"
username: "BitBucket API Username must be set"
password: "<PASSWORD>"
@validators:
isValidRepo: (value) ->
return Configuration.validators.isNotBlank(value) &&
(Configuration.validators.isValidHttp(value) || Configuration.validators.isValidSsh(value))
isNotBlank: (value) ->
return value?.trim?().length > 0
whatever: (value) ->
return true
isValidHttp: (value) ->
return value.startsWith("http")
isValidSsh: (value) ->
return !value.startsWith("http") && value.indexOf '@' >= 0
isEmail: (value) ->
re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
return re.test(value)
dirExists: (value) ->
dir = new Directory(value)
return dir.existsSync()
@validationRules:
repoUrl: @validators.isValidRepo
contentDir: @validators.dirExists
targetDir: @validators.isNotBlank
username: @validators.isNotBlank
password: <PASSWORD>
repoOwner: @validators.isNotBlank
constructor: () ->
@read()
exists: () ->
return @confFile.existsSync()
read: () ->
log.debug "MaprPreview::read", FILE_PATH
@confFile = new File(FILE_PATH)
if @exists()
try
@conf = CSON.parseCSONFile(FILE_PATH)
# log.debug "Read configuration: ", @conf
catch error
log.warn "Invalid configuration detected"
@conf = null
else
@confFile.create()
@conf = {
repoUrl: ''
}
return @conf
get: () ->
if !@conf
@conf = {
####
# Empty Object
# initialization
####
}
log.debug "configuration::get", @conf
return @conf
set: (c) ->
@conf = c
# log.debug "configuration::set", @conf
return this
setValues: (values) ->
Object.keys(values).forEach (key) => @conf[key] = values[key]
save: () ->
log.debug "MaprPreview::save", FILE_PATH
s = CSON.stringify(@conf)
#@confFile.create().then =>
@confFile.writeSync(s)
# log.debug "configuration::save", @conf
acquireFromAwe: () ->
aweConf = new AWEConfiguration()
if !(aweConf.exists() && aweConf.isValid())
return
innerAweConf = aweConf.get()
@conf.contentDir = path.join(innerAweConf.cloneDir, getRepoName(innerAweConf.repoUrl))
@conf.targetDir = innerAweConf.cloneDir
# @conf.repoOwner = innerAweConf.repoOwner
# @conf.username = innerAweConf.repoUsername
# @conf.password = <PASSWORD>AweConf.password
isAweConfValid: () ->
aweConf = new AWEConfiguration()
return aweConf.exists() && aweConf.isValid()
isValid: () ->
allKeys = @conf && Object.keys(@conf).filter (k) ->
keys.find (j) ->
k == j
.length == keys.length
return allKeys && @validateAll().length == 0
validateAll: () ->
return Object.keys(Configuration.validationRules).map (rule) =>
res = Configuration.validationRules[rule](@conf[rule])
return if res then null else rule
.filter (x) -> x
isStringEmpty: (s) ->
return !(s && s.trim && s.trim().length > 0)
getTargetDir: () ->
return path.join(@conf.targetDir, getRepoName(@conf.repoUrl))
getOutDir: () ->
return path.join(@conf.targetDir, "out")
getTempPreviewStorageDirectory: ->
return path.join(@getOutDir(), moment().format("YYYYMMDD-HHmmss"))
getRepoName: getRepoName
shouldClone: () ->
return !Configuration.validators.dirExists(@getTargetDir())
isPathFromProject: (path) ->
root = @conf.contentDir
return path.indexOf(root) >= 0
#strip down mapr.com-content path from the given path
relativePath: (path) ->
root = @conf.contentDir
if root.endsWith path.sep
root = root.substring(0, root.length-2)
return path.replace(root, '')
isPreviewAllowed: (filePath) ->
# log.debug 'Is preview allowed?', filePath
fromProject = @isPathFromProject(filePath)
if !fromProject
# log.debug 'No, path is not from project'
return false
if !filePath.endsWith '.md'
# log.debug 'No, it is not a markdown'
return false
folders = @relativePath(filePath).split(path.sep).filter (x) -> x
# log.debug folders
if folders[0] in LOCALES
# log.debug 'Yep.'
return true
else
# log.debug 'Nope.'
return false
deleteGitLock: () ->
theLock = path.join(@getTargetDir(), '.git', 'index.lock')
if fs.existsSync(theLock)
fs.unlinkSync(theLock)
keys = Object.keys(Configuration.labels)
module.exports = Configuration
| true | {app} = require 'remote'
CSON = require('cson')
path = require('path')
fs = require('fs')
log = require './logger'
moment = require('moment')
AWEConfiguration = require './configuration-adv-web-editor'
{File, Directory} = require 'atom'
FILE_PATH = path.join(app.getPath("userData"), "mapr-preview.cson")
# Project folders with contents allowed in preview
LOCALES = ['en', 'fr', 'ja', 'ko']
getRepoName = (uri) ->
tmp = uri.split('/')
name = tmp[tmp.length-1]
tmp = name.split('.')
[..., last] = tmp
if last is 'git'
name = tmp[...-1].join('.')
else
name
class Configuration
@labels:
repoUrl: "MapR.com Project Clone URL"
targetDir: "MapR.com Project Directory"
contentDir: "MapR.com-content Project Directory"
repoOwner: "BitBucket Repository Owner"
username: "BitBucket API Username"
password: "PI:PASSWORD:<PASSWORD>END_PI"
@reasons:
repoUrl: "MapR.com Project Clone URL must be a valid SSH repository"
contentDir: "MapR.com-content Project Directory must exist"
targetDir: "MapR.com Project Directory must be set"
repoOwner: "BitBucket Repository Owner must be set"
username: "BitBucket API Username must be set"
password: "PI:PASSWORD:<PASSWORD>END_PI"
@validators:
isValidRepo: (value) ->
return Configuration.validators.isNotBlank(value) &&
(Configuration.validators.isValidHttp(value) || Configuration.validators.isValidSsh(value))
isNotBlank: (value) ->
return value?.trim?().length > 0
whatever: (value) ->
return true
isValidHttp: (value) ->
return value.startsWith("http")
isValidSsh: (value) ->
return !value.startsWith("http") && value.indexOf '@' >= 0
isEmail: (value) ->
re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
return re.test(value)
dirExists: (value) ->
dir = new Directory(value)
return dir.existsSync()
@validationRules:
repoUrl: @validators.isValidRepo
contentDir: @validators.dirExists
targetDir: @validators.isNotBlank
username: @validators.isNotBlank
password: PI:PASSWORD:<PASSWORD>END_PI
repoOwner: @validators.isNotBlank
constructor: () ->
@read()
exists: () ->
return @confFile.existsSync()
read: () ->
log.debug "MaprPreview::read", FILE_PATH
@confFile = new File(FILE_PATH)
if @exists()
try
@conf = CSON.parseCSONFile(FILE_PATH)
# log.debug "Read configuration: ", @conf
catch error
log.warn "Invalid configuration detected"
@conf = null
else
@confFile.create()
@conf = {
repoUrl: ''
}
return @conf
get: () ->
if !@conf
@conf = {
####
# Empty Object
# initialization
####
}
log.debug "configuration::get", @conf
return @conf
set: (c) ->
@conf = c
# log.debug "configuration::set", @conf
return this
setValues: (values) ->
Object.keys(values).forEach (key) => @conf[key] = values[key]
save: () ->
log.debug "MaprPreview::save", FILE_PATH
s = CSON.stringify(@conf)
#@confFile.create().then =>
@confFile.writeSync(s)
# log.debug "configuration::save", @conf
acquireFromAwe: () ->
aweConf = new AWEConfiguration()
if !(aweConf.exists() && aweConf.isValid())
return
innerAweConf = aweConf.get()
@conf.contentDir = path.join(innerAweConf.cloneDir, getRepoName(innerAweConf.repoUrl))
@conf.targetDir = innerAweConf.cloneDir
# @conf.repoOwner = innerAweConf.repoOwner
# @conf.username = innerAweConf.repoUsername
# @conf.password = PI:PASSWORD:<PASSWORD>END_PIAweConf.password
isAweConfValid: () ->
aweConf = new AWEConfiguration()
return aweConf.exists() && aweConf.isValid()
isValid: () ->
allKeys = @conf && Object.keys(@conf).filter (k) ->
keys.find (j) ->
k == j
.length == keys.length
return allKeys && @validateAll().length == 0
validateAll: () ->
return Object.keys(Configuration.validationRules).map (rule) =>
res = Configuration.validationRules[rule](@conf[rule])
return if res then null else rule
.filter (x) -> x
isStringEmpty: (s) ->
return !(s && s.trim && s.trim().length > 0)
getTargetDir: () ->
return path.join(@conf.targetDir, getRepoName(@conf.repoUrl))
getOutDir: () ->
return path.join(@conf.targetDir, "out")
getTempPreviewStorageDirectory: ->
return path.join(@getOutDir(), moment().format("YYYYMMDD-HHmmss"))
getRepoName: getRepoName
shouldClone: () ->
return !Configuration.validators.dirExists(@getTargetDir())
isPathFromProject: (path) ->
root = @conf.contentDir
return path.indexOf(root) >= 0
#strip down mapr.com-content path from the given path
relativePath: (path) ->
root = @conf.contentDir
if root.endsWith path.sep
root = root.substring(0, root.length-2)
return path.replace(root, '')
isPreviewAllowed: (filePath) ->
# log.debug 'Is preview allowed?', filePath
fromProject = @isPathFromProject(filePath)
if !fromProject
# log.debug 'No, path is not from project'
return false
if !filePath.endsWith '.md'
# log.debug 'No, it is not a markdown'
return false
folders = @relativePath(filePath).split(path.sep).filter (x) -> x
# log.debug folders
if folders[0] in LOCALES
# log.debug 'Yep.'
return true
else
# log.debug 'Nope.'
return false
deleteGitLock: () ->
theLock = path.join(@getTargetDir(), '.git', 'index.lock')
if fs.existsSync(theLock)
fs.unlinkSync(theLock)
keys = Object.keys(Configuration.labels)
module.exports = Configuration
|
[
{
"context": "s\", ->\n expect(capitalizeString 'bob').toBe 'Bob'\n expect(capitalizeString 'BILL').toBe 'BILL",
"end": 1352,
"score": 0.9365530014038086,
"start": 1349,
"tag": "NAME",
"value": "Bob"
}
] | test/util_test.coffee | nornagon/ircv | 20 | describe "Util provides the following functions:", ->
class TestClass1
class TestClass2
describe "pluralize", ->
it "does nothing if there is one of something", ->
expect(pluralize 'dog', 1).toBe 'dog'
expect(pluralize 'stress', 1).toBe 'stress'
it "adds an 's' when there is 0 or > 1 of something", ->
expect(pluralize 'cat', 2).toBe 'cats'
expect(pluralize 'cat', 0).toBe 'cats'
it "adds an 'es' when there is 0 or > 1 of something and the word ends in 's'", ->
expect(pluralize 'stress', 2).toBe 'stresses'
describe "getLogger", ->
it "logs debug info", ->
spyOn(window.console, 'log')
spyOn(window.console, 'error')
spyOn(window.console, 'warn')
a = new TestClass1
b = new TestClass2
logA = getLogger a
logB = getLogger b
logA 'this is my message!'
logB 'w', 'warning', 5, 'is a great number'
logA 'e', 'error!', 'error msg'
expect(console.log).toHaveBeenCalledWith 'TestClass1:', 'this is my message!'
expect(console.warn).toHaveBeenCalledWith 'TestClass2:', 'warning', 5, 'is a great number'
expect(console.error).toHaveBeenCalledWith 'TestClass1:', 'error!', 'error msg'
describe "capitalizeString", ->
it "capitalizes the first letter of words", ->
expect(capitalizeString 'bob').toBe 'Bob'
expect(capitalizeString 'BILL').toBe 'BILL'
expect(capitalizeString '').toBe '' | 191717 | describe "Util provides the following functions:", ->
class TestClass1
class TestClass2
describe "pluralize", ->
it "does nothing if there is one of something", ->
expect(pluralize 'dog', 1).toBe 'dog'
expect(pluralize 'stress', 1).toBe 'stress'
it "adds an 's' when there is 0 or > 1 of something", ->
expect(pluralize 'cat', 2).toBe 'cats'
expect(pluralize 'cat', 0).toBe 'cats'
it "adds an 'es' when there is 0 or > 1 of something and the word ends in 's'", ->
expect(pluralize 'stress', 2).toBe 'stresses'
describe "getLogger", ->
it "logs debug info", ->
spyOn(window.console, 'log')
spyOn(window.console, 'error')
spyOn(window.console, 'warn')
a = new TestClass1
b = new TestClass2
logA = getLogger a
logB = getLogger b
logA 'this is my message!'
logB 'w', 'warning', 5, 'is a great number'
logA 'e', 'error!', 'error msg'
expect(console.log).toHaveBeenCalledWith 'TestClass1:', 'this is my message!'
expect(console.warn).toHaveBeenCalledWith 'TestClass2:', 'warning', 5, 'is a great number'
expect(console.error).toHaveBeenCalledWith 'TestClass1:', 'error!', 'error msg'
describe "capitalizeString", ->
it "capitalizes the first letter of words", ->
expect(capitalizeString 'bob').toBe '<NAME>'
expect(capitalizeString 'BILL').toBe 'BILL'
expect(capitalizeString '').toBe '' | true | describe "Util provides the following functions:", ->
class TestClass1
class TestClass2
describe "pluralize", ->
it "does nothing if there is one of something", ->
expect(pluralize 'dog', 1).toBe 'dog'
expect(pluralize 'stress', 1).toBe 'stress'
it "adds an 's' when there is 0 or > 1 of something", ->
expect(pluralize 'cat', 2).toBe 'cats'
expect(pluralize 'cat', 0).toBe 'cats'
it "adds an 'es' when there is 0 or > 1 of something and the word ends in 's'", ->
expect(pluralize 'stress', 2).toBe 'stresses'
describe "getLogger", ->
it "logs debug info", ->
spyOn(window.console, 'log')
spyOn(window.console, 'error')
spyOn(window.console, 'warn')
a = new TestClass1
b = new TestClass2
logA = getLogger a
logB = getLogger b
logA 'this is my message!'
logB 'w', 'warning', 5, 'is a great number'
logA 'e', 'error!', 'error msg'
expect(console.log).toHaveBeenCalledWith 'TestClass1:', 'this is my message!'
expect(console.warn).toHaveBeenCalledWith 'TestClass2:', 'warning', 5, 'is a great number'
expect(console.error).toHaveBeenCalledWith 'TestClass1:', 'error!', 'error msg'
describe "capitalizeString", ->
it "capitalizes the first letter of words", ->
expect(capitalizeString 'bob').toBe 'PI:NAME:<NAME>END_PI'
expect(capitalizeString 'BILL').toBe 'BILL'
expect(capitalizeString '').toBe '' |
[
{
"context": "l as name where false union all\n select 'alice' union all\n select 'bob",
"end": 8504,
"score": 0.9995731711387634,
"start": 8499,
"tag": "NAME",
"value": "alice"
},
{
"context": "ice' union all\n select 'bob... | src/experiments/using-icql-with-sqlite.coffee | loveencounterflow/mingkwai-typewriter-companion | 0 |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = '明快打字机/EXPERIMENTS/ICQL+SQLITE'
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
info = CND.get_logger 'info', badge
urge = CND.get_logger 'urge', badge
help = CND.get_logger 'help', badge
whisper = CND.get_logger 'whisper', badge
echo = CND.echo.bind CND
#...........................................................................................................
PATH = require 'path'
# FS = require 'fs'
PD = require 'pipedreams'
{ $
$async
select } = PD
{ assign
jr } = CND
#...........................................................................................................
join_path = ( P... ) -> PATH.resolve PATH.join P...
boolean_as_int = ( x ) -> if x then 1 else 0
{ inspect, } = require 'util'
xrpr = ( x ) -> inspect x, { colors: yes, breakLength: Infinity, maxArrayLength: Infinity, depth: Infinity, }
xrpr2 = ( x ) -> inspect x, { colors: yes, breakLength: 80, maxArrayLength: Infinity, depth: Infinity, }
#...........................................................................................................
ICQL = require 'icql'
INTERTYPE = require '../types'
DB = require '../db'
#-----------------------------------------------------------------------------------------------------------
@_prepare_db = ( db ) ->
db.import_table_unames()
db.import_table_uname_tokens()
db.import_table_unicode_test()
db.create_view_unicode_test_with_end_markers()
db.fts5_create_and_populate_token_tables()
db.spellfix_create_editcosts()
db.spellfix_create_and_populate_token_tables()
db.spellfix_populate_custom_codes()
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_fts5_token_phrases = ( db ) ->
#.........................................................................................................
whisper '-'.repeat 108
urge 'demo_fts5_token_phrases'
token_phrases = [
'latin alpha'
'latin alpha small'
'latin alpha capital'
'greek alpha'
'greek alpha small'
'cyrillic small a'
]
for q in token_phrases
urge rpr q
info ( xrpr row ) for row from db.fts5_fetch_uname_token_matches { q, limit: 5, }
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_fts5_broken_phrases = ( db ) ->
#.........................................................................................................
whisper '-'.repeat 108
urge 'demo_fts5_broken_phrases'
cache = {}
broken_phrases = [
'latn alp'
'latn alp smll'
'latn alp cap'
'greek alpha'
'cap greek alpha'
'greek alpha small'
'cyrillic small a'
'ktkn'
'katakana'
'hirag no'
'no'
'xxx'
'istanbul'
'capital'
'mycode'
'123'
'^'
'´'
'`'
'"'
'~'
'~ a'
'~ a small'
'~ a capital'
'_'
'-'
'~~'
'%'
'_'
'~~'
'%'
'%0'
'%0 sign'
'kxr'
'kxr tree'
'n14 circled'
'circled n14'
'fourteen circled'
'- l'
]
### TAINT `initials` should be in `db.$.settings` ###
initials = 2
tokens = []
for broken_phrase in broken_phrases
#.......................................................................................................
for attempt in broken_phrase.split /\s+/
if ( hit = cache[ attempt ] ) is undefined
hit = db.$.first_value db.match_uname_tokens_spellfix { q: attempt, initials, limit: 1, }
cache[ attempt ] = hit ? null
# debug '27762', attempt, hit
tokens.push hit if hit?
#.......................................................................................................
debug tokens
if tokens.length < 1
warn "no token matches for #{rpr broken_phrase}"
continue
#.......................................................................................................
q = tokens.join ' '
tokens.length = 0
#.......................................................................................................
urge ( CND.white broken_phrase ), ( CND.grey '-->' ), ( CND.orange rpr q )
info ( xrpr row ) for row from db.fts5_fetch_uname_token_matches { q, limit: 5, }
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_uname_tokens = ( db ) ->
info ( xrpr row ) for row from db.$.query """select * from uname_tokens;"""
#-----------------------------------------------------------------------------------------------------------
@demo_spellfix = ( db ) ->
whisper '-'.repeat 108
urge 'demo_spellfix'
# info ( xrpr row ) for row from db.$.query 'select * from spellfix_editcosts;'
# db.$.execute """update spellfix_uname_tokens_vocab set k2 = upper( word );"""
# db.$.execute """update spellfix_uname_tokens_vocab set k2 = 'CDACM';"""
# info ( xrpr row ) for row from db.$.query """select * from spellfix_uname_tokens_vocab where word regexp '^[^0-9]' limit 30;"""
words = [
# 'were'
# 'whether'
# 'whater'
# 'thosand'
# 'fancy'
# 'fort'
# 'trof'
# 'latn'
# 'cap'
# 'letr'
# 'alif'
# 'hirag'
# 'hrg'
# 'hrgn'
# 'cyr'
# 'grk'
# 'grek'
# 'no'
# 'kata'
# 'katak'
# 'ktkn'
# 'katkn'
# 'ktkna'
# 'ktakn'
# 'standard'
# 'hiero'
# 'egt'
'egyp'
'hgl'
'xxx'
'istanbul'
'capital'
'mycode'
'123'
'^'
'´'
'`'
'"'
'~'
'_'
'-'
'~~'
'%'
'_'
'~~'
'%'
'%0'
'kxr'
]
### TAINT `initials` should be in `db.$.settings` ###
initials = 2
t0 = Date.now()
for q in words
qphonehash = db.$.first_value db.get_spellfix1_phonehash { q, }
# for row from db.match_uname_tokens_spellfix_with_scores { q, initials, limit: 15, }
# debug '----', q, 'I', initials, 'S', row.score, 'L', row.matchlen, 'D', row.distance, row.source, row.qphonehash, row.wphonehash, row.word
hits = db.$.all_first_values db.match_uname_tokens_spellfix { q, initials, limit: 5, }
hits = hits.join ', '
info "#{q} (#{qphonehash}) --> #{hits}"
t1 = Date.now()
dt = t1 - t0
tps = dt / words.length
urge "took #{dt} ms (#{tps.toFixed 1} ms per search)"
return null
#-----------------------------------------------------------------------------------------------------------
@demo_json = ( db ) ->
whisper '-'.repeat 108
urge 'demo_json'
info db.$.all_rows db.$.query """
select
x.words as words,
json_array_length ( x.words ) as word_count
from ( select
json( get_words( 'helo world these are many words' ) ) as words ) as x
;"""
whisper '---------------------------------------------'
info row for row from db.$.query """
select
id,
-- key,
type,
value
from json_each( json( get_words( 'helo world these are many words' ) ) )
;"""
whisper '---------------------------------------------'
info row for row from db.$.query """
select
id,
-- key,
type,
value
from json_each( json( '[1,1.5,1e6,true,false,"x",null,{"a":42},[1,2,3]]' ) )
;"""
whisper '---------------------------------------------'
info row for row from db.$.query """
select json_group_array( names.name )
from (
select null as name where false union all
select 'alice' union all
select 'bob' union all
select 'carlito' union all
select 'domian' union all
select 'franz' union all
select null where false
) as names
;"""
whisper '---------------------------------------------'
info rpr JSON.parse db.$.first_value db.$.query """
select
json_group_object( staff.name, staff.extension ) as staff
from (
select null as name, null as extension where false union all
select 'alice', 123 union all
select 'bob', 150 union all
select 'carlito', 177 union all
select 'domian', 204 union all
select 'franz', 231 union all
select null, null where false
) as staff
;"""
whisper '---------------------------------------------'
info xrpr row for row from db.$.query """
select
id as nr,
replace( fullkey, '$', '' ) as path,
key as key,
atom as value
from json_tree( json( '[1,1.5,1e6,true,false,"x",null,{"a":42,"c":[1,{"2":"sub"},3]}]' ) ) as t
where t.fullkey != '$'
;"""
return null
#-----------------------------------------------------------------------------------------------------------
@demo_catalog = ( db ) ->
for row from db.$.catalog()
entry = []
entry.push CND.grey row.type
entry.push CND.white row.name
entry.push CND.yellow "(#{row.tbl_name})" if row.name isnt row.tbl_name
info entry.join ' '
return null
#-----------------------------------------------------------------------------------------------------------
@demo_db_type_of = ( db, name ) ->
return db.$.type_of name
# for row from db.$.catalog()
# return row.type if row.name is name
# return null
#-----------------------------------------------------------------------------------------------------------
@demo_longest_matching_prefix = ( db ) ->
count = db.$.first_value db.$.query """select count(*) from uname_tokens;"""
info "selecting from #{count} entries in uname_tokens"
probes = [
'gr'
'alpha'
'beta'
'c'
'ca'
'cap'
'capi'
'omega'
'circ'
'circle'
]
for probe in probes
info ( CND.grey '--------------------------------------------------------' )
nr = 0
#.......................................................................................................
for row from db.longest_matching_prefix_in_uname_tokens { q: probe, limit: 10, }
nr += +1
# info probe, ( xrpr row )
info ( CND.grey nr ), ( CND.grey row.delta_length ), ( CND.blue probe ), ( CND.grey '->' ), ( CND.lime row.uname_token )
#.......................................................................................................
table = 'uname_tokens'
field = 'uname_token'
chrs = Array.from db.$.first_value db.next_characters { prefix: probe, table, field, }
info probe, '...', ( chrs.join ' ' )
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_nextchr = ( db ) ->
#.........................................................................................................
# whisper '-'.repeat 108
# for row from db.$.query """select * from unicode_test;"""
# info ( xrpr row )
#.........................................................................................................
whisper '-'.repeat 108
probes = [
'-'
'っ'
'か'
'\\'
'ku'
'a'
'x' ]
# table = 'unicode_test'
table = 'unicode_test_with_end_markers'
field = 'word'
for probe in probes
chrs = Array.from db.$.first_value db.next_characters { prefix: probe, table, field, }
info probe, '...', ( chrs.join ' ' )
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_edict2u = ( db ) ->
# debug INTERTYPE.all_keys_of db.$
db.create_table_edict2u()
console.time 'populate-edict2u'
path = join_path __dirname, '../../.cache/edict2u.sql'
help "reading #{PATH.relative process.cwd(), path}"
db.$.read path
help "creating indexes"
db.create_indexes_for_table_edict2u()
console.timeEnd 'populate-edict2u'
probes = [
'ち'
'ちゅ'
'ちゅう'
'ちゅうご'
'ちゅうごく'
'ちゅうごくの'
'ちゅうごくのせ'
'ちゅうごくのせい'
'ちゅうごくのせいふ'
]
limit = 10
for probe in probes
whisper '-'.repeat 108
info probe
nr = 0
for row from db.longest_matching_prefix_in_edict2u { q: probe, limit, }
nr += +1
info ( CND.grey nr ), ( CND.grey row.delta_length ), ( CND.grey '->' ), ( CND.lime row.candidate ), ( CND.white row.reading )
# for row from db.$.query "select * from edict2u where reading like 'ちゅうごく%' order by reading limit 5;"
# info row.candidate
#.........................................................................................................
return null
############################################################################################################
unless module.parent?
DEMO = @
do ->
db = DB.new_db { clear: false, }
# db = DB.new_db { clear: true, }
# DEMO._prepare_db db
# db = await DEMO.new_db()
# DEMO.demo_uname_tokens db
# DEMO.demo_fts5_token_phrases db
# urge '33342', db.$.first_value db.$.query """select plus( 34, 56 );"""
# urge '33342', db.$.first_value db.$.query """select e( plus( 'here', 'there' ) );"""
# info row for row from db.$.query """
# select split( 'helo world whassup', s.value ) as word
# from generate_series( 1, 10 ) as s
# where word is not null
# ;
# """
# DEMO.demo_spellfix db
# DEMO.demo_fts5_broken_phrases db
# DEMO.demo_json db
DEMO.demo_catalog db
info 'µ33344', rpr DEMO.demo_db_type_of db, 'edict2u'
info 'µ33344', rpr DEMO.demo_db_type_of db, 'xxx'
# DEMO.demo_longest_matching_prefix db
# DEMO.demo_edict2u db
# DEMO.demo_nextchr db
return null
| 89213 |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = '明快打字机/EXPERIMENTS/ICQL+SQLITE'
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
info = CND.get_logger 'info', badge
urge = CND.get_logger 'urge', badge
help = CND.get_logger 'help', badge
whisper = CND.get_logger 'whisper', badge
echo = CND.echo.bind CND
#...........................................................................................................
PATH = require 'path'
# FS = require 'fs'
PD = require 'pipedreams'
{ $
$async
select } = PD
{ assign
jr } = CND
#...........................................................................................................
join_path = ( P... ) -> PATH.resolve PATH.join P...
boolean_as_int = ( x ) -> if x then 1 else 0
{ inspect, } = require 'util'
xrpr = ( x ) -> inspect x, { colors: yes, breakLength: Infinity, maxArrayLength: Infinity, depth: Infinity, }
xrpr2 = ( x ) -> inspect x, { colors: yes, breakLength: 80, maxArrayLength: Infinity, depth: Infinity, }
#...........................................................................................................
ICQL = require 'icql'
INTERTYPE = require '../types'
DB = require '../db'
#-----------------------------------------------------------------------------------------------------------
@_prepare_db = ( db ) ->
db.import_table_unames()
db.import_table_uname_tokens()
db.import_table_unicode_test()
db.create_view_unicode_test_with_end_markers()
db.fts5_create_and_populate_token_tables()
db.spellfix_create_editcosts()
db.spellfix_create_and_populate_token_tables()
db.spellfix_populate_custom_codes()
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_fts5_token_phrases = ( db ) ->
#.........................................................................................................
whisper '-'.repeat 108
urge 'demo_fts5_token_phrases'
token_phrases = [
'latin alpha'
'latin alpha small'
'latin alpha capital'
'greek alpha'
'greek alpha small'
'cyrillic small a'
]
for q in token_phrases
urge rpr q
info ( xrpr row ) for row from db.fts5_fetch_uname_token_matches { q, limit: 5, }
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_fts5_broken_phrases = ( db ) ->
#.........................................................................................................
whisper '-'.repeat 108
urge 'demo_fts5_broken_phrases'
cache = {}
broken_phrases = [
'latn alp'
'latn alp smll'
'latn alp cap'
'greek alpha'
'cap greek alpha'
'greek alpha small'
'cyrillic small a'
'ktkn'
'katakana'
'hirag no'
'no'
'xxx'
'istanbul'
'capital'
'mycode'
'123'
'^'
'´'
'`'
'"'
'~'
'~ a'
'~ a small'
'~ a capital'
'_'
'-'
'~~'
'%'
'_'
'~~'
'%'
'%0'
'%0 sign'
'kxr'
'kxr tree'
'n14 circled'
'circled n14'
'fourteen circled'
'- l'
]
### TAINT `initials` should be in `db.$.settings` ###
initials = 2
tokens = []
for broken_phrase in broken_phrases
#.......................................................................................................
for attempt in broken_phrase.split /\s+/
if ( hit = cache[ attempt ] ) is undefined
hit = db.$.first_value db.match_uname_tokens_spellfix { q: attempt, initials, limit: 1, }
cache[ attempt ] = hit ? null
# debug '27762', attempt, hit
tokens.push hit if hit?
#.......................................................................................................
debug tokens
if tokens.length < 1
warn "no token matches for #{rpr broken_phrase}"
continue
#.......................................................................................................
q = tokens.join ' '
tokens.length = 0
#.......................................................................................................
urge ( CND.white broken_phrase ), ( CND.grey '-->' ), ( CND.orange rpr q )
info ( xrpr row ) for row from db.fts5_fetch_uname_token_matches { q, limit: 5, }
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_uname_tokens = ( db ) ->
info ( xrpr row ) for row from db.$.query """select * from uname_tokens;"""
#-----------------------------------------------------------------------------------------------------------
@demo_spellfix = ( db ) ->
whisper '-'.repeat 108
urge 'demo_spellfix'
# info ( xrpr row ) for row from db.$.query 'select * from spellfix_editcosts;'
# db.$.execute """update spellfix_uname_tokens_vocab set k2 = upper( word );"""
# db.$.execute """update spellfix_uname_tokens_vocab set k2 = 'CDACM';"""
# info ( xrpr row ) for row from db.$.query """select * from spellfix_uname_tokens_vocab where word regexp '^[^0-9]' limit 30;"""
words = [
# 'were'
# 'whether'
# 'whater'
# 'thosand'
# 'fancy'
# 'fort'
# 'trof'
# 'latn'
# 'cap'
# 'letr'
# 'alif'
# 'hirag'
# 'hrg'
# 'hrgn'
# 'cyr'
# 'grk'
# 'grek'
# 'no'
# 'kata'
# 'katak'
# 'ktkn'
# 'katkn'
# 'ktkna'
# 'ktakn'
# 'standard'
# 'hiero'
# 'egt'
'egyp'
'hgl'
'xxx'
'istanbul'
'capital'
'mycode'
'123'
'^'
'´'
'`'
'"'
'~'
'_'
'-'
'~~'
'%'
'_'
'~~'
'%'
'%0'
'kxr'
]
### TAINT `initials` should be in `db.$.settings` ###
initials = 2
t0 = Date.now()
for q in words
qphonehash = db.$.first_value db.get_spellfix1_phonehash { q, }
# for row from db.match_uname_tokens_spellfix_with_scores { q, initials, limit: 15, }
# debug '----', q, 'I', initials, 'S', row.score, 'L', row.matchlen, 'D', row.distance, row.source, row.qphonehash, row.wphonehash, row.word
hits = db.$.all_first_values db.match_uname_tokens_spellfix { q, initials, limit: 5, }
hits = hits.join ', '
info "#{q} (#{qphonehash}) --> #{hits}"
t1 = Date.now()
dt = t1 - t0
tps = dt / words.length
urge "took #{dt} ms (#{tps.toFixed 1} ms per search)"
return null
#-----------------------------------------------------------------------------------------------------------
@demo_json = ( db ) ->
whisper '-'.repeat 108
urge 'demo_json'
info db.$.all_rows db.$.query """
select
x.words as words,
json_array_length ( x.words ) as word_count
from ( select
json( get_words( 'helo world these are many words' ) ) as words ) as x
;"""
whisper '---------------------------------------------'
info row for row from db.$.query """
select
id,
-- key,
type,
value
from json_each( json( get_words( 'helo world these are many words' ) ) )
;"""
whisper '---------------------------------------------'
info row for row from db.$.query """
select
id,
-- key,
type,
value
from json_each( json( '[1,1.5,1e6,true,false,"x",null,{"a":42},[1,2,3]]' ) )
;"""
whisper '---------------------------------------------'
info row for row from db.$.query """
select json_group_array( names.name )
from (
select null as name where false union all
select '<NAME>' union all
select '<NAME>' union all
select '<NAME>' union all
select '<NAME>' union all
select '<NAME>' union all
select null where false
) as names
;"""
whisper '---------------------------------------------'
info rpr JSON.parse db.$.first_value db.$.query """
select
json_group_object( staff.name, staff.extension ) as staff
from (
select null as name, null as extension where false union all
select '<NAME>', 123 union all
select '<NAME>', 150 union all
select '<NAME>', 177 union all
select '<NAME>', 204 union all
select '<NAME>', 231 union all
select null, null where false
) as staff
;"""
whisper '---------------------------------------------'
info xrpr row for row from db.$.query """
select
id as nr,
replace( fullkey, '$', '' ) as path,
key as key,
atom as value
from json_tree( json( '[1,1.5,1e6,true,false,"x",null,{"a":42,"c":[1,{"2":"sub"},3]}]' ) ) as t
where t.fullkey != '$'
;"""
return null
#-----------------------------------------------------------------------------------------------------------
@demo_catalog = ( db ) ->
for row from db.$.catalog()
entry = []
entry.push CND.grey row.type
entry.push CND.white row.name
entry.push CND.yellow "(#{row.tbl_name})" if row.name isnt row.tbl_name
info entry.join ' '
return null
#-----------------------------------------------------------------------------------------------------------
@demo_db_type_of = ( db, name ) ->
return db.$.type_of name
# for row from db.$.catalog()
# return row.type if row.name is name
# return null
#-----------------------------------------------------------------------------------------------------------
@demo_longest_matching_prefix = ( db ) ->
count = db.$.first_value db.$.query """select count(*) from uname_tokens;"""
info "selecting from #{count} entries in uname_tokens"
probes = [
'gr'
'alpha'
'beta'
'c'
'ca'
'cap'
'capi'
'omega'
'circ'
'circle'
]
for probe in probes
info ( CND.grey '--------------------------------------------------------' )
nr = 0
#.......................................................................................................
for row from db.longest_matching_prefix_in_uname_tokens { q: probe, limit: 10, }
nr += +1
# info probe, ( xrpr row )
info ( CND.grey nr ), ( CND.grey row.delta_length ), ( CND.blue probe ), ( CND.grey '->' ), ( CND.lime row.uname_token )
#.......................................................................................................
table = 'uname_tokens'
field = 'uname_token'
chrs = Array.from db.$.first_value db.next_characters { prefix: probe, table, field, }
info probe, '...', ( chrs.join ' ' )
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_nextchr = ( db ) ->
#.........................................................................................................
# whisper '-'.repeat 108
# for row from db.$.query """select * from unicode_test;"""
# info ( xrpr row )
#.........................................................................................................
whisper '-'.repeat 108
probes = [
'-'
'っ'
'か'
'\\'
'ku'
'a'
'x' ]
# table = 'unicode_test'
table = 'unicode_test_with_end_markers'
field = 'word'
for probe in probes
chrs = Array.from db.$.first_value db.next_characters { prefix: probe, table, field, }
info probe, '...', ( chrs.join ' ' )
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_edict2u = ( db ) ->
# debug INTERTYPE.all_keys_of db.$
db.create_table_edict2u()
console.time 'populate-edict2u'
path = join_path __dirname, '../../.cache/edict2u.sql'
help "reading #{PATH.relative process.cwd(), path}"
db.$.read path
help "creating indexes"
db.create_indexes_for_table_edict2u()
console.timeEnd 'populate-edict2u'
probes = [
'ち'
'ちゅ'
'ちゅう'
'ちゅうご'
'ちゅうごく'
'ちゅうごくの'
'ちゅうごくのせ'
'ちゅうごくのせい'
'ちゅうごくのせいふ'
]
limit = 10
for probe in probes
whisper '-'.repeat 108
info probe
nr = 0
for row from db.longest_matching_prefix_in_edict2u { q: probe, limit, }
nr += +1
info ( CND.grey nr ), ( CND.grey row.delta_length ), ( CND.grey '->' ), ( CND.lime row.candidate ), ( CND.white row.reading )
# for row from db.$.query "select * from edict2u where reading like 'ちゅうごく%' order by reading limit 5;"
# info row.candidate
#.........................................................................................................
return null
############################################################################################################
unless module.parent?
DEMO = @
do ->
db = DB.new_db { clear: false, }
# db = DB.new_db { clear: true, }
# DEMO._prepare_db db
# db = await DEMO.new_db()
# DEMO.demo_uname_tokens db
# DEMO.demo_fts5_token_phrases db
# urge '33342', db.$.first_value db.$.query """select plus( 34, 56 );"""
# urge '33342', db.$.first_value db.$.query """select e( plus( 'here', 'there' ) );"""
# info row for row from db.$.query """
# select split( 'helo world whassup', s.value ) as word
# from generate_series( 1, 10 ) as s
# where word is not null
# ;
# """
# DEMO.demo_spellfix db
# DEMO.demo_fts5_broken_phrases db
# DEMO.demo_json db
DEMO.demo_catalog db
info 'µ33344', rpr DEMO.demo_db_type_of db, 'edict2u'
info 'µ33344', rpr DEMO.demo_db_type_of db, 'xxx'
# DEMO.demo_longest_matching_prefix db
# DEMO.demo_edict2u db
# DEMO.demo_nextchr db
return null
| true |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = '明快打字机/EXPERIMENTS/ICQL+SQLITE'
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
info = CND.get_logger 'info', badge
urge = CND.get_logger 'urge', badge
help = CND.get_logger 'help', badge
whisper = CND.get_logger 'whisper', badge
echo = CND.echo.bind CND
#...........................................................................................................
PATH = require 'path'
# FS = require 'fs'
PD = require 'pipedreams'
{ $
$async
select } = PD
{ assign
jr } = CND
#...........................................................................................................
join_path = ( P... ) -> PATH.resolve PATH.join P...
boolean_as_int = ( x ) -> if x then 1 else 0
{ inspect, } = require 'util'
xrpr = ( x ) -> inspect x, { colors: yes, breakLength: Infinity, maxArrayLength: Infinity, depth: Infinity, }
xrpr2 = ( x ) -> inspect x, { colors: yes, breakLength: 80, maxArrayLength: Infinity, depth: Infinity, }
#...........................................................................................................
ICQL = require 'icql'
INTERTYPE = require '../types'
DB = require '../db'
#-----------------------------------------------------------------------------------------------------------
@_prepare_db = ( db ) ->
db.import_table_unames()
db.import_table_uname_tokens()
db.import_table_unicode_test()
db.create_view_unicode_test_with_end_markers()
db.fts5_create_and_populate_token_tables()
db.spellfix_create_editcosts()
db.spellfix_create_and_populate_token_tables()
db.spellfix_populate_custom_codes()
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_fts5_token_phrases = ( db ) ->
#.........................................................................................................
whisper '-'.repeat 108
urge 'demo_fts5_token_phrases'
token_phrases = [
'latin alpha'
'latin alpha small'
'latin alpha capital'
'greek alpha'
'greek alpha small'
'cyrillic small a'
]
for q in token_phrases
urge rpr q
info ( xrpr row ) for row from db.fts5_fetch_uname_token_matches { q, limit: 5, }
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_fts5_broken_phrases = ( db ) ->
#.........................................................................................................
whisper '-'.repeat 108
urge 'demo_fts5_broken_phrases'
cache = {}
broken_phrases = [
'latn alp'
'latn alp smll'
'latn alp cap'
'greek alpha'
'cap greek alpha'
'greek alpha small'
'cyrillic small a'
'ktkn'
'katakana'
'hirag no'
'no'
'xxx'
'istanbul'
'capital'
'mycode'
'123'
'^'
'´'
'`'
'"'
'~'
'~ a'
'~ a small'
'~ a capital'
'_'
'-'
'~~'
'%'
'_'
'~~'
'%'
'%0'
'%0 sign'
'kxr'
'kxr tree'
'n14 circled'
'circled n14'
'fourteen circled'
'- l'
]
### TAINT `initials` should be in `db.$.settings` ###
initials = 2
tokens = []
for broken_phrase in broken_phrases
#.......................................................................................................
for attempt in broken_phrase.split /\s+/
if ( hit = cache[ attempt ] ) is undefined
hit = db.$.first_value db.match_uname_tokens_spellfix { q: attempt, initials, limit: 1, }
cache[ attempt ] = hit ? null
# debug '27762', attempt, hit
tokens.push hit if hit?
#.......................................................................................................
debug tokens
if tokens.length < 1
warn "no token matches for #{rpr broken_phrase}"
continue
#.......................................................................................................
q = tokens.join ' '
tokens.length = 0
#.......................................................................................................
urge ( CND.white broken_phrase ), ( CND.grey '-->' ), ( CND.orange rpr q )
info ( xrpr row ) for row from db.fts5_fetch_uname_token_matches { q, limit: 5, }
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_uname_tokens = ( db ) ->
info ( xrpr row ) for row from db.$.query """select * from uname_tokens;"""
#-----------------------------------------------------------------------------------------------------------
@demo_spellfix = ( db ) ->
whisper '-'.repeat 108
urge 'demo_spellfix'
# info ( xrpr row ) for row from db.$.query 'select * from spellfix_editcosts;'
# db.$.execute """update spellfix_uname_tokens_vocab set k2 = upper( word );"""
# db.$.execute """update spellfix_uname_tokens_vocab set k2 = 'CDACM';"""
# info ( xrpr row ) for row from db.$.query """select * from spellfix_uname_tokens_vocab where word regexp '^[^0-9]' limit 30;"""
words = [
# 'were'
# 'whether'
# 'whater'
# 'thosand'
# 'fancy'
# 'fort'
# 'trof'
# 'latn'
# 'cap'
# 'letr'
# 'alif'
# 'hirag'
# 'hrg'
# 'hrgn'
# 'cyr'
# 'grk'
# 'grek'
# 'no'
# 'kata'
# 'katak'
# 'ktkn'
# 'katkn'
# 'ktkna'
# 'ktakn'
# 'standard'
# 'hiero'
# 'egt'
'egyp'
'hgl'
'xxx'
'istanbul'
'capital'
'mycode'
'123'
'^'
'´'
'`'
'"'
'~'
'_'
'-'
'~~'
'%'
'_'
'~~'
'%'
'%0'
'kxr'
]
### TAINT `initials` should be in `db.$.settings` ###
initials = 2
t0 = Date.now()
for q in words
qphonehash = db.$.first_value db.get_spellfix1_phonehash { q, }
# for row from db.match_uname_tokens_spellfix_with_scores { q, initials, limit: 15, }
# debug '----', q, 'I', initials, 'S', row.score, 'L', row.matchlen, 'D', row.distance, row.source, row.qphonehash, row.wphonehash, row.word
hits = db.$.all_first_values db.match_uname_tokens_spellfix { q, initials, limit: 5, }
hits = hits.join ', '
info "#{q} (#{qphonehash}) --> #{hits}"
t1 = Date.now()
dt = t1 - t0
tps = dt / words.length
urge "took #{dt} ms (#{tps.toFixed 1} ms per search)"
return null
#-----------------------------------------------------------------------------------------------------------
@demo_json = ( db ) ->
whisper '-'.repeat 108
urge 'demo_json'
info db.$.all_rows db.$.query """
select
x.words as words,
json_array_length ( x.words ) as word_count
from ( select
json( get_words( 'helo world these are many words' ) ) as words ) as x
;"""
whisper '---------------------------------------------'
info row for row from db.$.query """
select
id,
-- key,
type,
value
from json_each( json( get_words( 'helo world these are many words' ) ) )
;"""
whisper '---------------------------------------------'
info row for row from db.$.query """
select
id,
-- key,
type,
value
from json_each( json( '[1,1.5,1e6,true,false,"x",null,{"a":42},[1,2,3]]' ) )
;"""
whisper '---------------------------------------------'
info row for row from db.$.query """
select json_group_array( names.name )
from (
select null as name where false union all
select 'PI:NAME:<NAME>END_PI' union all
select 'PI:NAME:<NAME>END_PI' union all
select 'PI:NAME:<NAME>END_PI' union all
select 'PI:NAME:<NAME>END_PI' union all
select 'PI:NAME:<NAME>END_PI' union all
select null where false
) as names
;"""
whisper '---------------------------------------------'
info rpr JSON.parse db.$.first_value db.$.query """
select
json_group_object( staff.name, staff.extension ) as staff
from (
select null as name, null as extension where false union all
select 'PI:NAME:<NAME>END_PI', 123 union all
select 'PI:NAME:<NAME>END_PI', 150 union all
select 'PI:NAME:<NAME>END_PI', 177 union all
select 'PI:NAME:<NAME>END_PI', 204 union all
select 'PI:NAME:<NAME>END_PI', 231 union all
select null, null where false
) as staff
;"""
whisper '---------------------------------------------'
info xrpr row for row from db.$.query """
select
id as nr,
replace( fullkey, '$', '' ) as path,
key as key,
atom as value
from json_tree( json( '[1,1.5,1e6,true,false,"x",null,{"a":42,"c":[1,{"2":"sub"},3]}]' ) ) as t
where t.fullkey != '$'
;"""
return null
#-----------------------------------------------------------------------------------------------------------
@demo_catalog = ( db ) ->
for row from db.$.catalog()
entry = []
entry.push CND.grey row.type
entry.push CND.white row.name
entry.push CND.yellow "(#{row.tbl_name})" if row.name isnt row.tbl_name
info entry.join ' '
return null
#-----------------------------------------------------------------------------------------------------------
@demo_db_type_of = ( db, name ) ->
return db.$.type_of name
# for row from db.$.catalog()
# return row.type if row.name is name
# return null
#-----------------------------------------------------------------------------------------------------------
@demo_longest_matching_prefix = ( db ) ->
count = db.$.first_value db.$.query """select count(*) from uname_tokens;"""
info "selecting from #{count} entries in uname_tokens"
probes = [
'gr'
'alpha'
'beta'
'c'
'ca'
'cap'
'capi'
'omega'
'circ'
'circle'
]
for probe in probes
info ( CND.grey '--------------------------------------------------------' )
nr = 0
#.......................................................................................................
for row from db.longest_matching_prefix_in_uname_tokens { q: probe, limit: 10, }
nr += +1
# info probe, ( xrpr row )
info ( CND.grey nr ), ( CND.grey row.delta_length ), ( CND.blue probe ), ( CND.grey '->' ), ( CND.lime row.uname_token )
#.......................................................................................................
table = 'uname_tokens'
field = 'uname_token'
chrs = Array.from db.$.first_value db.next_characters { prefix: probe, table, field, }
info probe, '...', ( chrs.join ' ' )
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_nextchr = ( db ) ->
#.........................................................................................................
# whisper '-'.repeat 108
# for row from db.$.query """select * from unicode_test;"""
# info ( xrpr row )
#.........................................................................................................
whisper '-'.repeat 108
probes = [
'-'
'っ'
'か'
'\\'
'ku'
'a'
'x' ]
# table = 'unicode_test'
table = 'unicode_test_with_end_markers'
field = 'word'
for probe in probes
chrs = Array.from db.$.first_value db.next_characters { prefix: probe, table, field, }
info probe, '...', ( chrs.join ' ' )
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@demo_edict2u = ( db ) ->
# debug INTERTYPE.all_keys_of db.$
db.create_table_edict2u()
console.time 'populate-edict2u'
path = join_path __dirname, '../../.cache/edict2u.sql'
help "reading #{PATH.relative process.cwd(), path}"
db.$.read path
help "creating indexes"
db.create_indexes_for_table_edict2u()
console.timeEnd 'populate-edict2u'
probes = [
'ち'
'ちゅ'
'ちゅう'
'ちゅうご'
'ちゅうごく'
'ちゅうごくの'
'ちゅうごくのせ'
'ちゅうごくのせい'
'ちゅうごくのせいふ'
]
limit = 10
for probe in probes
whisper '-'.repeat 108
info probe
nr = 0
for row from db.longest_matching_prefix_in_edict2u { q: probe, limit, }
nr += +1
info ( CND.grey nr ), ( CND.grey row.delta_length ), ( CND.grey '->' ), ( CND.lime row.candidate ), ( CND.white row.reading )
# for row from db.$.query "select * from edict2u where reading like 'ちゅうごく%' order by reading limit 5;"
# info row.candidate
#.........................................................................................................
return null
############################################################################################################
unless module.parent?
DEMO = @
do ->
db = DB.new_db { clear: false, }
# db = DB.new_db { clear: true, }
# DEMO._prepare_db db
# db = await DEMO.new_db()
# DEMO.demo_uname_tokens db
# DEMO.demo_fts5_token_phrases db
# urge '33342', db.$.first_value db.$.query """select plus( 34, 56 );"""
# urge '33342', db.$.first_value db.$.query """select e( plus( 'here', 'there' ) );"""
# info row for row from db.$.query """
# select split( 'helo world whassup', s.value ) as word
# from generate_series( 1, 10 ) as s
# where word is not null
# ;
# """
# DEMO.demo_spellfix db
# DEMO.demo_fts5_broken_phrases db
# DEMO.demo_json db
DEMO.demo_catalog db
info 'µ33344', rpr DEMO.demo_db_type_of db, 'edict2u'
info 'µ33344', rpr DEMO.demo_db_type_of db, 'xxx'
# DEMO.demo_longest_matching_prefix db
# DEMO.demo_edict2u db
# DEMO.demo_nextchr db
return null
|
[
{
"context": "# beholder\n# Copyright (c) 2013 Charles Moncrief <cmoncrief@gmail.com>\n# MIT Licensed\n\n{EventEmitt",
"end": 48,
"score": 0.9998685717582703,
"start": 32,
"tag": "NAME",
"value": "Charles Moncrief"
},
{
"context": "# beholder\n# Copyright (c) 2013 Charles Moncrief <c... | src/beholder.coffee | cmoncrief/beholder | 0 | # beholder
# Copyright (c) 2013 Charles Moncrief <cmoncrief@gmail.com>
# MIT Licensed
{EventEmitter} = require 'events'
fs = require 'fs'
path = require 'path'
async = require 'async'
glob = require 'glob'
minimatch = require 'minimatch'
class Beholder extends EventEmitter
# Main entry point. Set up the options and initiate watching the
# supplied pattern
constructor: (@pattern, @options = {}, cb) ->
@files = []
@dirs = []
@patterns = []
@init = true
@options.interval ?= 5007
@options.persistent ?= true
@options.includeHidden ?= false
@options.exclude ?= []
@options.debug ?= false
@pollOpts = {interval: @options.interval, persistent: @options.persistent}
@startWatch @pattern, cb
#
# Private API Functions
#
# Start watching a given pattern. Invokes callback if supplied, and pauses
# briefly to give things time to settle down if needed.
startWatch: (pattern, cb) ->
if process.platform is 'win32'
`pattern = pattern.replace(/\\/g, "/")`
@patterns.push pattern
glob pattern, (err, matches) =>
if pattern.indexOf '*' isnt -1
@addDir pattern.replace /\/\*.*/, ''
async.each matches, @processPath, (err) =>
return cb(err) if err and cb
return handleError(err) if err
@init = false
finish = =>
cb(null, this) if cb
@emit 'ready'
setTimeout(finish, matches.length)
# Traverse a directory path looking for items to watch. Called recursively via
# a sub function. This is only run when a directory receives a change event.
walkPath: (base) =>
fs.stat base, (err, stats) =>
return @handleError(err) if err
if stats.isDirectory()
@addDir base
fs.readdir base, (err, files) =>
return @handleError(err) if err
for file in files
filePath = path.join base, file
if process.platform is 'win32'
`filePath = filePath.replace(/\\/g, "/")`
@processPath filePath, null, true
return
else
@addFile base, stats
# Evaluates a given path and adds it to the appropriate watcher list.
processPath: (filePath, cb, descend) =>
fs.stat filePath, (err, stats) =>
return @handleError(err, true) if err
if stats.isDirectory()
@addDir filePath
@walkPath filePath if descend
else
@addFile filePath, stats, @init
cb() if cb
# Adds a directory to the watch list
addDir: (dirPath) ->
return if @hiddenPath(dirPath)
return if dirPath in (i.name for i in @dirs)
@dirs.push {name: dirPath, watch: @initWatch(dirPath, @processDir)}
# Adds a file to the watch list.
addFile: (filePath, stats, silent) =>
return if @invalidFile filePath
@files.push
name: filePath
mtime: stats.mtime
watcher: @initWatch(filePath, @processFile)
@processFile(filePath, 'new') unless silent
if path.dirname(filePath) not in (i.name for i in @dirs)
@addDir path.dirname(filePath)
# Start watching a given path. Handles switching between watching and
# polling depending on the current number of watchers.
initWatch: (watchPath, watchFn) =>
if @maxFiles? and @files.length >= @maxFiles
return @initPoll(watchPath, watchFn)
try
fs.watch path.normalize(watchPath), (event, filename) =>
watchFn watchPath, event
catch err
if err.code is 'EMFILE'
@maxFiles = @files.length
@swapWatchers()
@initPoll watchPath, watchFn
else @handleError(err)
# Start polling a given path.
initPoll: (watchPath, watchFn) ->
fs.watchFile path.normalize(watchPath), @pollOpts, (curr, prev) =>
return if curr.mtime.getTime() and curr.mtime.getTime() < prev.mtime.getTime()
watchFn watchPath, 'change'
# Handle a raised event on a watched directory by traversing its path
# and looking for changes.
processDir: (dir, event) =>
@walkPath dir
# Handle a raised event on a watched file. After handling the event, removes
# the watcher and restarts it of rmemory handling purposes.
processFile: (filePath, event) =>
file = i for i in @files when i.name is filePath
fs.stat filePath, (err, stats) =>
if err?.code is 'ENOENT' then @removeWatch(file)
return @handleError(err) if err
return if event isnt 'new' and stats.mtime.getTime() is file.mtime.getTime()
file.mtime = stats.mtime
@emit 'any', filePath, event
@emit event, filePath
@removeWatch file, true
@addFile filePath, stats, true
file = null
filePath = null
event = null
return
# Stop watching a file.
removeWatch: (file, silent) =>
if file.watcher?.close?
file.watcher.close()
else
fs.unwatchFile file.name
@files = (i for i in @files when i.name isnt file.name)
@dirs = (i for i in @dirs when i.name isnt file.name)
@emit('remove', file.name) unless silent
@emit('any', file.name, 'remove') unless silent
file.watcher = null
file = null
# When the maximum number of files has been hit, this function
# will swap out several watchers for pollers in order to create
# available file handler headroom.
swapWatchers: =>
for file, index in @files when index > @maxFiles - 25
file.watcher.close() if file.watcher.close
file.watcher = null
file.watcher = @initPoll file.name, @processFile
return
# Returns true if this file should not be added to the watch list
invalidFile: (filePath) =>
return true if @hiddenPath(filePath)
return true if filePath in (i.name for i in @files)
return true unless @patternMatch(filePath)
(return true if minimatch filePath, i) for i in @options.exclude
return false
# Returns true if the file matches at least one of the stored patterns.
patternMatch: (filePath) ->
for pattern in @patterns
return true if minimatch(filePath, pattern)
return false
# Returns true if this is a hidden dotfile.
hiddenPath: (filePath) =>
path.basename(filePath)[0] is '.' and !@options.includeHidden
# Emits the error event and returns the error. Suppress ENOENT
handleError: (error) =>
console.log(error) if @options.debug
return error if error.code is 'ENOENT'
@emit 'error', error
error
#
# Public API Functions
#
# Remove a specified file path from the watch list.
remove: (filePath, silent) =>
file = i for i in @files when i.name is filePath
unless file then file = i for i in @dirs when i.name is filePath
return console.log(new Error("File not found")) unless file
@removeWatch file, silent
# Remove all files from the watch list.
removeAll: (silent) =>
@removeWatch file, silent for file in @files
return
# Add new paths to the watch list that match pattern
add: (pattern, cb) =>
@startWatch pattern, cb
# Returns an array of all file names on the watch list
list: =>
(i.name for i in @files)
# Returns an array of all directory names on the watch list
listDir: =>
(i.name for i in @dirs)
# Main entry point. Returns a new instance of Beholder.
module.exports = (pattern, options, cb) ->
if !cb and typeof options is 'function'
cb = options
options = {}
new Beholder(pattern, options, cb)
| 150079 | # beholder
# Copyright (c) 2013 <NAME> <<EMAIL>>
# MIT Licensed
{EventEmitter} = require 'events'
fs = require 'fs'
path = require 'path'
async = require 'async'
glob = require 'glob'
minimatch = require 'minimatch'
class Beholder extends EventEmitter
# Main entry point. Set up the options and initiate watching the
# supplied pattern
constructor: (@pattern, @options = {}, cb) ->
@files = []
@dirs = []
@patterns = []
@init = true
@options.interval ?= 5007
@options.persistent ?= true
@options.includeHidden ?= false
@options.exclude ?= []
@options.debug ?= false
@pollOpts = {interval: @options.interval, persistent: @options.persistent}
@startWatch @pattern, cb
#
# Private API Functions
#
# Start watching a given pattern. Invokes callback if supplied, and pauses
# briefly to give things time to settle down if needed.
startWatch: (pattern, cb) ->
if process.platform is 'win32'
`pattern = pattern.replace(/\\/g, "/")`
@patterns.push pattern
glob pattern, (err, matches) =>
if pattern.indexOf '*' isnt -1
@addDir pattern.replace /\/\*.*/, ''
async.each matches, @processPath, (err) =>
return cb(err) if err and cb
return handleError(err) if err
@init = false
finish = =>
cb(null, this) if cb
@emit 'ready'
setTimeout(finish, matches.length)
# Traverse a directory path looking for items to watch. Called recursively via
# a sub function. This is only run when a directory receives a change event.
walkPath: (base) =>
fs.stat base, (err, stats) =>
return @handleError(err) if err
if stats.isDirectory()
@addDir base
fs.readdir base, (err, files) =>
return @handleError(err) if err
for file in files
filePath = path.join base, file
if process.platform is 'win32'
`filePath = filePath.replace(/\\/g, "/")`
@processPath filePath, null, true
return
else
@addFile base, stats
# Evaluates a given path and adds it to the appropriate watcher list.
processPath: (filePath, cb, descend) =>
fs.stat filePath, (err, stats) =>
return @handleError(err, true) if err
if stats.isDirectory()
@addDir filePath
@walkPath filePath if descend
else
@addFile filePath, stats, @init
cb() if cb
# Adds a directory to the watch list
addDir: (dirPath) ->
return if @hiddenPath(dirPath)
return if dirPath in (i.name for i in @dirs)
@dirs.push {name: dirPath, watch: @initWatch(dirPath, @processDir)}
# Adds a file to the watch list.
addFile: (filePath, stats, silent) =>
return if @invalidFile filePath
@files.push
name: filePath
mtime: stats.mtime
watcher: @initWatch(filePath, @processFile)
@processFile(filePath, 'new') unless silent
if path.dirname(filePath) not in (i.name for i in @dirs)
@addDir path.dirname(filePath)
# Start watching a given path. Handles switching between watching and
# polling depending on the current number of watchers.
initWatch: (watchPath, watchFn) =>
if @maxFiles? and @files.length >= @maxFiles
return @initPoll(watchPath, watchFn)
try
fs.watch path.normalize(watchPath), (event, filename) =>
watchFn watchPath, event
catch err
if err.code is 'EMFILE'
@maxFiles = @files.length
@swapWatchers()
@initPoll watchPath, watchFn
else @handleError(err)
# Start polling a given path.
initPoll: (watchPath, watchFn) ->
fs.watchFile path.normalize(watchPath), @pollOpts, (curr, prev) =>
return if curr.mtime.getTime() and curr.mtime.getTime() < prev.mtime.getTime()
watchFn watchPath, 'change'
# Handle a raised event on a watched directory by traversing its path
# and looking for changes.
processDir: (dir, event) =>
@walkPath dir
# Handle a raised event on a watched file. After handling the event, removes
# the watcher and restarts it of rmemory handling purposes.
processFile: (filePath, event) =>
file = i for i in @files when i.name is filePath
fs.stat filePath, (err, stats) =>
if err?.code is 'ENOENT' then @removeWatch(file)
return @handleError(err) if err
return if event isnt 'new' and stats.mtime.getTime() is file.mtime.getTime()
file.mtime = stats.mtime
@emit 'any', filePath, event
@emit event, filePath
@removeWatch file, true
@addFile filePath, stats, true
file = null
filePath = null
event = null
return
# Stop watching a file.
removeWatch: (file, silent) =>
if file.watcher?.close?
file.watcher.close()
else
fs.unwatchFile file.name
@files = (i for i in @files when i.name isnt file.name)
@dirs = (i for i in @dirs when i.name isnt file.name)
@emit('remove', file.name) unless silent
@emit('any', file.name, 'remove') unless silent
file.watcher = null
file = null
# When the maximum number of files has been hit, this function
# will swap out several watchers for pollers in order to create
# available file handler headroom.
swapWatchers: =>
for file, index in @files when index > @maxFiles - 25
file.watcher.close() if file.watcher.close
file.watcher = null
file.watcher = @initPoll file.name, @processFile
return
# Returns true if this file should not be added to the watch list
invalidFile: (filePath) =>
return true if @hiddenPath(filePath)
return true if filePath in (i.name for i in @files)
return true unless @patternMatch(filePath)
(return true if minimatch filePath, i) for i in @options.exclude
return false
# Returns true if the file matches at least one of the stored patterns.
patternMatch: (filePath) ->
for pattern in @patterns
return true if minimatch(filePath, pattern)
return false
# Returns true if this is a hidden dotfile.
hiddenPath: (filePath) =>
path.basename(filePath)[0] is '.' and !@options.includeHidden
# Emits the error event and returns the error. Suppress ENOENT
handleError: (error) =>
console.log(error) if @options.debug
return error if error.code is 'ENOENT'
@emit 'error', error
error
#
# Public API Functions
#
# Remove a specified file path from the watch list.
remove: (filePath, silent) =>
file = i for i in @files when i.name is filePath
unless file then file = i for i in @dirs when i.name is filePath
return console.log(new Error("File not found")) unless file
@removeWatch file, silent
# Remove all files from the watch list.
removeAll: (silent) =>
@removeWatch file, silent for file in @files
return
# Add new paths to the watch list that match pattern
add: (pattern, cb) =>
@startWatch pattern, cb
# Returns an array of all file names on the watch list
list: =>
(i.name for i in @files)
# Returns an array of all directory names on the watch list
listDir: =>
(i.name for i in @dirs)
# Main entry point. Returns a new instance of Beholder.
module.exports = (pattern, options, cb) ->
if !cb and typeof options is 'function'
cb = options
options = {}
new Beholder(pattern, options, cb)
| true | # beholder
# Copyright (c) 2013 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# MIT Licensed
{EventEmitter} = require 'events'
fs = require 'fs'
path = require 'path'
async = require 'async'
glob = require 'glob'
minimatch = require 'minimatch'
class Beholder extends EventEmitter
# Main entry point. Set up the options and initiate watching the
# supplied pattern
constructor: (@pattern, @options = {}, cb) ->
@files = []
@dirs = []
@patterns = []
@init = true
@options.interval ?= 5007
@options.persistent ?= true
@options.includeHidden ?= false
@options.exclude ?= []
@options.debug ?= false
@pollOpts = {interval: @options.interval, persistent: @options.persistent}
@startWatch @pattern, cb
#
# Private API Functions
#
# Start watching a given pattern. Invokes callback if supplied, and pauses
# briefly to give things time to settle down if needed.
startWatch: (pattern, cb) ->
if process.platform is 'win32'
`pattern = pattern.replace(/\\/g, "/")`
@patterns.push pattern
glob pattern, (err, matches) =>
if pattern.indexOf '*' isnt -1
@addDir pattern.replace /\/\*.*/, ''
async.each matches, @processPath, (err) =>
return cb(err) if err and cb
return handleError(err) if err
@init = false
finish = =>
cb(null, this) if cb
@emit 'ready'
setTimeout(finish, matches.length)
# Traverse a directory path looking for items to watch. Called recursively via
# a sub function. This is only run when a directory receives a change event.
walkPath: (base) =>
fs.stat base, (err, stats) =>
return @handleError(err) if err
if stats.isDirectory()
@addDir base
fs.readdir base, (err, files) =>
return @handleError(err) if err
for file in files
filePath = path.join base, file
if process.platform is 'win32'
`filePath = filePath.replace(/\\/g, "/")`
@processPath filePath, null, true
return
else
@addFile base, stats
# Evaluates a given path and adds it to the appropriate watcher list.
processPath: (filePath, cb, descend) =>
fs.stat filePath, (err, stats) =>
return @handleError(err, true) if err
if stats.isDirectory()
@addDir filePath
@walkPath filePath if descend
else
@addFile filePath, stats, @init
cb() if cb
# Adds a directory to the watch list
addDir: (dirPath) ->
return if @hiddenPath(dirPath)
return if dirPath in (i.name for i in @dirs)
@dirs.push {name: dirPath, watch: @initWatch(dirPath, @processDir)}
# Adds a file to the watch list.
addFile: (filePath, stats, silent) =>
return if @invalidFile filePath
@files.push
name: filePath
mtime: stats.mtime
watcher: @initWatch(filePath, @processFile)
@processFile(filePath, 'new') unless silent
if path.dirname(filePath) not in (i.name for i in @dirs)
@addDir path.dirname(filePath)
# Start watching a given path. Handles switching between watching and
# polling depending on the current number of watchers.
initWatch: (watchPath, watchFn) =>
if @maxFiles? and @files.length >= @maxFiles
return @initPoll(watchPath, watchFn)
try
fs.watch path.normalize(watchPath), (event, filename) =>
watchFn watchPath, event
catch err
if err.code is 'EMFILE'
@maxFiles = @files.length
@swapWatchers()
@initPoll watchPath, watchFn
else @handleError(err)
# Start polling a given path.
initPoll: (watchPath, watchFn) ->
fs.watchFile path.normalize(watchPath), @pollOpts, (curr, prev) =>
return if curr.mtime.getTime() and curr.mtime.getTime() < prev.mtime.getTime()
watchFn watchPath, 'change'
# Handle a raised event on a watched directory by traversing its path
# and looking for changes.
processDir: (dir, event) =>
@walkPath dir
# Handle a raised event on a watched file. After handling the event, removes
# the watcher and restarts it of rmemory handling purposes.
processFile: (filePath, event) =>
file = i for i in @files when i.name is filePath
fs.stat filePath, (err, stats) =>
if err?.code is 'ENOENT' then @removeWatch(file)
return @handleError(err) if err
return if event isnt 'new' and stats.mtime.getTime() is file.mtime.getTime()
file.mtime = stats.mtime
@emit 'any', filePath, event
@emit event, filePath
@removeWatch file, true
@addFile filePath, stats, true
file = null
filePath = null
event = null
return
# Stop watching a file.
removeWatch: (file, silent) =>
if file.watcher?.close?
file.watcher.close()
else
fs.unwatchFile file.name
@files = (i for i in @files when i.name isnt file.name)
@dirs = (i for i in @dirs when i.name isnt file.name)
@emit('remove', file.name) unless silent
@emit('any', file.name, 'remove') unless silent
file.watcher = null
file = null
# When the maximum number of files has been hit, this function
# will swap out several watchers for pollers in order to create
# available file handler headroom.
swapWatchers: =>
for file, index in @files when index > @maxFiles - 25
file.watcher.close() if file.watcher.close
file.watcher = null
file.watcher = @initPoll file.name, @processFile
return
# Returns true if this file should not be added to the watch list
invalidFile: (filePath) =>
return true if @hiddenPath(filePath)
return true if filePath in (i.name for i in @files)
return true unless @patternMatch(filePath)
(return true if minimatch filePath, i) for i in @options.exclude
return false
# Returns true if the file matches at least one of the stored patterns.
patternMatch: (filePath) ->
for pattern in @patterns
return true if minimatch(filePath, pattern)
return false
# Returns true if this is a hidden dotfile.
hiddenPath: (filePath) =>
path.basename(filePath)[0] is '.' and !@options.includeHidden
# Emits the error event and returns the error. Suppress ENOENT
handleError: (error) =>
console.log(error) if @options.debug
return error if error.code is 'ENOENT'
@emit 'error', error
error
#
# Public API Functions
#
# Remove a specified file path from the watch list.
remove: (filePath, silent) =>
file = i for i in @files when i.name is filePath
unless file then file = i for i in @dirs when i.name is filePath
return console.log(new Error("File not found")) unless file
@removeWatch file, silent
# Remove all files from the watch list.
removeAll: (silent) =>
@removeWatch file, silent for file in @files
return
# Add new paths to the watch list that match pattern
add: (pattern, cb) =>
@startWatch pattern, cb
# Returns an array of all file names on the watch list
list: =>
(i.name for i in @files)
# Returns an array of all directory names on the watch list
listDir: =>
(i.name for i in @dirs)
# Main entry point. Returns a new instance of Beholder.
module.exports = (pattern, options, cb) ->
if !cb and typeof options is 'function'
cb = options
options = {}
new Beholder(pattern, options, cb)
|
[
{
"context": "ted property\", ->\n person1 = Serenade(name: \"Jonas\")\n person2 = Serenade(name: \"Kim\")\n boo",
"end": 1139,
"score": 0.9997895359992981,
"start": 1134,
"tag": "NAME",
"value": "Jonas"
},
{
"context": "de(name: \"Jonas\")\n person2 = Serenade(na... | test/channel.spec.coffee | jnicklas/serenade.js | 1 | require "./spec_helper"
Serenade = require("../lib/serenade")
{Channel} = Serenade
{defineProperty, defineAttribute} = Serenade
describe "Serenade.Channel", ->
describe ".all", ->
it "combines multiple channels", ->
channel1 = Channel.of(1)
channel2 = Channel.of(2)
channel3 = Channel.of(3)
combined = Channel.all([channel1, channel2, channel3]).map((args) => args.join(","))
expect(combined.value).to.equal("1,2,3")
expect(-> channel1.emit(4)).to.emit(combined, with: "4,2,3")
expect(-> channel2.emit(5)).to.emit(combined, with: "4,5,3")
expect(-> channel3.emit(6)).to.emit(combined, with: "4,5,6")
it "can combine channels and non channels", ->
channel1 = Channel.of(1)
channel2 = undefined
channel3 = 123
combined = Channel.all([channel1, channel2, channel3]).map((args) => args.join(","))
expect(combined.value).to.equal("1,,123")
expect(-> channel1.emit(2)).to.emit(combined, with: "2,,123")
describe ".pluck", ->
it "creates a channel which listens to changes in a nested property", ->
person1 = Serenade(name: "Jonas")
person2 = Serenade(name: "Kim")
book = Serenade(author: person1)
channel = Channel.pluck(book, "author.name")
expect(channel.value).to.equal("Jonas")
expect(-> book.author = person2).to.emit(channel, with: "Kim")
expect(-> person2.name = "Eli").to.emit(channel, with: "Eli")
expect(channel.value).to.equal("Eli")
it "creates a channel which listens to changes in all nested properties", ->
person1 = Serenade(name: "Jonas")
person2 = Serenade(name: "Kim")
person3 = Serenade(name: "Eli")
book = Serenade(authors: [person1, person2])
channel = Channel.pluck(book, "authors:name")
expect(channel.value).to.eql(["Jonas", "Kim"])
expect(-> book.authors = [person2, person3]).to.emit(channel, with: ["Kim", "Eli"])
expect(-> person2.name = "Anna").to.emit(channel, with: ["Anna", "Eli"])
expect(channel.value).to.eql(["Anna", "Eli"])
it "creates a channel which listens to a given property", ->
person = Serenade(name: "Jonas")
channel = Channel.pluck(person, "name")
expect(channel.value).to.eql("Jonas")
expect(-> person.name = "Eli").to.emit(channel, with: "Eli")
expect(channel.value).to.eql("Eli")
describe "#emit", ->
it "broadcasts a new value to all subscribers in order", ->
sum = 0
channel = Channel.of(2)
channel.bind((x) => sum = sum + x)
channel.bind((x) => sum = sum * x)
channel.emit(3)
expect(sum).to.equal(21)
describe "#static", ->
it "creates a new channel which never emits", ->
channel = Channel.of(2)
staticChannel = channel.static()
expect(staticChannel.value).to.equal(2)
expect(-> channel.emit(3)).not.to.emit(staticChannel)
expect(staticChannel.value).to.equal(3)
describe "#map", ->
it "creates a new channel which maps over the existing channel", ->
channel = Channel.of(2)
double = channel.map((x) => x * 2)
expect(channel.value).to.equal(2)
expect(double.value).to.equal(4)
expect(-> channel.emit(3)).to.emit(double, with: 6)
expect(channel.value).to.equal(3)
expect(double.value).to.equal(6)
describe "#filter", ->
it "creates a new channel which filters the channel", ->
channel = Channel.of(2)
even = channel.filter((x) => x % 2 is 0)
expect(channel.value).to.equal(2)
expect(even.value).to.equal(2)
expect(-> channel.emit(6)).to.emit(even, with: 6)
expect(-> channel.emit(3)).not.to.emit(even)
expect(channel.value).to.equal(3)
expect(even.value).to.equal(3)
it "can take boolean option", ->
channel = Channel.of(2)
always = channel.filter(true)
never = channel.filter(false)
expect(-> channel.emit(3)).to.emit(always, with: 3)
expect(-> channel.emit(3)).not.to.emit(never)
expect(always.value).to.equal(3)
expect(never.value).to.equal(3)
describe "#pluck", ->
it "creates a new channel which returns the given property", ->
channel = Channel.of(name: "Jonas")
names = channel.pluck("name")
expect(names.value).to.equal("Jonas")
expect(-> channel.emit(name: "Hanna")).to.emit(names, with: "Hanna")
expect(names.value).to.equal("Hanna")
it "gets value from attached channel and emits event when it changes", ->
object = Serenade(name: "Jonas")
channel = Channel.of(object)
names = channel.pluck("name")
expect(names.value).to.equal("Jonas")
expect(-> object.name = "Eli").to.emit(names, with: "Eli")
expect(names.value).to.equal("Eli")
it "doesn't emit values from old objects", ->
person1 = Serenade(name: "Jonas")
person2 = Serenade(name: "Eli")
channel = Channel.of(person1)
names = channel.pluck("name")
expect(-> channel.emit(person2)).to.emit(names, with: "Eli")
expect(-> person1.name = "Harry").not.to.emit(names)
expect(-> person2.name = "Fia").to.emit(names, with: "Fia")
describe "#pluckAll", ->
it "creates a new channel which returns the the given property of each element", ->
channel = Channel.of([{ name: "Jonas" }, { name: "Eli" }])
names = channel.pluckAll("name")
expect(names.value).to.eql(["Jonas", "Eli"])
expect(-> channel.emit([{ name: "Kim" }])).to.emit(names, with: ["Kim"])
expect(names.value).to.eql(["Kim"])
it "gets value from attached channel and emits event when it changes", ->
person1 = Serenade(name: "Jonas")
person2 = Serenade(name: "Kim")
channel = Channel.of([person1, person2])
names = channel.pluckAll("name")
expect(names.value).to.eql(["Jonas", "Kim"])
expect(-> person1.name = "Eli").to.emit(names, with: ["Eli", "Kim"])
expect(names.value).to.eql(["Eli", "Kim"])
it "emits values when collection changed", ->
person1 = Serenade(name: "Jonas")
person2 = Serenade(name: "Kim")
authors = new Serenade.Collection([person1, person2])
channel = Channel.of(authors)
names = channel.pluckAll("name")
expect(names.value.toArray()).to.eql(["Jonas", "Kim"])
expect(-> authors.push(Serenade(name: "Eli"))).to.emit(names.map((x) -> x.toArray()), with: ["Jonas", "Kim", "Eli"])
expect(names.value.toArray()).to.eql(["Jonas", "Kim", "Eli"])
it "doesn't emit values from old objects", ->
person1 = Serenade(name: "Jonas")
person2 = Serenade(name: "Kim")
person3 = Serenade(name: "Fia")
channel = Channel.of([person1, person2])
names = channel.pluckAll("name")
channel.emit([person3, person2])
expect(-> person1.name = "Ville").not.to.emit(names)
expect(-> person2.name = "Tor").to.emit(names, with: ["Fia", "Tor"])
expect(-> person3.name = "Anna").to.emit(names, with: ["Anna", "Tor"])
expect(names.value).to.eql(["Anna", "Tor"])
describe "#collection", ->
it "emits values when collection changed", ->
person1 = "Jonas"
person2 = "Kim"
authors = new Serenade.Collection([person1, person2])
channel = Channel.of(authors)
names = channel.collection()
expect(names.value.toArray()).to.eql(["Jonas", "Kim"])
expect(-> authors.push("Eli")).to.emit(names)
expect(names.value.toArray()).to.eql(["Jonas", "Kim", "Eli"])
| 121210 | require "./spec_helper"
Serenade = require("../lib/serenade")
{Channel} = Serenade
{defineProperty, defineAttribute} = Serenade
describe "Serenade.Channel", ->
describe ".all", ->
it "combines multiple channels", ->
channel1 = Channel.of(1)
channel2 = Channel.of(2)
channel3 = Channel.of(3)
combined = Channel.all([channel1, channel2, channel3]).map((args) => args.join(","))
expect(combined.value).to.equal("1,2,3")
expect(-> channel1.emit(4)).to.emit(combined, with: "4,2,3")
expect(-> channel2.emit(5)).to.emit(combined, with: "4,5,3")
expect(-> channel3.emit(6)).to.emit(combined, with: "4,5,6")
it "can combine channels and non channels", ->
channel1 = Channel.of(1)
channel2 = undefined
channel3 = 123
combined = Channel.all([channel1, channel2, channel3]).map((args) => args.join(","))
expect(combined.value).to.equal("1,,123")
expect(-> channel1.emit(2)).to.emit(combined, with: "2,,123")
describe ".pluck", ->
it "creates a channel which listens to changes in a nested property", ->
person1 = Serenade(name: "<NAME>")
person2 = Serenade(name: "<NAME>")
book = Serenade(author: person1)
channel = Channel.pluck(book, "author.name")
expect(channel.value).to.equal("Jon<NAME>")
expect(-> book.author = person2).to.emit(channel, with: "Kim")
expect(-> person2.name = "<NAME>").to.emit(channel, with: "<NAME>")
expect(channel.value).to.equal("Eli")
it "creates a channel which listens to changes in all nested properties", ->
person1 = Serenade(name: "<NAME>")
person2 = Serenade(name: "<NAME>")
person3 = Serenade(name: "<NAME>")
book = Serenade(authors: [person1, person2])
channel = Channel.pluck(book, "authors:name")
expect(channel.value).to.eql(["<NAME>", "<NAME>"])
expect(-> book.authors = [person2, person3]).to.emit(channel, with: ["<NAME>", "<NAME>"])
expect(-> person2.name = "<NAME>").to.emit(channel, with: ["<NAME>", "<NAME>"])
expect(channel.value).to.eql(["<NAME>", "<NAME>"])
it "creates a channel which listens to a given property", ->
person = Serenade(name: "<NAME>")
channel = Channel.pluck(person, "name")
expect(channel.value).to.eql("<NAME>")
expect(-> person.name = "<NAME>").to.emit(channel, with: "<NAME>")
expect(channel.value).to.eql("<NAME>")
describe "#emit", ->
it "broadcasts a new value to all subscribers in order", ->
sum = 0
channel = Channel.of(2)
channel.bind((x) => sum = sum + x)
channel.bind((x) => sum = sum * x)
channel.emit(3)
expect(sum).to.equal(21)
describe "#static", ->
it "creates a new channel which never emits", ->
channel = Channel.of(2)
staticChannel = channel.static()
expect(staticChannel.value).to.equal(2)
expect(-> channel.emit(3)).not.to.emit(staticChannel)
expect(staticChannel.value).to.equal(3)
describe "#map", ->
it "creates a new channel which maps over the existing channel", ->
channel = Channel.of(2)
double = channel.map((x) => x * 2)
expect(channel.value).to.equal(2)
expect(double.value).to.equal(4)
expect(-> channel.emit(3)).to.emit(double, with: 6)
expect(channel.value).to.equal(3)
expect(double.value).to.equal(6)
describe "#filter", ->
it "creates a new channel which filters the channel", ->
channel = Channel.of(2)
even = channel.filter((x) => x % 2 is 0)
expect(channel.value).to.equal(2)
expect(even.value).to.equal(2)
expect(-> channel.emit(6)).to.emit(even, with: 6)
expect(-> channel.emit(3)).not.to.emit(even)
expect(channel.value).to.equal(3)
expect(even.value).to.equal(3)
it "can take boolean option", ->
channel = Channel.of(2)
always = channel.filter(true)
never = channel.filter(false)
expect(-> channel.emit(3)).to.emit(always, with: 3)
expect(-> channel.emit(3)).not.to.emit(never)
expect(always.value).to.equal(3)
expect(never.value).to.equal(3)
describe "#pluck", ->
it "creates a new channel which returns the given property", ->
channel = Channel.of(name: "<NAME>")
names = channel.pluck("name")
expect(names.value).to.equal("<NAME>")
expect(-> channel.emit(name: "<NAME>")).to.emit(names, with: "<NAME>")
expect(names.value).to.equal("<NAME>")
it "gets value from attached channel and emits event when it changes", ->
object = Serenade(name: "<NAME>")
channel = Channel.of(object)
names = channel.pluck("name")
expect(names.value).to.equal("<NAME>")
expect(-> object.name = "<NAME>").to.emit(names, with: "<NAME>")
expect(names.value).to.equal("<NAME>")
it "doesn't emit values from old objects", ->
person1 = Serenade(name: "<NAME>")
person2 = Serenade(name: "<NAME>")
channel = Channel.of(person1)
names = channel.pluck("name")
expect(-> channel.emit(person2)).to.emit(names, with: "<NAME>")
expect(-> person1.name = "<NAME>").not.to.emit(names)
expect(-> person2.name = "<NAME>").to.emit(names, with: "<NAME>")
describe "#pluckAll", ->
it "creates a new channel which returns the the given property of each element", ->
channel = Channel.of([{ name: "<NAME>" }, { name: "<NAME>" }])
names = channel.pluckAll("name")
expect(names.value).to.eql(["<NAME>", "<NAME>"])
expect(-> channel.emit([{ name: "<NAME>" }])).to.emit(names, with: ["<NAME>"])
expect(names.value).to.eql(["<NAME>"])
it "gets value from attached channel and emits event when it changes", ->
person1 = Serenade(name: "<NAME>")
person2 = Serenade(name: "<NAME>")
channel = Channel.of([person1, person2])
names = channel.pluckAll("name")
expect(names.value).to.eql(["<NAME>", "<NAME>"])
expect(-> person1.name = "<NAME>").to.emit(names, with: ["<NAME>", "<NAME>"])
expect(names.value).to.eql(["<NAME>", "<NAME>"])
it "emits values when collection changed", ->
person1 = Serenade(name: "<NAME>")
person2 = Serenade(name: "<NAME>")
authors = new Serenade.Collection([person1, person2])
channel = Channel.of(authors)
names = channel.pluckAll("name")
expect(names.value.toArray()).to.eql(["<NAME>", "<NAME>"])
expect(-> authors.push(Serenade(name: "<NAME>"))).to.emit(names.map((x) -> x.toArray()), with: ["<NAME>", "<NAME>", "<NAME>"])
expect(names.value.toArray()).to.eql(["<NAME>", "<NAME>", "<NAME>"])
it "doesn't emit values from old objects", ->
person1 = Serenade(name: "<NAME>")
person2 = Serenade(name: "<NAME>")
person3 = Serenade(name: "<NAME>")
channel = Channel.of([person1, person2])
names = channel.pluckAll("name")
channel.emit([person3, person2])
expect(-> person1.name = "<NAME>").not.to.emit(names)
expect(-> person2.name = "<NAME>").to.emit(names, with: ["<NAME>", "<NAME>"])
expect(-> person3.name = "<NAME>").to.emit(names, with: ["<NAME>", "<NAME>"])
expect(names.value).to.eql(["<NAME>", "<NAME>"])
describe "#collection", ->
it "emits values when collection changed", ->
person1 = "<NAME>"
person2 = "<NAME>"
authors = new Serenade.Collection([person1, person2])
channel = Channel.of(authors)
names = channel.collection()
expect(names.value.toArray()).to.eql(["<NAME>", "<NAME>"])
expect(-> authors.push("<NAME>")).to.emit(names)
expect(names.value.toArray()).to.eql(["<NAME>", "<NAME>", "<NAME>"])
| true | require "./spec_helper"
Serenade = require("../lib/serenade")
{Channel} = Serenade
{defineProperty, defineAttribute} = Serenade
describe "Serenade.Channel", ->
describe ".all", ->
it "combines multiple channels", ->
channel1 = Channel.of(1)
channel2 = Channel.of(2)
channel3 = Channel.of(3)
combined = Channel.all([channel1, channel2, channel3]).map((args) => args.join(","))
expect(combined.value).to.equal("1,2,3")
expect(-> channel1.emit(4)).to.emit(combined, with: "4,2,3")
expect(-> channel2.emit(5)).to.emit(combined, with: "4,5,3")
expect(-> channel3.emit(6)).to.emit(combined, with: "4,5,6")
it "can combine channels and non channels", ->
channel1 = Channel.of(1)
channel2 = undefined
channel3 = 123
combined = Channel.all([channel1, channel2, channel3]).map((args) => args.join(","))
expect(combined.value).to.equal("1,,123")
expect(-> channel1.emit(2)).to.emit(combined, with: "2,,123")
describe ".pluck", ->
it "creates a channel which listens to changes in a nested property", ->
person1 = Serenade(name: "PI:NAME:<NAME>END_PI")
person2 = Serenade(name: "PI:NAME:<NAME>END_PI")
book = Serenade(author: person1)
channel = Channel.pluck(book, "author.name")
expect(channel.value).to.equal("JonPI:NAME:<NAME>END_PI")
expect(-> book.author = person2).to.emit(channel, with: "Kim")
expect(-> person2.name = "PI:NAME:<NAME>END_PI").to.emit(channel, with: "PI:NAME:<NAME>END_PI")
expect(channel.value).to.equal("Eli")
it "creates a channel which listens to changes in all nested properties", ->
person1 = Serenade(name: "PI:NAME:<NAME>END_PI")
person2 = Serenade(name: "PI:NAME:<NAME>END_PI")
person3 = Serenade(name: "PI:NAME:<NAME>END_PI")
book = Serenade(authors: [person1, person2])
channel = Channel.pluck(book, "authors:name")
expect(channel.value).to.eql(["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
expect(-> book.authors = [person2, person3]).to.emit(channel, with: ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
expect(-> person2.name = "PI:NAME:<NAME>END_PI").to.emit(channel, with: ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
expect(channel.value).to.eql(["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
it "creates a channel which listens to a given property", ->
person = Serenade(name: "PI:NAME:<NAME>END_PI")
channel = Channel.pluck(person, "name")
expect(channel.value).to.eql("PI:NAME:<NAME>END_PI")
expect(-> person.name = "PI:NAME:<NAME>END_PI").to.emit(channel, with: "PI:NAME:<NAME>END_PI")
expect(channel.value).to.eql("PI:NAME:<NAME>END_PI")
describe "#emit", ->
it "broadcasts a new value to all subscribers in order", ->
sum = 0
channel = Channel.of(2)
channel.bind((x) => sum = sum + x)
channel.bind((x) => sum = sum * x)
channel.emit(3)
expect(sum).to.equal(21)
describe "#static", ->
it "creates a new channel which never emits", ->
channel = Channel.of(2)
staticChannel = channel.static()
expect(staticChannel.value).to.equal(2)
expect(-> channel.emit(3)).not.to.emit(staticChannel)
expect(staticChannel.value).to.equal(3)
describe "#map", ->
it "creates a new channel which maps over the existing channel", ->
channel = Channel.of(2)
double = channel.map((x) => x * 2)
expect(channel.value).to.equal(2)
expect(double.value).to.equal(4)
expect(-> channel.emit(3)).to.emit(double, with: 6)
expect(channel.value).to.equal(3)
expect(double.value).to.equal(6)
describe "#filter", ->
it "creates a new channel which filters the channel", ->
channel = Channel.of(2)
even = channel.filter((x) => x % 2 is 0)
expect(channel.value).to.equal(2)
expect(even.value).to.equal(2)
expect(-> channel.emit(6)).to.emit(even, with: 6)
expect(-> channel.emit(3)).not.to.emit(even)
expect(channel.value).to.equal(3)
expect(even.value).to.equal(3)
it "can take boolean option", ->
channel = Channel.of(2)
always = channel.filter(true)
never = channel.filter(false)
expect(-> channel.emit(3)).to.emit(always, with: 3)
expect(-> channel.emit(3)).not.to.emit(never)
expect(always.value).to.equal(3)
expect(never.value).to.equal(3)
describe "#pluck", ->
it "creates a new channel which returns the given property", ->
channel = Channel.of(name: "PI:NAME:<NAME>END_PI")
names = channel.pluck("name")
expect(names.value).to.equal("PI:NAME:<NAME>END_PI")
expect(-> channel.emit(name: "PI:NAME:<NAME>END_PI")).to.emit(names, with: "PI:NAME:<NAME>END_PI")
expect(names.value).to.equal("PI:NAME:<NAME>END_PI")
it "gets value from attached channel and emits event when it changes", ->
object = Serenade(name: "PI:NAME:<NAME>END_PI")
channel = Channel.of(object)
names = channel.pluck("name")
expect(names.value).to.equal("PI:NAME:<NAME>END_PI")
expect(-> object.name = "PI:NAME:<NAME>END_PI").to.emit(names, with: "PI:NAME:<NAME>END_PI")
expect(names.value).to.equal("PI:NAME:<NAME>END_PI")
it "doesn't emit values from old objects", ->
person1 = Serenade(name: "PI:NAME:<NAME>END_PI")
person2 = Serenade(name: "PI:NAME:<NAME>END_PI")
channel = Channel.of(person1)
names = channel.pluck("name")
expect(-> channel.emit(person2)).to.emit(names, with: "PI:NAME:<NAME>END_PI")
expect(-> person1.name = "PI:NAME:<NAME>END_PI").not.to.emit(names)
expect(-> person2.name = "PI:NAME:<NAME>END_PI").to.emit(names, with: "PI:NAME:<NAME>END_PI")
describe "#pluckAll", ->
it "creates a new channel which returns the the given property of each element", ->
channel = Channel.of([{ name: "PI:NAME:<NAME>END_PI" }, { name: "PI:NAME:<NAME>END_PI" }])
names = channel.pluckAll("name")
expect(names.value).to.eql(["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
expect(-> channel.emit([{ name: "PI:NAME:<NAME>END_PI" }])).to.emit(names, with: ["PI:NAME:<NAME>END_PI"])
expect(names.value).to.eql(["PI:NAME:<NAME>END_PI"])
it "gets value from attached channel and emits event when it changes", ->
person1 = Serenade(name: "PI:NAME:<NAME>END_PI")
person2 = Serenade(name: "PI:NAME:<NAME>END_PI")
channel = Channel.of([person1, person2])
names = channel.pluckAll("name")
expect(names.value).to.eql(["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
expect(-> person1.name = "PI:NAME:<NAME>END_PI").to.emit(names, with: ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
expect(names.value).to.eql(["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
it "emits values when collection changed", ->
person1 = Serenade(name: "PI:NAME:<NAME>END_PI")
person2 = Serenade(name: "PI:NAME:<NAME>END_PI")
authors = new Serenade.Collection([person1, person2])
channel = Channel.of(authors)
names = channel.pluckAll("name")
expect(names.value.toArray()).to.eql(["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
expect(-> authors.push(Serenade(name: "PI:NAME:<NAME>END_PI"))).to.emit(names.map((x) -> x.toArray()), with: ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
expect(names.value.toArray()).to.eql(["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
it "doesn't emit values from old objects", ->
person1 = Serenade(name: "PI:NAME:<NAME>END_PI")
person2 = Serenade(name: "PI:NAME:<NAME>END_PI")
person3 = Serenade(name: "PI:NAME:<NAME>END_PI")
channel = Channel.of([person1, person2])
names = channel.pluckAll("name")
channel.emit([person3, person2])
expect(-> person1.name = "PI:NAME:<NAME>END_PI").not.to.emit(names)
expect(-> person2.name = "PI:NAME:<NAME>END_PI").to.emit(names, with: ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
expect(-> person3.name = "PI:NAME:<NAME>END_PI").to.emit(names, with: ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
expect(names.value).to.eql(["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
describe "#collection", ->
it "emits values when collection changed", ->
person1 = "PI:NAME:<NAME>END_PI"
person2 = "PI:NAME:<NAME>END_PI"
authors = new Serenade.Collection([person1, person2])
channel = Channel.of(authors)
names = channel.collection()
expect(names.value.toArray()).to.eql(["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
expect(-> authors.push("PI:NAME:<NAME>END_PI")).to.emit(names)
expect(names.value.toArray()).to.eql(["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
|
[
{
"context": " edit_mode: true\n detail: \"\"\n key: TEMP_EVENT_PREFIX + @state.counter\n temp_event: true\n }\n ",
"end": 2222,
"score": 0.7602358460426331,
"start": 2210,
"tag": "KEY",
"value": "EVENT_PREFIX"
},
{
"context": " event.rendered_date = date\n ... | src/components/life_app.coffee | dwetterau/life | 2 | React = require 'react'
moment = require 'moment'
utils = require '../lib/utils'
{Icon, FlatButton, Paper, Styles} = require 'material-ui'
ThemeManager = Styles.ThemeManager()
{EditEvent} = require './edit_event'
{LifeAppNavigation} = require './life_app_navigation'
{EventTile} = require './tiles/event_tile'
{HeaderTile} = require './tiles/header_tile'
# Structure:
# LifeApp (which is the timeline)
# - Maintains a list of day headers
# - Maintains a list of events
RENDERED_DATE_FORMAT = "dddd, MMMM D, YYYY"
TEMP_EVENT_PREFIX = "TempEventKey"
LifeApp = React.createClass
displayName: 'LifeApp'
getInitialState: (props) ->
props = props || @props
@initializeEvents(props.events)
{events, headers, labels} = @processEvents(props.events)
view_type = "day"
base_moment = moment()
{objects, someFiltered} =
@getAllTimelineObjects(
events, headers, labels, @getViewTimeRange(view_type, base_moment), []
)
return {
appType: @props.appType
events
headers
objects
counter: 0
in_edit: false
view_type
base_moment
someFiltered
labels
labelFilter: []
}
childContextTypes:
muiTheme: React.PropTypes.object
getChildContext: ->
muiTheme: ThemeManager.getCurrentTheme()
componentWillReceiveProps: (new_props, old_props) ->
@setState @getInitialState(new_props)
componentDidUpdate: () ->
if @state.in_edit
@scrollToEdit()
scrollToEdit: () ->
# Scroll to the edit pane
$('html, body').animate({
scrollTop: Math.max(0, $("form#event_form").offset().top - 120)
}, 1000)
getNewObjects: (events) ->
{events, headers, labels} = @processEvents events
new_state = @getAllTimelineObjects events, headers, labels
new_state.events = events
new_state.headers = headers
return new_state
addEvent: () ->
# Don't add another if we are editing something
if @state.in_edit
@throwAlreadyEditingError()
return
new_date = moment()
# Make the new event
event = {
date: new_date
rendered_date: new_date.format(RENDERED_DATE_FORMAT)
edit_mode: true
detail: ""
key: TEMP_EVENT_PREFIX + @state.counter
temp_event: true
}
new_state = {
temp_event: @createEventTileObject(event, @state.labels)
counter: @state.counter + 1
in_edit: true
}
@setState new_state
submitHandler: (e, url) ->
key = e.key
event_id = e.id
# Note: this should be the only thing left that relies on there only being one event
# in edit mode at a time.
$.post url, {
id: event_id
date: $('#date').val()
detail: $('#detail').val()
labels: $('#labels').val()
},
(body) =>
if body.status == 'ok'
# Remove the event edit, add in the real event
new_event = body.new_event
@initializeEvents([new_event])
events = @state.events
if @inlineEditing(false)
# If we were editing inline, remove the old event
# Determine the index of the edit event
index = -1
for event, i in events
if event.key == key
index = i
break
if index == -1
throw Error("Didn't find edit event")
events.splice(index, 1)
events.push new_event
new_state = @getNewObjects events
new_state.in_edit = false
new_state.temp_event = null
@setState new_state
cancelHandler: (e) ->
# If the event was a temp event, just delete it
if not @inlineEditing()
new_state = {
temp_event: null
in_edit: false
}
@setState new_state
else
# Remove the event in edit mode
i = -1
events = (x for x in @state.events)
for event, index in events
if event.edit_mode
i = index
break
if i == -1 or not @state.in_edit
throw Error("Canceled when no event was being edited.")
event = events[i]
if not event.id?
# Remove the event, it doesn't have an id yet
events.splice(i, 1)
else
events[i].edit_mode = false
new_state = @getNewObjects events
new_state.in_edit = false
@setState new_state
# Don't let the form submit
e.preventDefault()
e.stopPropagation()
beginEdit: (e) ->
if @state.in_edit
@throwAlreadyEditingError()
return
id = $(e.target).data('event_id')
index = -1
events = (x for x in @state.events)
for event, i in events
if event.id == id
index = i
break
if index == -1
throw Error("Couldn't find event entering edit mode.")
# Save the event's current state in event.old
event.edit_mode = true
new_state = @getNewObjects events
new_state.in_edit = true
@setState new_state
grabEventIdAndRemove: (endpoint, e) ->
id = $(e.target).data('event_id')
$.post endpoint, {
id
}, (body) =>
if body.status != 'ok'
console.error("Bad call to endpoint")
index = -1
events = (x for x in @state.events)
for event, i in events
if event.id == id
index = i
break
if index == -1
throw Error("Couldn't find event that was modified")
# Remove the event
events.splice(index, 1)
new_state = @getNewObjects events
@setState new_state
archiveEvent: (e) ->
@grabEventIdAndRemove '/event/archive', e
restoreEvent: (e) ->
@grabEventIdAndRemove '/event/restore', e
deleteEvent: (e) ->
@grabEventIdAndRemove '/event/delete', e
sortEvents: (events) ->
# Sort all the events from oldest to newest
events.sort (a, b) ->
a.date.unix() - b.date.unix()
return events
initializeEvents: (events) ->
for event in events
event.date = moment.utc(event.date).local()
date = event.date.format(RENDERED_DATE_FORMAT)
event.rendered_date = date
event.key = "event_" + event.id + "_" +
utils.hash(event.detail + date + JSON.stringify(event.labels))
event.labels = (l.name for l in event.labels)
processEvents: (events) ->
# Takes in the events and returns a dict with events and headers, both in sorted order
if events.length == 0
return {events, headers: []}
events = @sortEvents events
labels = {}
headers = {}
header_list = []
for event in events
if event.rendered_date not of headers
header_list.push {
date: event.rendered_date,
moment: moment(event.rendered_date, RENDERED_DATE_FORMAT)
key: "header_" + event.rendered_date
}
headers[event.rendered_date] = true
event.labelLookupMap = {}
# Compute all the labels
for label in event.labels
event.labelLookupMap[label] = true
if label of labels
labels[label].push event.id
else
labels[label] = [event.id]
return {events, headers: header_list, labels}
createEventTileObject: (event, allLabels) ->
object = {
key: event.key
event
id: "event_" + event.id
type: event.state
labels: allLabels
}
if event.edit_mode
object.submit_handler = @submitHandler
object.cancel_handler = @cancelHandler
else
if event.state == 'active'
object.edit_handler = @beginEdit
object.archive_handler = @archiveEvent
else if event.state == 'archived'
object.restoreHandler = @restoreEvent
object.deleteHandler = @deleteEvent
return object
getAllTimelineObjects: (events, headers, labels, view_time_range, labelFilter) ->
if not view_time_range?
view_time_range = @getViewTimeRange @state.view_type
if not labelFilter?
labelFilter = @state.labelFilter
# Returns true if the event is filtered out because it doesn't have one of the
# labels in the labelFilter
filtered = (event) ->
for label in labelFilter
if label not of event.labelLookupMap
return true
return false
# Note that it's okay to change events here because we don't output it
events = (e for e in events when not filtered(e))
# Reads the events and headers off of state, orders them, and returns them
objects = []
i = 0
someFiltered = false
for header, j in headers
if header.moment.unix() < view_time_range.start
# Skip over all the events for this header that are out of the window
while i < events.length and events[i].rendered_date == header.date
i++
continue
if header.moment.unix() >= view_time_range.end
break
objects.push {key: header.key, header, id: "header_" + j}
while i < events.length and events[i].rendered_date == header.date
objects.push @createEventTileObject(events[i], labels)
i++
# If the previous thing in objects is a header, the events have been filtered out
if objects[objects.length - 1].header?
someFiltered = true
objects.pop()
return {objects, someFiltered}
throwAlreadyEditingError: () ->
$.snackbar
content: "Finish editing your event first!"
timeout: 3000
# Returns if we are editing an event inline or not. If so, we shouldn't allow view changes.
inlineEditing: (displayError) ->
inlineEditing = @state.in_edit and not @state.temp_event?
if inlineEditing and displayError
@throwAlreadyEditingError()
return inlineEditing
switchView: (view_type) ->
if @inlineEditing(true)
return
if view_type == @state.view_type
return
view_time_range = @getViewTimeRange(view_type)
new_state = @getAllTimelineObjects(
@state.events, @state.headers, @state.labels, view_time_range
)
new_state.view_type = view_type
@setState new_state
changeTimeRange: (to_past) ->
if @inlineEditing(true)
return
m = @state.base_moment
if to_past
m.subtract 1, @state.view_type
else
m.add 1, @state.view_type
# Update the objects to fit in this range
new_state = @getAllTimelineObjects(@state.events, @state.headers, @state.labels)
new_state.base_moment = m
@setState new_state
resetTimeRange: () ->
if @inlineEditing true
return
m = moment()
@state.base_moment = m
newState = @getAllTimelineObjects(@state.events, @state.headers, @state.labels)
newState.base_moment = m
@setState newState
filterTokens: (filterTokens) ->
if @inlineEditing(true)
return
if filterTokens is ''
filterTokens = []
else
filterTokens = filterTokens.split(' ')
new_state = @getAllTimelineObjects(
@state.events, @state.headers, @state.labels, null, filterTokens
)
new_state.labelFilter = filterTokens
@setState new_state
getViewTimeRange: (view_type, base_moment) ->
# Return the beginning and end time points as moments for the view type
# @return {start: unix_timestamp, end: unix_timestamp}
if not base_moment?
m = @state.base_moment
else
m = base_moment
format = "MM/DD/YYYY"
if view_type == 'day'
start = moment(m.format(format), format)
else if view_type == 'week'
start = moment(m.format(format), format).subtract(m.weekday(), 'day')
else if view_type == 'month'
start = moment(m.format("MM/1/YYYY"), format)
else if view_type == 'year'
start = moment(m.format("1/1/YYYY"), format)
end = moment(start).add(1, view_type)
return {start: start.unix(), end: end.unix()}
getNoObjectsHeader: (prefix) ->
time_range = @getViewTimeRange(@state.view_type)
start_moment = moment.unix(time_range.start)
if @state.view_type == 'day'
content = start_moment.format(RENDERED_DATE_FORMAT)
subtext_ending = "day."
else if @state.view_type == 'week'
content = 'Week of ' + start_moment.format(RENDERED_DATE_FORMAT)
subtext_ending = "week."
else if @state.view_type == 'month'
content = start_moment.format("MMMM, YYYY")
subtext_ending = "month."
else if @state.view_type == 'year'
content = start_moment.format("YYYY")
subtext_ending = "year."
return [
React.createElement("div", {className: "header-tile", key: 'temp-header'},
React.createElement("h4", {key: 'temp-header-content'}, content)
React.createElement("i", {className: "text-center", key: 'temp-header-subtext'},
prefix + subtext_ending
)
)
]
render: () ->
timeline_list = []
hasEvent = false
for object in @state.objects
if object.element?
timeline_list.push object.element
else if object.header?
timeline_list.push React.createElement(HeaderTile, object)
else if object.event?
hasEvent = true
timeline_list.push React.createElement(EventTile, object)
if timeline_list.length
timeline = [
React.createElement("div", {key: "timeline-content"}, timeline_list)
]
else
# No events in the timeline, there are 3 cases. In archive,
# or some are filtered, or none are filtered
if @state.appType == 'archive'
timeline = @getNoObjectsHeader "You have no archived thoughts for this "
else if @state.appType == 'active'
if @state.someFiltered
timeline = @getNoObjectsHeader "You have filtered out all your thoughts for this "
else
timeline = @getNoObjectsHeader "You have not recorded any thoughts for this "
app_nav_props = () =>
key: "top_app_nav"
top: true
switchView: @switchView
changeTimeRange: @changeTimeRange
resetTimeRange: @resetTimeRange
addEvent: @addEvent
labels: @state.labels
filterTokens: @filterTokens
viewType: @state.view_type
app_array = [React.createElement(LifeAppNavigation, app_nav_props())]
if @state.temp_event?
app_array.push React.createElement(
"div",
{key: "temp-event-container", className: "container col-sm-offset-2 col-sm-8"},
React.createElement(EventTile, @state.temp_event)
)
app_array.push React.createElement("div",
{key: "timeline", className: "col-sm-offset-2 col-sm-8"}, timeline)
return React.createElement("div", null, app_array)
module.exports = {LifeApp}
| 46920 | React = require 'react'
moment = require 'moment'
utils = require '../lib/utils'
{Icon, FlatButton, Paper, Styles} = require 'material-ui'
ThemeManager = Styles.ThemeManager()
{EditEvent} = require './edit_event'
{LifeAppNavigation} = require './life_app_navigation'
{EventTile} = require './tiles/event_tile'
{HeaderTile} = require './tiles/header_tile'
# Structure:
# LifeApp (which is the timeline)
# - Maintains a list of day headers
# - Maintains a list of events
RENDERED_DATE_FORMAT = "dddd, MMMM D, YYYY"
TEMP_EVENT_PREFIX = "TempEventKey"
LifeApp = React.createClass
displayName: 'LifeApp'
getInitialState: (props) ->
props = props || @props
@initializeEvents(props.events)
{events, headers, labels} = @processEvents(props.events)
view_type = "day"
base_moment = moment()
{objects, someFiltered} =
@getAllTimelineObjects(
events, headers, labels, @getViewTimeRange(view_type, base_moment), []
)
return {
appType: @props.appType
events
headers
objects
counter: 0
in_edit: false
view_type
base_moment
someFiltered
labels
labelFilter: []
}
childContextTypes:
muiTheme: React.PropTypes.object
getChildContext: ->
muiTheme: ThemeManager.getCurrentTheme()
componentWillReceiveProps: (new_props, old_props) ->
@setState @getInitialState(new_props)
componentDidUpdate: () ->
if @state.in_edit
@scrollToEdit()
scrollToEdit: () ->
# Scroll to the edit pane
$('html, body').animate({
scrollTop: Math.max(0, $("form#event_form").offset().top - 120)
}, 1000)
getNewObjects: (events) ->
{events, headers, labels} = @processEvents events
new_state = @getAllTimelineObjects events, headers, labels
new_state.events = events
new_state.headers = headers
return new_state
addEvent: () ->
# Don't add another if we are editing something
if @state.in_edit
@throwAlreadyEditingError()
return
new_date = moment()
# Make the new event
event = {
date: new_date
rendered_date: new_date.format(RENDERED_DATE_FORMAT)
edit_mode: true
detail: ""
key: TEMP_<KEY> + @state.counter
temp_event: true
}
new_state = {
temp_event: @createEventTileObject(event, @state.labels)
counter: @state.counter + 1
in_edit: true
}
@setState new_state
submitHandler: (e, url) ->
key = e.key
event_id = e.id
# Note: this should be the only thing left that relies on there only being one event
# in edit mode at a time.
$.post url, {
id: event_id
date: $('#date').val()
detail: $('#detail').val()
labels: $('#labels').val()
},
(body) =>
if body.status == 'ok'
# Remove the event edit, add in the real event
new_event = body.new_event
@initializeEvents([new_event])
events = @state.events
if @inlineEditing(false)
# If we were editing inline, remove the old event
# Determine the index of the edit event
index = -1
for event, i in events
if event.key == key
index = i
break
if index == -1
throw Error("Didn't find edit event")
events.splice(index, 1)
events.push new_event
new_state = @getNewObjects events
new_state.in_edit = false
new_state.temp_event = null
@setState new_state
cancelHandler: (e) ->
# If the event was a temp event, just delete it
if not @inlineEditing()
new_state = {
temp_event: null
in_edit: false
}
@setState new_state
else
# Remove the event in edit mode
i = -1
events = (x for x in @state.events)
for event, index in events
if event.edit_mode
i = index
break
if i == -1 or not @state.in_edit
throw Error("Canceled when no event was being edited.")
event = events[i]
if not event.id?
# Remove the event, it doesn't have an id yet
events.splice(i, 1)
else
events[i].edit_mode = false
new_state = @getNewObjects events
new_state.in_edit = false
@setState new_state
# Don't let the form submit
e.preventDefault()
e.stopPropagation()
beginEdit: (e) ->
if @state.in_edit
@throwAlreadyEditingError()
return
id = $(e.target).data('event_id')
index = -1
events = (x for x in @state.events)
for event, i in events
if event.id == id
index = i
break
if index == -1
throw Error("Couldn't find event entering edit mode.")
# Save the event's current state in event.old
event.edit_mode = true
new_state = @getNewObjects events
new_state.in_edit = true
@setState new_state
grabEventIdAndRemove: (endpoint, e) ->
id = $(e.target).data('event_id')
$.post endpoint, {
id
}, (body) =>
if body.status != 'ok'
console.error("Bad call to endpoint")
index = -1
events = (x for x in @state.events)
for event, i in events
if event.id == id
index = i
break
if index == -1
throw Error("Couldn't find event that was modified")
# Remove the event
events.splice(index, 1)
new_state = @getNewObjects events
@setState new_state
archiveEvent: (e) ->
@grabEventIdAndRemove '/event/archive', e
restoreEvent: (e) ->
@grabEventIdAndRemove '/event/restore', e
deleteEvent: (e) ->
@grabEventIdAndRemove '/event/delete', e
sortEvents: (events) ->
# Sort all the events from oldest to newest
events.sort (a, b) ->
a.date.unix() - b.date.unix()
return events
initializeEvents: (events) ->
for event in events
event.date = moment.utc(event.date).local()
date = event.date.format(RENDERED_DATE_FORMAT)
event.rendered_date = date
event.key = "<KEY> + event.id + <KEY> +
utils.hash(event.detail + date + JSON.stringify(event.labels))
event.labels = (l.name for l in event.labels)
processEvents: (events) ->
# Takes in the events and returns a dict with events and headers, both in sorted order
if events.length == 0
return {events, headers: []}
events = @sortEvents events
labels = {}
headers = {}
header_list = []
for event in events
if event.rendered_date not of headers
header_list.push {
date: event.rendered_date,
moment: moment(event.rendered_date, RENDERED_DATE_FORMAT)
key: "header_" + event.rendered_date
}
headers[event.rendered_date] = true
event.labelLookupMap = {}
# Compute all the labels
for label in event.labels
event.labelLookupMap[label] = true
if label of labels
labels[label].push event.id
else
labels[label] = [event.id]
return {events, headers: header_list, labels}
createEventTileObject: (event, allLabels) ->
object = {
key: event.key
event
id: "event_" + event.id
type: event.state
labels: allLabels
}
if event.edit_mode
object.submit_handler = @submitHandler
object.cancel_handler = @cancelHandler
else
if event.state == 'active'
object.edit_handler = @beginEdit
object.archive_handler = @archiveEvent
else if event.state == 'archived'
object.restoreHandler = @restoreEvent
object.deleteHandler = @deleteEvent
return object
getAllTimelineObjects: (events, headers, labels, view_time_range, labelFilter) ->
if not view_time_range?
view_time_range = @getViewTimeRange @state.view_type
if not labelFilter?
labelFilter = @state.labelFilter
# Returns true if the event is filtered out because it doesn't have one of the
# labels in the labelFilter
filtered = (event) ->
for label in labelFilter
if label not of event.labelLookupMap
return true
return false
# Note that it's okay to change events here because we don't output it
events = (e for e in events when not filtered(e))
# Reads the events and headers off of state, orders them, and returns them
objects = []
i = 0
someFiltered = false
for header, j in headers
if header.moment.unix() < view_time_range.start
# Skip over all the events for this header that are out of the window
while i < events.length and events[i].rendered_date == header.date
i++
continue
if header.moment.unix() >= view_time_range.end
break
objects.push {key: header.key, header, id: "header_" + j}
while i < events.length and events[i].rendered_date == header.date
objects.push @createEventTileObject(events[i], labels)
i++
# If the previous thing in objects is a header, the events have been filtered out
if objects[objects.length - 1].header?
someFiltered = true
objects.pop()
return {objects, someFiltered}
throwAlreadyEditingError: () ->
$.snackbar
content: "Finish editing your event first!"
timeout: 3000
# Returns if we are editing an event inline or not. If so, we shouldn't allow view changes.
inlineEditing: (displayError) ->
inlineEditing = @state.in_edit and not @state.temp_event?
if inlineEditing and displayError
@throwAlreadyEditingError()
return inlineEditing
switchView: (view_type) ->
if @inlineEditing(true)
return
if view_type == @state.view_type
return
view_time_range = @getViewTimeRange(view_type)
new_state = @getAllTimelineObjects(
@state.events, @state.headers, @state.labels, view_time_range
)
new_state.view_type = view_type
@setState new_state
changeTimeRange: (to_past) ->
if @inlineEditing(true)
return
m = @state.base_moment
if to_past
m.subtract 1, @state.view_type
else
m.add 1, @state.view_type
# Update the objects to fit in this range
new_state = @getAllTimelineObjects(@state.events, @state.headers, @state.labels)
new_state.base_moment = m
@setState new_state
resetTimeRange: () ->
if @inlineEditing true
return
m = moment()
@state.base_moment = m
newState = @getAllTimelineObjects(@state.events, @state.headers, @state.labels)
newState.base_moment = m
@setState newState
filterTokens: (filterTokens) ->
if @inlineEditing(true)
return
if filterTokens is ''
filterTokens = []
else
filterTokens = filterTokens.split(' ')
new_state = @getAllTimelineObjects(
@state.events, @state.headers, @state.labels, null, filterTokens
)
new_state.labelFilter = filterTokens
@setState new_state
getViewTimeRange: (view_type, base_moment) ->
# Return the beginning and end time points as moments for the view type
# @return {start: unix_timestamp, end: unix_timestamp}
if not base_moment?
m = @state.base_moment
else
m = base_moment
format = "MM/DD/YYYY"
if view_type == 'day'
start = moment(m.format(format), format)
else if view_type == 'week'
start = moment(m.format(format), format).subtract(m.weekday(), 'day')
else if view_type == 'month'
start = moment(m.format("MM/1/YYYY"), format)
else if view_type == 'year'
start = moment(m.format("1/1/YYYY"), format)
end = moment(start).add(1, view_type)
return {start: start.unix(), end: end.unix()}
getNoObjectsHeader: (prefix) ->
time_range = @getViewTimeRange(@state.view_type)
start_moment = moment.unix(time_range.start)
if @state.view_type == 'day'
content = start_moment.format(RENDERED_DATE_FORMAT)
subtext_ending = "day."
else if @state.view_type == 'week'
content = 'Week of ' + start_moment.format(RENDERED_DATE_FORMAT)
subtext_ending = "week."
else if @state.view_type == 'month'
content = start_moment.format("MMMM, YYYY")
subtext_ending = "month."
else if @state.view_type == 'year'
content = start_moment.format("YYYY")
subtext_ending = "year."
return [
React.createElement("div", {className: "header-tile", key: 'temp-header'},
React.createElement("h4", {key: 'temp-header-content'}, content)
React.createElement("i", {className: "text-center", key: 'temp-header-subtext'},
prefix + subtext_ending
)
)
]
render: () ->
timeline_list = []
hasEvent = false
for object in @state.objects
if object.element?
timeline_list.push object.element
else if object.header?
timeline_list.push React.createElement(HeaderTile, object)
else if object.event?
hasEvent = true
timeline_list.push React.createElement(EventTile, object)
if timeline_list.length
timeline = [
React.createElement("div", {key: "timeline-content"}, timeline_list)
]
else
# No events in the timeline, there are 3 cases. In archive,
# or some are filtered, or none are filtered
if @state.appType == 'archive'
timeline = @getNoObjectsHeader "You have no archived thoughts for this "
else if @state.appType == 'active'
if @state.someFiltered
timeline = @getNoObjectsHeader "You have filtered out all your thoughts for this "
else
timeline = @getNoObjectsHeader "You have not recorded any thoughts for this "
app_nav_props = () =>
key: "top_app_nav"
top: true
switchView: @switchView
changeTimeRange: @changeTimeRange
resetTimeRange: @resetTimeRange
addEvent: @addEvent
labels: @state.labels
filterTokens: @filterTokens
viewType: @state.view_type
app_array = [React.createElement(LifeAppNavigation, app_nav_props())]
if @state.temp_event?
app_array.push React.createElement(
"div",
{key: "temp-event-container", className: "container col-sm-offset-2 col-sm-8"},
React.createElement(EventTile, @state.temp_event)
)
app_array.push React.createElement("div",
{key: "timeline", className: "col-sm-offset-2 col-sm-8"}, timeline)
return React.createElement("div", null, app_array)
module.exports = {LifeApp}
| true | React = require 'react'
moment = require 'moment'
utils = require '../lib/utils'
{Icon, FlatButton, Paper, Styles} = require 'material-ui'
ThemeManager = Styles.ThemeManager()
{EditEvent} = require './edit_event'
{LifeAppNavigation} = require './life_app_navigation'
{EventTile} = require './tiles/event_tile'
{HeaderTile} = require './tiles/header_tile'
# Structure:
# LifeApp (which is the timeline)
# - Maintains a list of day headers
# - Maintains a list of events
RENDERED_DATE_FORMAT = "dddd, MMMM D, YYYY"
TEMP_EVENT_PREFIX = "TempEventKey"
LifeApp = React.createClass
displayName: 'LifeApp'
getInitialState: (props) ->
props = props || @props
@initializeEvents(props.events)
{events, headers, labels} = @processEvents(props.events)
view_type = "day"
base_moment = moment()
{objects, someFiltered} =
@getAllTimelineObjects(
events, headers, labels, @getViewTimeRange(view_type, base_moment), []
)
return {
appType: @props.appType
events
headers
objects
counter: 0
in_edit: false
view_type
base_moment
someFiltered
labels
labelFilter: []
}
childContextTypes:
muiTheme: React.PropTypes.object
getChildContext: ->
muiTheme: ThemeManager.getCurrentTheme()
componentWillReceiveProps: (new_props, old_props) ->
@setState @getInitialState(new_props)
componentDidUpdate: () ->
if @state.in_edit
@scrollToEdit()
scrollToEdit: () ->
# Scroll to the edit pane
$('html, body').animate({
scrollTop: Math.max(0, $("form#event_form").offset().top - 120)
}, 1000)
getNewObjects: (events) ->
{events, headers, labels} = @processEvents events
new_state = @getAllTimelineObjects events, headers, labels
new_state.events = events
new_state.headers = headers
return new_state
addEvent: () ->
# Don't add another if we are editing something
if @state.in_edit
@throwAlreadyEditingError()
return
new_date = moment()
# Make the new event
event = {
date: new_date
rendered_date: new_date.format(RENDERED_DATE_FORMAT)
edit_mode: true
detail: ""
key: TEMP_PI:KEY:<KEY>END_PI + @state.counter
temp_event: true
}
new_state = {
temp_event: @createEventTileObject(event, @state.labels)
counter: @state.counter + 1
in_edit: true
}
@setState new_state
submitHandler: (e, url) ->
key = e.key
event_id = e.id
# Note: this should be the only thing left that relies on there only being one event
# in edit mode at a time.
$.post url, {
id: event_id
date: $('#date').val()
detail: $('#detail').val()
labels: $('#labels').val()
},
(body) =>
if body.status == 'ok'
# Remove the event edit, add in the real event
new_event = body.new_event
@initializeEvents([new_event])
events = @state.events
if @inlineEditing(false)
# If we were editing inline, remove the old event
# Determine the index of the edit event
index = -1
for event, i in events
if event.key == key
index = i
break
if index == -1
throw Error("Didn't find edit event")
events.splice(index, 1)
events.push new_event
new_state = @getNewObjects events
new_state.in_edit = false
new_state.temp_event = null
@setState new_state
cancelHandler: (e) ->
# If the event was a temp event, just delete it
if not @inlineEditing()
new_state = {
temp_event: null
in_edit: false
}
@setState new_state
else
# Remove the event in edit mode
i = -1
events = (x for x in @state.events)
for event, index in events
if event.edit_mode
i = index
break
if i == -1 or not @state.in_edit
throw Error("Canceled when no event was being edited.")
event = events[i]
if not event.id?
# Remove the event, it doesn't have an id yet
events.splice(i, 1)
else
events[i].edit_mode = false
new_state = @getNewObjects events
new_state.in_edit = false
@setState new_state
# Don't let the form submit
e.preventDefault()
e.stopPropagation()
beginEdit: (e) ->
if @state.in_edit
@throwAlreadyEditingError()
return
id = $(e.target).data('event_id')
index = -1
events = (x for x in @state.events)
for event, i in events
if event.id == id
index = i
break
if index == -1
throw Error("Couldn't find event entering edit mode.")
# Save the event's current state in event.old
event.edit_mode = true
new_state = @getNewObjects events
new_state.in_edit = true
@setState new_state
grabEventIdAndRemove: (endpoint, e) ->
id = $(e.target).data('event_id')
$.post endpoint, {
id
}, (body) =>
if body.status != 'ok'
console.error("Bad call to endpoint")
index = -1
events = (x for x in @state.events)
for event, i in events
if event.id == id
index = i
break
if index == -1
throw Error("Couldn't find event that was modified")
# Remove the event
events.splice(index, 1)
new_state = @getNewObjects events
@setState new_state
archiveEvent: (e) ->
@grabEventIdAndRemove '/event/archive', e
restoreEvent: (e) ->
@grabEventIdAndRemove '/event/restore', e
deleteEvent: (e) ->
@grabEventIdAndRemove '/event/delete', e
sortEvents: (events) ->
# Sort all the events from oldest to newest
events.sort (a, b) ->
a.date.unix() - b.date.unix()
return events
initializeEvents: (events) ->
for event in events
event.date = moment.utc(event.date).local()
date = event.date.format(RENDERED_DATE_FORMAT)
event.rendered_date = date
event.key = "PI:KEY:<KEY>END_PI + event.id + PI:KEY:<KEY>END_PI +
utils.hash(event.detail + date + JSON.stringify(event.labels))
event.labels = (l.name for l in event.labels)
processEvents: (events) ->
# Takes in the events and returns a dict with events and headers, both in sorted order
if events.length == 0
return {events, headers: []}
events = @sortEvents events
labels = {}
headers = {}
header_list = []
for event in events
if event.rendered_date not of headers
header_list.push {
date: event.rendered_date,
moment: moment(event.rendered_date, RENDERED_DATE_FORMAT)
key: "header_" + event.rendered_date
}
headers[event.rendered_date] = true
event.labelLookupMap = {}
# Compute all the labels
for label in event.labels
event.labelLookupMap[label] = true
if label of labels
labels[label].push event.id
else
labels[label] = [event.id]
return {events, headers: header_list, labels}
createEventTileObject: (event, allLabels) ->
object = {
key: event.key
event
id: "event_" + event.id
type: event.state
labels: allLabels
}
if event.edit_mode
object.submit_handler = @submitHandler
object.cancel_handler = @cancelHandler
else
if event.state == 'active'
object.edit_handler = @beginEdit
object.archive_handler = @archiveEvent
else if event.state == 'archived'
object.restoreHandler = @restoreEvent
object.deleteHandler = @deleteEvent
return object
getAllTimelineObjects: (events, headers, labels, view_time_range, labelFilter) ->
if not view_time_range?
view_time_range = @getViewTimeRange @state.view_type
if not labelFilter?
labelFilter = @state.labelFilter
# Returns true if the event is filtered out because it doesn't have one of the
# labels in the labelFilter
filtered = (event) ->
for label in labelFilter
if label not of event.labelLookupMap
return true
return false
# Note that it's okay to change events here because we don't output it
events = (e for e in events when not filtered(e))
# Reads the events and headers off of state, orders them, and returns them
objects = []
i = 0
someFiltered = false
for header, j in headers
if header.moment.unix() < view_time_range.start
# Skip over all the events for this header that are out of the window
while i < events.length and events[i].rendered_date == header.date
i++
continue
if header.moment.unix() >= view_time_range.end
break
objects.push {key: header.key, header, id: "header_" + j}
while i < events.length and events[i].rendered_date == header.date
objects.push @createEventTileObject(events[i], labels)
i++
# If the previous thing in objects is a header, the events have been filtered out
if objects[objects.length - 1].header?
someFiltered = true
objects.pop()
return {objects, someFiltered}
throwAlreadyEditingError: () ->
$.snackbar
content: "Finish editing your event first!"
timeout: 3000
# Returns if we are editing an event inline or not. If so, we shouldn't allow view changes.
inlineEditing: (displayError) ->
inlineEditing = @state.in_edit and not @state.temp_event?
if inlineEditing and displayError
@throwAlreadyEditingError()
return inlineEditing
switchView: (view_type) ->
if @inlineEditing(true)
return
if view_type == @state.view_type
return
view_time_range = @getViewTimeRange(view_type)
new_state = @getAllTimelineObjects(
@state.events, @state.headers, @state.labels, view_time_range
)
new_state.view_type = view_type
@setState new_state
changeTimeRange: (to_past) ->
if @inlineEditing(true)
return
m = @state.base_moment
if to_past
m.subtract 1, @state.view_type
else
m.add 1, @state.view_type
# Update the objects to fit in this range
new_state = @getAllTimelineObjects(@state.events, @state.headers, @state.labels)
new_state.base_moment = m
@setState new_state
resetTimeRange: () ->
if @inlineEditing true
return
m = moment()
@state.base_moment = m
newState = @getAllTimelineObjects(@state.events, @state.headers, @state.labels)
newState.base_moment = m
@setState newState
filterTokens: (filterTokens) ->
if @inlineEditing(true)
return
if filterTokens is ''
filterTokens = []
else
filterTokens = filterTokens.split(' ')
new_state = @getAllTimelineObjects(
@state.events, @state.headers, @state.labels, null, filterTokens
)
new_state.labelFilter = filterTokens
@setState new_state
getViewTimeRange: (view_type, base_moment) ->
# Return the beginning and end time points as moments for the view type
# @return {start: unix_timestamp, end: unix_timestamp}
if not base_moment?
m = @state.base_moment
else
m = base_moment
format = "MM/DD/YYYY"
if view_type == 'day'
start = moment(m.format(format), format)
else if view_type == 'week'
start = moment(m.format(format), format).subtract(m.weekday(), 'day')
else if view_type == 'month'
start = moment(m.format("MM/1/YYYY"), format)
else if view_type == 'year'
start = moment(m.format("1/1/YYYY"), format)
end = moment(start).add(1, view_type)
return {start: start.unix(), end: end.unix()}
getNoObjectsHeader: (prefix) ->
time_range = @getViewTimeRange(@state.view_type)
start_moment = moment.unix(time_range.start)
if @state.view_type == 'day'
content = start_moment.format(RENDERED_DATE_FORMAT)
subtext_ending = "day."
else if @state.view_type == 'week'
content = 'Week of ' + start_moment.format(RENDERED_DATE_FORMAT)
subtext_ending = "week."
else if @state.view_type == 'month'
content = start_moment.format("MMMM, YYYY")
subtext_ending = "month."
else if @state.view_type == 'year'
content = start_moment.format("YYYY")
subtext_ending = "year."
return [
React.createElement("div", {className: "header-tile", key: 'temp-header'},
React.createElement("h4", {key: 'temp-header-content'}, content)
React.createElement("i", {className: "text-center", key: 'temp-header-subtext'},
prefix + subtext_ending
)
)
]
render: () ->
timeline_list = []
hasEvent = false
for object in @state.objects
if object.element?
timeline_list.push object.element
else if object.header?
timeline_list.push React.createElement(HeaderTile, object)
else if object.event?
hasEvent = true
timeline_list.push React.createElement(EventTile, object)
if timeline_list.length
timeline = [
React.createElement("div", {key: "timeline-content"}, timeline_list)
]
else
# No events in the timeline, there are 3 cases. In archive,
# or some are filtered, or none are filtered
if @state.appType == 'archive'
timeline = @getNoObjectsHeader "You have no archived thoughts for this "
else if @state.appType == 'active'
if @state.someFiltered
timeline = @getNoObjectsHeader "You have filtered out all your thoughts for this "
else
timeline = @getNoObjectsHeader "You have not recorded any thoughts for this "
app_nav_props = () =>
key: "top_app_nav"
top: true
switchView: @switchView
changeTimeRange: @changeTimeRange
resetTimeRange: @resetTimeRange
addEvent: @addEvent
labels: @state.labels
filterTokens: @filterTokens
viewType: @state.view_type
app_array = [React.createElement(LifeAppNavigation, app_nav_props())]
if @state.temp_event?
app_array.push React.createElement(
"div",
{key: "temp-event-container", className: "container col-sm-offset-2 col-sm-8"},
React.createElement(EventTile, @state.temp_event)
)
app_array.push React.createElement("div",
{key: "timeline", className: "col-sm-offset-2 col-sm-8"}, timeline)
return React.createElement("div", null, app_array)
module.exports = {LifeApp}
|
[
{
"context": "# nodejs-express-mongoose-demo\n# Copyright(c) 2013 Madhusudhan Srinivasa <madhums8@gmail.com>\n# MIT Licensed\n###\n\n## Modul",
"end": 76,
"score": 0.9998883605003357,
"start": 55,
"tag": "NAME",
"value": "Madhusudhan Srinivasa"
},
{
"context": "e-demo\n# Copyright(c)... | src/server.coffee | yi/coffee-nodejs-passport-boilerplate | 1 | ###
# nodejs-express-mongoose-demo
# Copyright(c) 2013 Madhusudhan Srinivasa <madhums8@gmail.com>
# MIT Licensed
###
## Module dependencies.
express = require('express')
fs = require('fs')
passport = require('passport')
# Main application entry file.
# Please note that the order of loading is important.
# Load configurations
# if test env, load example file
env = process.env.NODE_ENV || 'development'
config = require('./config/config')[env]
auth = require('./config/middlewares/authorization')
mongoose = require('mongoose')
# Bootstrap db connection
mongoose.connect(config.db)
# Bootstrap models
models_path = __dirname + '/models'
fs.readdirSync(models_path).forEach (file)-> require(models_path+'/'+file)
# bootstrap passport config
require('./config/passport')(passport, config)
app = express()
# express settings
require('./config/express')(app, config, passport)
# Bootstrap routes
require('./config/routes')(app, passport, auth)
# Start the app by listening on <port>
port = process.env.PORT || 3000
app.listen(port)
console.log "Express app started on port #{port}"
# expose app
exports = module.exports = app
| 74321 | ###
# nodejs-express-mongoose-demo
# Copyright(c) 2013 <NAME> <<EMAIL>>
# MIT Licensed
###
## Module dependencies.
express = require('express')
fs = require('fs')
passport = require('passport')
# Main application entry file.
# Please note that the order of loading is important.
# Load configurations
# if test env, load example file
env = process.env.NODE_ENV || 'development'
config = require('./config/config')[env]
auth = require('./config/middlewares/authorization')
mongoose = require('mongoose')
# Bootstrap db connection
mongoose.connect(config.db)
# Bootstrap models
models_path = __dirname + '/models'
fs.readdirSync(models_path).forEach (file)-> require(models_path+'/'+file)
# bootstrap passport config
require('./config/passport')(passport, config)
app = express()
# express settings
require('./config/express')(app, config, passport)
# Bootstrap routes
require('./config/routes')(app, passport, auth)
# Start the app by listening on <port>
port = process.env.PORT || 3000
app.listen(port)
console.log "Express app started on port #{port}"
# expose app
exports = module.exports = app
| true | ###
# nodejs-express-mongoose-demo
# Copyright(c) 2013 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
# MIT Licensed
###
## Module dependencies.
express = require('express')
fs = require('fs')
passport = require('passport')
# Main application entry file.
# Please note that the order of loading is important.
# Load configurations
# if test env, load example file
env = process.env.NODE_ENV || 'development'
config = require('./config/config')[env]
auth = require('./config/middlewares/authorization')
mongoose = require('mongoose')
# Bootstrap db connection
mongoose.connect(config.db)
# Bootstrap models
models_path = __dirname + '/models'
fs.readdirSync(models_path).forEach (file)-> require(models_path+'/'+file)
# bootstrap passport config
require('./config/passport')(passport, config)
app = express()
# express settings
require('./config/express')(app, config, passport)
# Bootstrap routes
require('./config/routes')(app, passport, auth)
# Start the app by listening on <port>
port = process.env.PORT || 3000
app.listen(port)
console.log "Express app started on port #{port}"
# expose app
exports = module.exports = app
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.999914824962616,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/_classes/landing-user-stats.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @LandingUserStats
constructor: ->
# Define constants
@margin =
top: 40
right: 0
bottom: 0
left: 0
# radius for peak circle
@peakR = 5
# Define basic elements
@svgContainerOuter = d3
.select '.js-landing-graph'
# Clear out previously set graphs
@svgContainerOuter.selectAll('svg').remove()
@svgContainerInner = @svgContainerOuter
.append 'svg'
.attr 'class', 'landing-graph'
@svg = @svgContainerInner
.append 'g'
# Ensure no blank space at the bottom at certain zoom level in Firefox.
.attr 'transform', "translate(#{@margin.left}, #{@margin.top + 1})"
@svgArea = @svg
.append 'path'
.attr 'class', 'landing-graph__area'
@svgPeakText = @svg
.append 'text'
.attr 'class', 'landing-graph__text'
.attr 'y', (-@peakR * 2)
@svgPeakCircle = @svg
.append 'circle'
.attr 'class', 'landing-graph__circle'
.attr 'cy', 0
.attr 'r', @peakR
@scaleX = d3.scaleLinear()
@scaleY = d3.scaleTime()
@area = d3.area()
.curve(d3.curveBasis)
.x (d) =>
@scaleX d.x
.y0 =>
@height
.y1 (d) =>
@scaleY d.y
# Load initial data
@loadData()
# Render
@resize()
loadData: =>
@data = osu.parseJson('json-stats')
return if _.isEmpty(@data)
@maxElem = _.maxBy @data, (o) -> o.y
@scaleX.domain d3.extent(@data, (d) -> d.x)
@scaleY.domain [0, d3.max(@data, (d) -> d.y)]
@svgPeakText
.text osu.trans('home.landing.peak', count: osu.formatNumber(@maxElem.y))
@peakTextLength = @svgPeakText.node().getComputedTextLength()
resize: =>
return if _.isEmpty(@data)
# set basic dimensions
@width = parseInt(@svgContainerOuter.style('width')) - @margin.left - @margin.right
@height = parseInt(@svgContainerOuter.style('height')) - @margin.top - @margin.bottom
# set range of scales
@scaleX.range [0, @width]
@scaleY.range [@height, 0]
# resize svgContainerInner
@svgContainerInner
.attr 'width', @width + @margin.left + @margin.right
.attr 'height', @height + @margin.top + @margin.bottom
# resize svgArea
@svgArea
.datum @data
.attr 'd', @area
# reposition peak circle...
@svgPeakCircle.attr 'cx', @scaleX(@maxElem.x)
# ...and its label
@svgPeakText.attr 'x', =>
rightX = @scaleX(@maxElem.x) + (@peakR * 2)
if (@peakTextLength + rightX) > @width
@scaleX(@maxElem.x) - (@peakTextLength + (@peakR * 2))
else
rightX
| 127523 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @LandingUserStats
constructor: ->
# Define constants
@margin =
top: 40
right: 0
bottom: 0
left: 0
# radius for peak circle
@peakR = 5
# Define basic elements
@svgContainerOuter = d3
.select '.js-landing-graph'
# Clear out previously set graphs
@svgContainerOuter.selectAll('svg').remove()
@svgContainerInner = @svgContainerOuter
.append 'svg'
.attr 'class', 'landing-graph'
@svg = @svgContainerInner
.append 'g'
# Ensure no blank space at the bottom at certain zoom level in Firefox.
.attr 'transform', "translate(#{@margin.left}, #{@margin.top + 1})"
@svgArea = @svg
.append 'path'
.attr 'class', 'landing-graph__area'
@svgPeakText = @svg
.append 'text'
.attr 'class', 'landing-graph__text'
.attr 'y', (-@peakR * 2)
@svgPeakCircle = @svg
.append 'circle'
.attr 'class', 'landing-graph__circle'
.attr 'cy', 0
.attr 'r', @peakR
@scaleX = d3.scaleLinear()
@scaleY = d3.scaleTime()
@area = d3.area()
.curve(d3.curveBasis)
.x (d) =>
@scaleX d.x
.y0 =>
@height
.y1 (d) =>
@scaleY d.y
# Load initial data
@loadData()
# Render
@resize()
loadData: =>
@data = osu.parseJson('json-stats')
return if _.isEmpty(@data)
@maxElem = _.maxBy @data, (o) -> o.y
@scaleX.domain d3.extent(@data, (d) -> d.x)
@scaleY.domain [0, d3.max(@data, (d) -> d.y)]
@svgPeakText
.text osu.trans('home.landing.peak', count: osu.formatNumber(@maxElem.y))
@peakTextLength = @svgPeakText.node().getComputedTextLength()
resize: =>
return if _.isEmpty(@data)
# set basic dimensions
@width = parseInt(@svgContainerOuter.style('width')) - @margin.left - @margin.right
@height = parseInt(@svgContainerOuter.style('height')) - @margin.top - @margin.bottom
# set range of scales
@scaleX.range [0, @width]
@scaleY.range [@height, 0]
# resize svgContainerInner
@svgContainerInner
.attr 'width', @width + @margin.left + @margin.right
.attr 'height', @height + @margin.top + @margin.bottom
# resize svgArea
@svgArea
.datum @data
.attr 'd', @area
# reposition peak circle...
@svgPeakCircle.attr 'cx', @scaleX(@maxElem.x)
# ...and its label
@svgPeakText.attr 'x', =>
rightX = @scaleX(@maxElem.x) + (@peakR * 2)
if (@peakTextLength + rightX) > @width
@scaleX(@maxElem.x) - (@peakTextLength + (@peakR * 2))
else
rightX
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @LandingUserStats
constructor: ->
# Define constants
@margin =
top: 40
right: 0
bottom: 0
left: 0
# radius for peak circle
@peakR = 5
# Define basic elements
@svgContainerOuter = d3
.select '.js-landing-graph'
# Clear out previously set graphs
@svgContainerOuter.selectAll('svg').remove()
@svgContainerInner = @svgContainerOuter
.append 'svg'
.attr 'class', 'landing-graph'
@svg = @svgContainerInner
.append 'g'
# Ensure no blank space at the bottom at certain zoom level in Firefox.
.attr 'transform', "translate(#{@margin.left}, #{@margin.top + 1})"
@svgArea = @svg
.append 'path'
.attr 'class', 'landing-graph__area'
@svgPeakText = @svg
.append 'text'
.attr 'class', 'landing-graph__text'
.attr 'y', (-@peakR * 2)
@svgPeakCircle = @svg
.append 'circle'
.attr 'class', 'landing-graph__circle'
.attr 'cy', 0
.attr 'r', @peakR
@scaleX = d3.scaleLinear()
@scaleY = d3.scaleTime()
@area = d3.area()
.curve(d3.curveBasis)
.x (d) =>
@scaleX d.x
.y0 =>
@height
.y1 (d) =>
@scaleY d.y
# Load initial data
@loadData()
# Render
@resize()
loadData: =>
@data = osu.parseJson('json-stats')
return if _.isEmpty(@data)
@maxElem = _.maxBy @data, (o) -> o.y
@scaleX.domain d3.extent(@data, (d) -> d.x)
@scaleY.domain [0, d3.max(@data, (d) -> d.y)]
@svgPeakText
.text osu.trans('home.landing.peak', count: osu.formatNumber(@maxElem.y))
@peakTextLength = @svgPeakText.node().getComputedTextLength()
resize: =>
return if _.isEmpty(@data)
# set basic dimensions
@width = parseInt(@svgContainerOuter.style('width')) - @margin.left - @margin.right
@height = parseInt(@svgContainerOuter.style('height')) - @margin.top - @margin.bottom
# set range of scales
@scaleX.range [0, @width]
@scaleY.range [@height, 0]
# resize svgContainerInner
@svgContainerInner
.attr 'width', @width + @margin.left + @margin.right
.attr 'height', @height + @margin.top + @margin.bottom
# resize svgArea
@svgArea
.datum @data
.attr 'd', @area
# reposition peak circle...
@svgPeakCircle.attr 'cx', @scaleX(@maxElem.x)
# ...and its label
@svgPeakText.attr 'x', =>
rightX = @scaleX(@maxElem.x) + (@peakR * 2)
if (@peakTextLength + rightX) > @width
@scaleX(@maxElem.x) - (@peakTextLength + (@peakR * 2))
else
rightX
|
[
{
"context": ".jp-east.idcfcloud.com/client/api'\n @apiKey = 'XXX'\n @secretKey = 'YYY'\n @idcf = new IDCF { @e",
"end": 273,
"score": 0.994695246219635,
"start": 270,
"tag": "KEY",
"value": "XXX"
},
{
"context": "client/api'\n @apiKey = 'XXX'\n @secretKey = 'YYY'\n ... | test/idcf-test.coffee | bouzuya/node-idcf-cloud-api | 2 | {IDCF} = require '../src/idcf'
assert = require 'power-assert'
request = require 'request'
sinon = require 'sinon'
describe 'IDCF', ->
beforeEach ->
@sinon = sinon.sandbox.create()
@endpoint = 'https://compute.jp-east.idcfcloud.com/client/api'
@apiKey = 'XXX'
@secretKey = 'YYY'
@idcf = new IDCF { @endpoint, @apiKey, @secretKey }
afterEach ->
@sinon.restore()
describe '#constructor', ->
it 'should be defined as function', ->
assert IDCF
assert typeof IDCF is 'function'
assert @idcf.endpoint is @endpoint
assert @idcf.apiKey is @apiKey
assert @idcf.secretKey is @secretKey
describe '#request', ->
context 'when resolved', ->
beforeEach ->
@response = body: { xyz: 456 }
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback null, @response
it 'works', ->
@idcf.request 'COMMAND!', abc: 123
.then (result) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].json is true
assert @stub.getCall(0).args[0].url is @endpoint
assert @stub.getCall(0).args[0].qs.apiKey is @apiKey
assert @stub.getCall(0).args[0].qs.command is 'COMMAND!'
assert @stub.getCall(0).args[0].qs.response is 'json'
assert.deepEqual result, @response
context 'when resolved and contained errorcode', ->
beforeEach ->
@response = body: { xyz: { errorcode: '789' } }
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback null, @response
it 'works', ->
@idcf.request 'COMMAND!', abc: 123
.catch (e) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].json is true
assert @stub.getCall(0).args[0].url is @endpoint
assert @stub.getCall(0).args[0].qs.apiKey is @apiKey
assert @stub.getCall(0).args[0].qs.command is 'COMMAND!'
assert @stub.getCall(0).args[0].qs.response is 'json'
assert e instanceof Error
assert.deepEqual e.response, @response
context 'when rejected', ->
beforeEach ->
@error = new Error 'ERROR!'
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback @error, null
it 'works', ->
@idcf.request 'COMMAND!', abc: 123
.catch (e) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].json is true
assert @stub.getCall(0).args[0].url is @endpoint
assert @stub.getCall(0).args[0].qs.apiKey is @apiKey
assert @stub.getCall(0).args[0].qs.command is 'COMMAND!'
assert @stub.getCall(0).args[0].qs.response is 'json'
assert e instanceof Error
assert.deepEqual e, @error
describe '#_buildSignature', ->
it 'works', ->
query =
apiKey: @apiKey
command: 'listZones'
response: 'json'
signature = @idcf._buildSignature query, @secretKey
assert signature is 'wWTwUIFBdRW7ep/xvNmmbj90ykI='
describe '#_request', ->
context 'when resolved', ->
beforeEach ->
@response = body: { xyz: 456 }
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback null, @response
it 'works', ->
@idcf._request abc: 123
.then (result) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].abc is 123
assert.deepEqual result, @response
context 'when rejected', ->
beforeEach ->
@error = new Error 'ERROR!'
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback @error, null
it 'works', ->
@idcf._request abc: 123
.catch (e) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].abc is 123
assert.deepEqual e, @error
| 154940 | {IDCF} = require '../src/idcf'
assert = require 'power-assert'
request = require 'request'
sinon = require 'sinon'
describe 'IDCF', ->
beforeEach ->
@sinon = sinon.sandbox.create()
@endpoint = 'https://compute.jp-east.idcfcloud.com/client/api'
@apiKey = '<KEY>'
@secretKey = '<KEY>'
@idcf = new IDCF { @endpoint, @apiKey, @secretKey }
afterEach ->
@sinon.restore()
describe '#constructor', ->
it 'should be defined as function', ->
assert IDCF
assert typeof IDCF is 'function'
assert @idcf.endpoint is @endpoint
assert @idcf.apiKey is @apiKey
assert @idcf.secretKey is @secretKey
describe '#request', ->
context 'when resolved', ->
beforeEach ->
@response = body: { xyz: 456 }
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback null, @response
it 'works', ->
@idcf.request 'COMMAND!', abc: 123
.then (result) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].json is true
assert @stub.getCall(0).args[0].url is @endpoint
assert @stub.getCall(0).args[0].qs.apiKey is @apiKey
assert @stub.getCall(0).args[0].qs.command is 'COMMAND!'
assert @stub.getCall(0).args[0].qs.response is 'json'
assert.deepEqual result, @response
context 'when resolved and contained errorcode', ->
beforeEach ->
@response = body: { xyz: { errorcode: '789' } }
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback null, @response
it 'works', ->
@idcf.request 'COMMAND!', abc: 123
.catch (e) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].json is true
assert @stub.getCall(0).args[0].url is @endpoint
assert @stub.getCall(0).args[0].qs.apiKey is @apiKey
assert @stub.getCall(0).args[0].qs.command is 'COMMAND!'
assert @stub.getCall(0).args[0].qs.response is 'json'
assert e instanceof Error
assert.deepEqual e.response, @response
context 'when rejected', ->
beforeEach ->
@error = new Error 'ERROR!'
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback @error, null
it 'works', ->
@idcf.request 'COMMAND!', abc: 123
.catch (e) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].json is true
assert @stub.getCall(0).args[0].url is @endpoint
assert @stub.getCall(0).args[0].qs.apiKey is @apiKey
assert @stub.getCall(0).args[0].qs.command is 'COMMAND!'
assert @stub.getCall(0).args[0].qs.response is 'json'
assert e instanceof Error
assert.deepEqual e, @error
describe '#_buildSignature', ->
it 'works', ->
query =
apiKey: @apiKey
command: 'listZones'
response: 'json'
signature = @idcf._buildSignature query, @secretKey
assert signature is 'wWTwUIFBdRW7ep/xvNmmbj90ykI='
describe '#_request', ->
context 'when resolved', ->
beforeEach ->
@response = body: { xyz: 456 }
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback null, @response
it 'works', ->
@idcf._request abc: 123
.then (result) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].abc is 123
assert.deepEqual result, @response
context 'when rejected', ->
beforeEach ->
@error = new Error 'ERROR!'
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback @error, null
it 'works', ->
@idcf._request abc: 123
.catch (e) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].abc is 123
assert.deepEqual e, @error
| true | {IDCF} = require '../src/idcf'
assert = require 'power-assert'
request = require 'request'
sinon = require 'sinon'
describe 'IDCF', ->
beforeEach ->
@sinon = sinon.sandbox.create()
@endpoint = 'https://compute.jp-east.idcfcloud.com/client/api'
@apiKey = 'PI:KEY:<KEY>END_PI'
@secretKey = 'PI:KEY:<KEY>END_PI'
@idcf = new IDCF { @endpoint, @apiKey, @secretKey }
afterEach ->
@sinon.restore()
describe '#constructor', ->
it 'should be defined as function', ->
assert IDCF
assert typeof IDCF is 'function'
assert @idcf.endpoint is @endpoint
assert @idcf.apiKey is @apiKey
assert @idcf.secretKey is @secretKey
describe '#request', ->
context 'when resolved', ->
beforeEach ->
@response = body: { xyz: 456 }
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback null, @response
it 'works', ->
@idcf.request 'COMMAND!', abc: 123
.then (result) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].json is true
assert @stub.getCall(0).args[0].url is @endpoint
assert @stub.getCall(0).args[0].qs.apiKey is @apiKey
assert @stub.getCall(0).args[0].qs.command is 'COMMAND!'
assert @stub.getCall(0).args[0].qs.response is 'json'
assert.deepEqual result, @response
context 'when resolved and contained errorcode', ->
beforeEach ->
@response = body: { xyz: { errorcode: '789' } }
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback null, @response
it 'works', ->
@idcf.request 'COMMAND!', abc: 123
.catch (e) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].json is true
assert @stub.getCall(0).args[0].url is @endpoint
assert @stub.getCall(0).args[0].qs.apiKey is @apiKey
assert @stub.getCall(0).args[0].qs.command is 'COMMAND!'
assert @stub.getCall(0).args[0].qs.response is 'json'
assert e instanceof Error
assert.deepEqual e.response, @response
context 'when rejected', ->
beforeEach ->
@error = new Error 'ERROR!'
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback @error, null
it 'works', ->
@idcf.request 'COMMAND!', abc: 123
.catch (e) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].json is true
assert @stub.getCall(0).args[0].url is @endpoint
assert @stub.getCall(0).args[0].qs.apiKey is @apiKey
assert @stub.getCall(0).args[0].qs.command is 'COMMAND!'
assert @stub.getCall(0).args[0].qs.response is 'json'
assert e instanceof Error
assert.deepEqual e, @error
describe '#_buildSignature', ->
it 'works', ->
query =
apiKey: @apiKey
command: 'listZones'
response: 'json'
signature = @idcf._buildSignature query, @secretKey
assert signature is 'wWTwUIFBdRW7ep/xvNmmbj90ykI='
describe '#_request', ->
context 'when resolved', ->
beforeEach ->
@response = body: { xyz: 456 }
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback null, @response
it 'works', ->
@idcf._request abc: 123
.then (result) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].abc is 123
assert.deepEqual result, @response
context 'when rejected', ->
beforeEach ->
@error = new Error 'ERROR!'
@stub = @sinon.stub request, 'Request', ({ callback }) =>
callback @error, null
it 'works', ->
@idcf._request abc: 123
.catch (e) =>
assert @stub.callCount is 1
assert @stub.getCall(0).args[0].abc is 123
assert.deepEqual e, @error
|
[
{
"context": "*\n * @namespace KINOUT\n * @class Url\n *\n * @author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi\n###\n\nKINOUT.Url = (",
"end": 110,
"score": 0.9998881816864014,
"start": 89,
"tag": "NAME",
"value": "Javier Jimenez Villar"
},
{
"context": "* @class Url\n *\n ... | components/Kinout/src/Kinout.Url.coffee | biojazzard/kirbout | 2 | ###
* Description or Responsability
*
* @namespace KINOUT
* @class Url
*
* @author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi
###
KINOUT.Url = ((knt, undefined_) ->
###
Read the page URL (hash) to get the current
navigational slide.
###
read = ->
bits = window.location.hash.slice(2).split("/")
indexh = (if bits[0] then parseInt(bits[0]) else 0)
indexv = (if bits[1] then parseInt(bits[1]) else 0)
knt.View.slide indexh, indexv
return
###
Updates the page URL (hash) to reflect the current
navigational slide.
###
write = (horizontal_idx, vertical_idx) ->
url = "/"
url += horizontal_idx if horizontal_idx > 0 or vertical_idx > 0
url += "/" + vertical_idx if vertical_idx > 0
window.location.hash = url
return
read: read
write: write
)(KINOUT) | 222539 | ###
* Description or Responsability
*
* @namespace KINOUT
* @class Url
*
* @author <NAME> <<EMAIL>> || @soyjavi
###
KINOUT.Url = ((knt, undefined_) ->
###
Read the page URL (hash) to get the current
navigational slide.
###
read = ->
bits = window.location.hash.slice(2).split("/")
indexh = (if bits[0] then parseInt(bits[0]) else 0)
indexv = (if bits[1] then parseInt(bits[1]) else 0)
knt.View.slide indexh, indexv
return
###
Updates the page URL (hash) to reflect the current
navigational slide.
###
write = (horizontal_idx, vertical_idx) ->
url = "/"
url += horizontal_idx if horizontal_idx > 0 or vertical_idx > 0
url += "/" + vertical_idx if vertical_idx > 0
window.location.hash = url
return
read: read
write: write
)(KINOUT) | true | ###
* Description or Responsability
*
* @namespace KINOUT
* @class Url
*
* @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> || @soyjavi
###
KINOUT.Url = ((knt, undefined_) ->
###
Read the page URL (hash) to get the current
navigational slide.
###
read = ->
bits = window.location.hash.slice(2).split("/")
indexh = (if bits[0] then parseInt(bits[0]) else 0)
indexv = (if bits[1] then parseInt(bits[1]) else 0)
knt.View.slide indexh, indexv
return
###
Updates the page URL (hash) to reflect the current
navigational slide.
###
write = (horizontal_idx, vertical_idx) ->
url = "/"
url += horizontal_idx if horizontal_idx > 0 or vertical_idx > 0
url += "/" + vertical_idx if vertical_idx > 0
window.location.hash = url
return
read: read
write: write
)(KINOUT) |
[
{
"context": "-Connect\"\nAccounts.emailTemplates.from = \"EIDR-C <no-reply@eha.io>\"\n\nMeteor.startup ->\n unless Meteor.users.find()",
"end": 105,
"score": 0.9999244809150696,
"start": 90,
"tag": "EMAIL",
"value": "no-reply@eha.io"
},
{
"context": "settings.private?.initial_user ... | server/accounts.coffee | ecohealthalliance/eidr-connect | 1 | Accounts.emailTemplates.siteName = "EIDR-Connect"
Accounts.emailTemplates.from = "EIDR-C <no-reply@eha.io>"
Meteor.startup ->
unless Meteor.users.find().count()
userData = Meteor.settings.private?.initial_user or {
email: "admin@eha.io"
}
if userData
userData.profile = { name: 'Admin' }
console.log "[ Creating initial user with email #{userData.email} ]"
Accounts.createUser userData
newUserRecord = Meteor.users.findOne('emails.address': userData.email)
if newUserRecord
Roles.addUsersToRoles(newUserRecord._id, ['admin'])
else
console.warn '[ Meteor.settings.private.initial_user object \
is required to create the initial user record ]'
| 41916 | Accounts.emailTemplates.siteName = "EIDR-Connect"
Accounts.emailTemplates.from = "EIDR-C <<EMAIL>>"
Meteor.startup ->
unless Meteor.users.find().count()
userData = Meteor.settings.private?.initial_user or {
email: "<EMAIL>"
}
if userData
userData.profile = { name: '<NAME>' }
console.log "[ Creating initial user with email #{userData.email} ]"
Accounts.createUser userData
newUserRecord = Meteor.users.findOne('emails.address': userData.email)
if newUserRecord
Roles.addUsersToRoles(newUserRecord._id, ['admin'])
else
console.warn '[ Meteor.settings.private.initial_user object \
is required to create the initial user record ]'
| true | Accounts.emailTemplates.siteName = "EIDR-Connect"
Accounts.emailTemplates.from = "EIDR-C <PI:EMAIL:<EMAIL>END_PI>"
Meteor.startup ->
unless Meteor.users.find().count()
userData = Meteor.settings.private?.initial_user or {
email: "PI:EMAIL:<EMAIL>END_PI"
}
if userData
userData.profile = { name: 'PI:NAME:<NAME>END_PI' }
console.log "[ Creating initial user with email #{userData.email} ]"
Accounts.createUser userData
newUserRecord = Meteor.users.findOne('emails.address': userData.email)
if newUserRecord
Roles.addUsersToRoles(newUserRecord._id, ['admin'])
else
console.warn '[ Meteor.settings.private.initial_user object \
is required to create the initial user record ]'
|
[
{
"context": ")\n#\n# Depends on: ['mbit/api/Request']\n#\n# @author Torstein Thune\n# @copyright 2016 Microbrew.it\nmbit.factory('mbit",
"end": 100,
"score": 0.9998851418495178,
"start": 86,
"tag": "NAME",
"value": "Torstein Thune"
}
] | app/api/Beerstyles.coffee | Microbrewit/microbrewit-recipe-calculator | 0 | mbit = angular.module('Microbrewit')
#
# Depends on: ['mbit/api/Request']
#
# @author Torstein Thune
# @copyright 2016 Microbrew.it
mbit.factory('mbit/api/Beerstyles', [
'mbit/api/Request'
(Request) ->
endpoint = 'json/beerStyles'
# @param [Integer] id (optional) If you want to get a single one
# @return [Promise] promise
get = (id = '') ->
return Request.get "/#{endpoint}/#{id}"
return { get }
]) | 104630 | mbit = angular.module('Microbrewit')
#
# Depends on: ['mbit/api/Request']
#
# @author <NAME>
# @copyright 2016 Microbrew.it
mbit.factory('mbit/api/Beerstyles', [
'mbit/api/Request'
(Request) ->
endpoint = 'json/beerStyles'
# @param [Integer] id (optional) If you want to get a single one
# @return [Promise] promise
get = (id = '') ->
return Request.get "/#{endpoint}/#{id}"
return { get }
]) | true | mbit = angular.module('Microbrewit')
#
# Depends on: ['mbit/api/Request']
#
# @author PI:NAME:<NAME>END_PI
# @copyright 2016 Microbrew.it
mbit.factory('mbit/api/Beerstyles', [
'mbit/api/Request'
(Request) ->
endpoint = 'json/beerStyles'
# @param [Integer] id (optional) If you want to get a single one
# @return [Promise] promise
get = (id = '') ->
return Request.get "/#{endpoint}/#{id}"
return { get }
]) |
[
{
"context": "quire '../lib/memcacher'\nClient = new Memcacher ['127.0.0.1:11211']\nrequire 'should'\n\ndescribe 'Memcacher', -",
"end": 73,
"score": 0.9997494220733643,
"start": 64,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "ve records', (done) ->\n\t\tClient.set key... | test/memcacher.test.coffee | vadimdemedes/memcacher | 1 | Memcacher = require '../lib/memcacher'
Client = new Memcacher ['127.0.0.1:11211']
require 'should'
describe 'Memcacher', ->
it 'should save records', (done) ->
Client.set key: 'test-key-first', value: 'first', expireIn: 360, tags: ['test-first'], ->
Client.set key: 'test-key-second', value: 'second', expireIn: 360, tags: ['test-second'], ->
Client.set key: 'test-keys', value: JSON.stringify(['first', 'second']), expireIn: 360, tags: ['test-first', 'test-second'], ->
do done
it 'should get record', (done) ->
Client.get 'test-keys', (err, value) ->
keys = JSON.parse value
keys[0].should.equal('first') and keys[1].should.equal('second')
do done
it 'should remove record', (done) ->
Client.del 'test-key-first', ->
Client.get 'test-keys', (err, value) ->
value.should.equal false
do done
it 'should chain methods', (done) ->
Client.set(key: 'test-key', value: 'test', expireIn: 360).get 'test-key', (err, value) ->
value.should.equal 'test'
do done | 119593 | Memcacher = require '../lib/memcacher'
Client = new Memcacher ['127.0.0.1:11211']
require 'should'
describe 'Memcacher', ->
it 'should save records', (done) ->
Client.set key: 'test-key<KEY>-first', value: 'first', expireIn: 360, tags: ['test-first'], ->
Client.set key: 'test<KEY>-key<KEY>-second', value: 'second', expireIn: 360, tags: ['test-second'], ->
Client.set key: 'test-keys', value: JSON.stringify(['first', 'second']), expireIn: 360, tags: ['test-first', 'test-second'], ->
do done
it 'should get record', (done) ->
Client.get 'test-keys', (err, value) ->
keys = JSON.parse value
keys[0].should.equal('first') and keys[1].should.equal('second')
do done
it 'should remove record', (done) ->
Client.del 'test-key-first', ->
Client.get 'test-keys', (err, value) ->
value.should.equal false
do done
it 'should chain methods', (done) ->
Client.set(key: 'test-key', value: 'test', expireIn: 360).get 'test-key', (err, value) ->
value.should.equal 'test'
do done | true | Memcacher = require '../lib/memcacher'
Client = new Memcacher ['127.0.0.1:11211']
require 'should'
describe 'Memcacher', ->
it 'should save records', (done) ->
Client.set key: 'test-keyPI:KEY:<KEY>END_PI-first', value: 'first', expireIn: 360, tags: ['test-first'], ->
Client.set key: 'testPI:KEY:<KEY>END_PI-keyPI:KEY:<KEY>END_PI-second', value: 'second', expireIn: 360, tags: ['test-second'], ->
Client.set key: 'test-keys', value: JSON.stringify(['first', 'second']), expireIn: 360, tags: ['test-first', 'test-second'], ->
do done
it 'should get record', (done) ->
Client.get 'test-keys', (err, value) ->
keys = JSON.parse value
keys[0].should.equal('first') and keys[1].should.equal('second')
do done
it 'should remove record', (done) ->
Client.del 'test-key-first', ->
Client.get 'test-keys', (err, value) ->
value.should.equal false
do done
it 'should chain methods', (done) ->
Client.set(key: 'test-key', value: 'test', expireIn: 360).get 'test-key', (err, value) ->
value.should.equal 'test'
do done |
[
{
"context": " = new ModelBuilder\n json =\n first_name: \"Asterix\",\n last_name: \"Gaul\",\n is_active: \"Ano\"",
"end": 206,
"score": 0.9997518658638,
"start": 199,
"tag": "NAME",
"value": "Asterix"
},
{
"context": " =\n first_name: \"Asterix\",\n last_... | wzk/resource/ModelBuilder_test.coffee | vojtatranta/werkzeug | 1 | suite 'wzk.resource.ModelBuilder', ->
ModelBuilder = wzk.resource.ModelBuilder
builder = null
json = null
arr = null
setup ->
builder = new ModelBuilder
json =
first_name: "Asterix",
last_name: "Gaul",
is_active: "Ano",
id: "1",
role: "Superuser",
email: "asterix@rychmat.eu"
arr = [json]
test 'Should build a model from JSON', ->
model = builder.build json
assert.instanceOf model, wzk.resource.Model
test 'Should build an array of models', ->
actual = builder.build arr
assert.isArray actual
assert.instanceOf actual[0], wzk.resource.Model
| 176917 | suite 'wzk.resource.ModelBuilder', ->
ModelBuilder = wzk.resource.ModelBuilder
builder = null
json = null
arr = null
setup ->
builder = new ModelBuilder
json =
first_name: "<NAME>",
last_name: "<NAME>",
is_active: "<NAME>",
id: "1",
role: "Superuser",
email: "<EMAIL>"
arr = [json]
test 'Should build a model from JSON', ->
model = builder.build json
assert.instanceOf model, wzk.resource.Model
test 'Should build an array of models', ->
actual = builder.build arr
assert.isArray actual
assert.instanceOf actual[0], wzk.resource.Model
| true | suite 'wzk.resource.ModelBuilder', ->
ModelBuilder = wzk.resource.ModelBuilder
builder = null
json = null
arr = null
setup ->
builder = new ModelBuilder
json =
first_name: "PI:NAME:<NAME>END_PI",
last_name: "PI:NAME:<NAME>END_PI",
is_active: "PI:NAME:<NAME>END_PI",
id: "1",
role: "Superuser",
email: "PI:EMAIL:<EMAIL>END_PI"
arr = [json]
test 'Should build a model from JSON', ->
model = builder.build json
assert.instanceOf model, wzk.resource.Model
test 'Should build an array of models', ->
actual = builder.build arr
assert.isArray actual
assert.instanceOf actual[0], wzk.resource.Model
|
[
{
"context": "push {\n\t\t\t\tid: user._id.toString()\n\t\t\t\tfirst_name: user.first_name\n\t\t\t\tlast_name: user.last_name\n\t\t\t\temai",
"end": 1715,
"score": 0.5635517239570618,
"start": 1711,
"tag": "NAME",
"value": "user"
},
{
"context": "g()\n\t\t\t\tfirst_name: user.first_n... | app/coffee/Features/Collaborators/CollaboratorsController.coffee | sandstormports/web-sharelatex | 1 | ProjectGetter = require "../Project/ProjectGetter"
CollaboratorsHandler = require "./CollaboratorsHandler"
EditorController = require "../Editor/EditorController"
module.exports = CollaboratorsController =
getCollaborators: (req, res, next = (error) ->) ->
ProjectGetter.getProject req.params.Project_id, { owner_ref: true, collaberator_refs: true, readOnly_refs: true}, (error, project) ->
return next(error) if error?
ProjectGetter.populateProjectWithUsers project, (error, project) ->
return next(error) if error?
CollaboratorsController._formatCollaborators project, (error, collaborators) ->
return next(error) if error?
res.send(JSON.stringify(collaborators))
removeSelfFromProject: (req, res, next = (error) ->) ->
user_id = req.session?.user?._id
if !user_id?
return next(new Error("User should be logged in"))
CollaboratorsHandler.removeUserFromProject req.params.project_id, user_id, (error) ->
return next(error) if error?
res.sendStatus 204
addUserToProject: (req, res, next) ->
project_id = req.params.Project_id
{email, privileges} = req.body
EditorController.addUserToProject project_id, email, privileges, (error, user) ->
return next(error) if error?
res.json user: user
removeUserFromProject: (req, res, next) ->
project_id = req.params.Project_id
user_id = req.params.user_id
EditorController.removeUserFromProject project_id, user_id, (error)->
return next(error) if error?
res.sendStatus 204
_formatCollaborators: (project, callback = (error, collaborators) ->) ->
collaborators = []
pushCollaborator = (user, permissions, owner) ->
collaborators.push {
id: user._id.toString()
first_name: user.first_name
last_name: user.last_name
email: user.email
permissions: permissions
owner: owner
}
if project.owner_ref?
pushCollaborator(project.owner_ref, ["read", "write", "admin"], true)
if project.collaberator_refs? and project.collaberator_refs.length > 0
for user in project.collaberator_refs
pushCollaborator(user, ["read", "write"], false)
if project.readOnly_refs? and project.readOnly_refs.length > 0
for user in project.readOnly_refs
pushCollaborator(user, ["read"], false)
callback null, collaborators
| 147105 | ProjectGetter = require "../Project/ProjectGetter"
CollaboratorsHandler = require "./CollaboratorsHandler"
EditorController = require "../Editor/EditorController"
module.exports = CollaboratorsController =
getCollaborators: (req, res, next = (error) ->) ->
ProjectGetter.getProject req.params.Project_id, { owner_ref: true, collaberator_refs: true, readOnly_refs: true}, (error, project) ->
return next(error) if error?
ProjectGetter.populateProjectWithUsers project, (error, project) ->
return next(error) if error?
CollaboratorsController._formatCollaborators project, (error, collaborators) ->
return next(error) if error?
res.send(JSON.stringify(collaborators))
removeSelfFromProject: (req, res, next = (error) ->) ->
user_id = req.session?.user?._id
if !user_id?
return next(new Error("User should be logged in"))
CollaboratorsHandler.removeUserFromProject req.params.project_id, user_id, (error) ->
return next(error) if error?
res.sendStatus 204
addUserToProject: (req, res, next) ->
project_id = req.params.Project_id
{email, privileges} = req.body
EditorController.addUserToProject project_id, email, privileges, (error, user) ->
return next(error) if error?
res.json user: user
removeUserFromProject: (req, res, next) ->
project_id = req.params.Project_id
user_id = req.params.user_id
EditorController.removeUserFromProject project_id, user_id, (error)->
return next(error) if error?
res.sendStatus 204
_formatCollaborators: (project, callback = (error, collaborators) ->) ->
collaborators = []
pushCollaborator = (user, permissions, owner) ->
collaborators.push {
id: user._id.toString()
first_name: <NAME>.first_name
last_name: <NAME>.last_name
email: user.email
permissions: permissions
owner: owner
}
if project.owner_ref?
pushCollaborator(project.owner_ref, ["read", "write", "admin"], true)
if project.collaberator_refs? and project.collaberator_refs.length > 0
for user in project.collaberator_refs
pushCollaborator(user, ["read", "write"], false)
if project.readOnly_refs? and project.readOnly_refs.length > 0
for user in project.readOnly_refs
pushCollaborator(user, ["read"], false)
callback null, collaborators
| true | ProjectGetter = require "../Project/ProjectGetter"
CollaboratorsHandler = require "./CollaboratorsHandler"
EditorController = require "../Editor/EditorController"
module.exports = CollaboratorsController =
getCollaborators: (req, res, next = (error) ->) ->
ProjectGetter.getProject req.params.Project_id, { owner_ref: true, collaberator_refs: true, readOnly_refs: true}, (error, project) ->
return next(error) if error?
ProjectGetter.populateProjectWithUsers project, (error, project) ->
return next(error) if error?
CollaboratorsController._formatCollaborators project, (error, collaborators) ->
return next(error) if error?
res.send(JSON.stringify(collaborators))
removeSelfFromProject: (req, res, next = (error) ->) ->
user_id = req.session?.user?._id
if !user_id?
return next(new Error("User should be logged in"))
CollaboratorsHandler.removeUserFromProject req.params.project_id, user_id, (error) ->
return next(error) if error?
res.sendStatus 204
addUserToProject: (req, res, next) ->
project_id = req.params.Project_id
{email, privileges} = req.body
EditorController.addUserToProject project_id, email, privileges, (error, user) ->
return next(error) if error?
res.json user: user
removeUserFromProject: (req, res, next) ->
project_id = req.params.Project_id
user_id = req.params.user_id
EditorController.removeUserFromProject project_id, user_id, (error)->
return next(error) if error?
res.sendStatus 204
_formatCollaborators: (project, callback = (error, collaborators) ->) ->
collaborators = []
pushCollaborator = (user, permissions, owner) ->
collaborators.push {
id: user._id.toString()
first_name: PI:NAME:<NAME>END_PI.first_name
last_name: PI:NAME:<NAME>END_PI.last_name
email: user.email
permissions: permissions
owner: owner
}
if project.owner_ref?
pushCollaborator(project.owner_ref, ["read", "write", "admin"], true)
if project.collaberator_refs? and project.collaberator_refs.length > 0
for user in project.collaberator_refs
pushCollaborator(user, ["read", "write"], false)
if project.readOnly_refs? and project.readOnly_refs.length > 0
for user in project.readOnly_refs
pushCollaborator(user, ["read"], false)
callback null, collaborators
|
[
{
"context": "r the Kaffee library.\n\t\n @version 0.3.0\n @author Fabian M. <mail.fabianm@gmail.com>\n###\nclass Cli\n\n\t###\n\t ",
"end": 378,
"score": 0.9998049139976501,
"start": 370,
"tag": "NAME",
"value": "Fabian M"
},
{
"context": " library.\n\t\n @version 0.3.0\n @auth... | kaffee-cli/src/main/kaffee/cli.coffee | fabianm/kaffee | 1 | Commander = require 'commander'
Winston = require 'winston'
Kaffee = require 'kaffee'
Configuration = Kaffee.Configuration
Workspace = Kaffee.project.Workspace
ProjectConfiguration = Kaffee.project.ProjectConfiguration
Project = Kaffee.project.Project
Request = Kaffee.execution.Request
###
Command line interface for the Kaffee library.
@version 0.3.0
@author Fabian M. <mail.fabianm@gmail.com>
###
class Cli
###
Runs Kaffee using command line arguments.
@since 0.2.1
@param args The command line arguments.
###
@run: (args) ->
logger = new Winston.Logger
transports: [ new Winston.transports.Console
colorize : true
]
goals = []
Commander.version(Configuration.VERSION).usage(Configuration.NAME + " [options] [goal(s)]")
Commander.option "-w, --workspace <path>", "Changes the working directory.", "."
Commander.option "-c, --config <path>", "Sets the path to the package.json file.", undefined
Commander.option "-f, --force", "Forces Kaffee.", Boolean, false
Commander.command('*').action ->
a = goals.concat Array.prototype.slice.call(arguments)
a.pop()
goals = a
Commander.parse(args);
try
workspace = new Workspace Commander.workspace
config = new ProjectConfiguration workspace, Commander.config
catch e
return not logger.error e
project = new Project config
project.getEventManager().on "attain", (goal) ->
console.log ">> Running goal \"#{ goal.getPlugin().getName() }:#{ goal.getName() }\""
project.getEventManager().on "attained", (goal, result) ->
errors = (log for log in result.getLogs() when log.getLevel().value >= 3)
warnings = (log for log in result.getLogs() when log.getLevel().value is 2)
console.log ">> Finished with #{ errors.length } error(s) and #{ warnings.length } warning(s)"
project.getEventManager().on "*log", (log) ->
return logger.error log.getStack() if log.getLevel().value >= 3
logger.log log.getLevel().name, log.getMessage()
return unless project.load()
result = project.execute new Request(goals, Commander.force)
module.exports = Cli
| 201814 | Commander = require 'commander'
Winston = require 'winston'
Kaffee = require 'kaffee'
Configuration = Kaffee.Configuration
Workspace = Kaffee.project.Workspace
ProjectConfiguration = Kaffee.project.ProjectConfiguration
Project = Kaffee.project.Project
Request = Kaffee.execution.Request
###
Command line interface for the Kaffee library.
@version 0.3.0
@author <NAME>. <<EMAIL>>
###
class Cli
###
Runs Kaffee using command line arguments.
@since 0.2.1
@param args The command line arguments.
###
@run: (args) ->
logger = new Winston.Logger
transports: [ new Winston.transports.Console
colorize : true
]
goals = []
Commander.version(Configuration.VERSION).usage(Configuration.NAME + " [options] [goal(s)]")
Commander.option "-w, --workspace <path>", "Changes the working directory.", "."
Commander.option "-c, --config <path>", "Sets the path to the package.json file.", undefined
Commander.option "-f, --force", "Forces Kaffee.", Boolean, false
Commander.command('*').action ->
a = goals.concat Array.prototype.slice.call(arguments)
a.pop()
goals = a
Commander.parse(args);
try
workspace = new Workspace Commander.workspace
config = new ProjectConfiguration workspace, Commander.config
catch e
return not logger.error e
project = new Project config
project.getEventManager().on "attain", (goal) ->
console.log ">> Running goal \"#{ goal.getPlugin().getName() }:#{ goal.getName() }\""
project.getEventManager().on "attained", (goal, result) ->
errors = (log for log in result.getLogs() when log.getLevel().value >= 3)
warnings = (log for log in result.getLogs() when log.getLevel().value is 2)
console.log ">> Finished with #{ errors.length } error(s) and #{ warnings.length } warning(s)"
project.getEventManager().on "*log", (log) ->
return logger.error log.getStack() if log.getLevel().value >= 3
logger.log log.getLevel().name, log.getMessage()
return unless project.load()
result = project.execute new Request(goals, Commander.force)
module.exports = Cli
| true | Commander = require 'commander'
Winston = require 'winston'
Kaffee = require 'kaffee'
Configuration = Kaffee.Configuration
Workspace = Kaffee.project.Workspace
ProjectConfiguration = Kaffee.project.ProjectConfiguration
Project = Kaffee.project.Project
Request = Kaffee.execution.Request
###
Command line interface for the Kaffee library.
@version 0.3.0
@author PI:NAME:<NAME>END_PI. <PI:EMAIL:<EMAIL>END_PI>
###
class Cli
###
Runs Kaffee using command line arguments.
@since 0.2.1
@param args The command line arguments.
###
@run: (args) ->
logger = new Winston.Logger
transports: [ new Winston.transports.Console
colorize : true
]
goals = []
Commander.version(Configuration.VERSION).usage(Configuration.NAME + " [options] [goal(s)]")
Commander.option "-w, --workspace <path>", "Changes the working directory.", "."
Commander.option "-c, --config <path>", "Sets the path to the package.json file.", undefined
Commander.option "-f, --force", "Forces Kaffee.", Boolean, false
Commander.command('*').action ->
a = goals.concat Array.prototype.slice.call(arguments)
a.pop()
goals = a
Commander.parse(args);
try
workspace = new Workspace Commander.workspace
config = new ProjectConfiguration workspace, Commander.config
catch e
return not logger.error e
project = new Project config
project.getEventManager().on "attain", (goal) ->
console.log ">> Running goal \"#{ goal.getPlugin().getName() }:#{ goal.getName() }\""
project.getEventManager().on "attained", (goal, result) ->
errors = (log for log in result.getLogs() when log.getLevel().value >= 3)
warnings = (log for log in result.getLogs() when log.getLevel().value is 2)
console.log ">> Finished with #{ errors.length } error(s) and #{ warnings.length } warning(s)"
project.getEventManager().on "*log", (log) ->
return logger.error log.getStack() if log.getLevel().value >= 3
logger.log log.getLevel().name, log.getMessage()
return unless project.load()
result = project.execute new Request(goals, Commander.force)
module.exports = Cli
|
[
{
"context": "ith fake credentials\n\tbrowser\n\t.fill('username', 'you@example.com')\n\t.fill('password', 'fake password')\n\t.pressButt",
"end": 542,
"score": 0.9999244809150696,
"start": 527,
"tag": "EMAIL",
"value": "you@example.com"
},
{
"context": "username', 'you@example.com')... | tests/zombie/admin-login-failure.coffee | twheel/elefant | 96 | # zombie.js tests for elefant
# 1. compile to javascript via `coffee -c tests/zombie/*.coffee`
# 2. run via `node tests/zombie/*.js`
zombie = require('zombie')
assert = require('assert')
zombie.visit 'http://www.elefant.lo/admin', (err, browser, status)->
# page loaded ok
assert.ok browser.querySelector('form'), 'should find login form'
assert.ok browser.querySelector('p:contains("Please log in to continue.")'), 'should display login message'
# verify login error with fake credentials
browser
.fill('username', 'you@example.com')
.fill('password', 'fake password')
.pressButton 'Sign in', (err, browser, status)->
# verify login error
assert.ok browser.querySelector('p:contains("Incorrect email or password, please try again.")'), 'should display login error'
| 163890 | # zombie.js tests for elefant
# 1. compile to javascript via `coffee -c tests/zombie/*.coffee`
# 2. run via `node tests/zombie/*.js`
zombie = require('zombie')
assert = require('assert')
zombie.visit 'http://www.elefant.lo/admin', (err, browser, status)->
# page loaded ok
assert.ok browser.querySelector('form'), 'should find login form'
assert.ok browser.querySelector('p:contains("Please log in to continue.")'), 'should display login message'
# verify login error with fake credentials
browser
.fill('username', '<EMAIL>')
.fill('password', '<PASSWORD>')
.pressButton 'Sign in', (err, browser, status)->
# verify login error
assert.ok browser.querySelector('p:contains("Incorrect email or password, please try again.")'), 'should display login error'
| true | # zombie.js tests for elefant
# 1. compile to javascript via `coffee -c tests/zombie/*.coffee`
# 2. run via `node tests/zombie/*.js`
zombie = require('zombie')
assert = require('assert')
zombie.visit 'http://www.elefant.lo/admin', (err, browser, status)->
# page loaded ok
assert.ok browser.querySelector('form'), 'should find login form'
assert.ok browser.querySelector('p:contains("Please log in to continue.")'), 'should display login message'
# verify login error with fake credentials
browser
.fill('username', 'PI:EMAIL:<EMAIL>END_PI')
.fill('password', 'PI:PASSWORD:<PASSWORD>END_PI')
.pressButton 'Sign in', (err, browser, status)->
# verify login error
assert.ok browser.querySelector('p:contains("Incorrect email or password, please try again.")'), 'should display login error'
|
[
{
"context": " member.teamId = team.id\n member.firstName = 'Test'\n teamsnap.saveMember member, (err, result) ->",
"end": 466,
"score": 0.997183620929718,
"start": 462,
"tag": "NAME",
"value": "Test"
}
] | test/paymentNotes.coffee | teamsnap/teamsnap-javascript-sdk | 9 | describe 'Payment Notes', ->
teamFee = null
member = null
memberPayment = null
paymentNote = null
before (done) ->
teamFee = teamsnap.createTeamFee()
teamFee.teamId = team.id
teamFee.description = 'Test Team Fee'
teamFee.amount = 1
teamsnap.saveTeamFee teamFee, (err, result) ->
expect(err).to.be.null
done()
before (done) ->
member = teamsnap.createMember()
member.teamId = team.id
member.firstName = 'Test'
teamsnap.saveMember member, (err, result) ->
expect(err).to.be.null
done()
before (done) ->
teamsnap.loadMemberPayments(
{memberId: member.id, teamFeeId: teamFee.id}).then (memberPayments) ->
memberPayment = memberPayments[0]
done()
after (done) ->
teamsnap.deleteMember member, (err, result) ->
expect(err).to.be.null
done()
after (done) ->
teamsnap.deleteTeamFee teamFee, (err, result) ->
expect(err).to.be.null
done()
it 'should be able to load all payment notes for a team', (done) ->
teamsnap.loadPaymentNotes team.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
done()
it 'should be able to create a payment note', (done) ->
paymentNote = teamsnap.createPaymentNote()
paymentNote.teamId = team.id
paymentNote.memberPaymentId = memberPayment.id
paymentNote.note = 'Test payment note.'
paymentNote.description = 'Payment Note Description'
teamsnap.savePaymentNote paymentNote, (err, result) ->
expect(err).to.be.null
result.should.have.property('type', 'paymentNote')
done() | 54629 | describe 'Payment Notes', ->
teamFee = null
member = null
memberPayment = null
paymentNote = null
before (done) ->
teamFee = teamsnap.createTeamFee()
teamFee.teamId = team.id
teamFee.description = 'Test Team Fee'
teamFee.amount = 1
teamsnap.saveTeamFee teamFee, (err, result) ->
expect(err).to.be.null
done()
before (done) ->
member = teamsnap.createMember()
member.teamId = team.id
member.firstName = '<NAME>'
teamsnap.saveMember member, (err, result) ->
expect(err).to.be.null
done()
before (done) ->
teamsnap.loadMemberPayments(
{memberId: member.id, teamFeeId: teamFee.id}).then (memberPayments) ->
memberPayment = memberPayments[0]
done()
after (done) ->
teamsnap.deleteMember member, (err, result) ->
expect(err).to.be.null
done()
after (done) ->
teamsnap.deleteTeamFee teamFee, (err, result) ->
expect(err).to.be.null
done()
it 'should be able to load all payment notes for a team', (done) ->
teamsnap.loadPaymentNotes team.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
done()
it 'should be able to create a payment note', (done) ->
paymentNote = teamsnap.createPaymentNote()
paymentNote.teamId = team.id
paymentNote.memberPaymentId = memberPayment.id
paymentNote.note = 'Test payment note.'
paymentNote.description = 'Payment Note Description'
teamsnap.savePaymentNote paymentNote, (err, result) ->
expect(err).to.be.null
result.should.have.property('type', 'paymentNote')
done() | true | describe 'Payment Notes', ->
teamFee = null
member = null
memberPayment = null
paymentNote = null
before (done) ->
teamFee = teamsnap.createTeamFee()
teamFee.teamId = team.id
teamFee.description = 'Test Team Fee'
teamFee.amount = 1
teamsnap.saveTeamFee teamFee, (err, result) ->
expect(err).to.be.null
done()
before (done) ->
member = teamsnap.createMember()
member.teamId = team.id
member.firstName = 'PI:NAME:<NAME>END_PI'
teamsnap.saveMember member, (err, result) ->
expect(err).to.be.null
done()
before (done) ->
teamsnap.loadMemberPayments(
{memberId: member.id, teamFeeId: teamFee.id}).then (memberPayments) ->
memberPayment = memberPayments[0]
done()
after (done) ->
teamsnap.deleteMember member, (err, result) ->
expect(err).to.be.null
done()
after (done) ->
teamsnap.deleteTeamFee teamFee, (err, result) ->
expect(err).to.be.null
done()
it 'should be able to load all payment notes for a team', (done) ->
teamsnap.loadPaymentNotes team.id, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
done()
it 'should be able to create a payment note', (done) ->
paymentNote = teamsnap.createPaymentNote()
paymentNote.teamId = team.id
paymentNote.memberPaymentId = memberPayment.id
paymentNote.note = 'Test payment note.'
paymentNote.description = 'Payment Note Description'
teamsnap.savePaymentNote paymentNote, (err, result) ->
expect(err).to.be.null
result.should.have.property('type', 'paymentNote')
done() |
[
{
"context": "io.com\n\nCopyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>\n\nLicensed under the Apache License, Version 2.0 ",
"end": 194,
"score": 0.9999229311943054,
"start": 178,
"tag": "EMAIL",
"value": "info@chaibio.com"
}
] | frontend/javascripts/app/services/modal-error.coffee | MakerButt/chaipcr | 1 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
App.service 'ModalError', [
'$uibModal'
'$rootScope'
($uibModal, $rootScope) ->
self = @
$scope = $rootScope.$new()
self.open = (err) ->
$scope.title = err.title || 'ERROR'
$scope.message = err.message
$scope.date = err.date
$uibModal.open
templateUrl: 'app/views/directives/error-modal.html'
scope: $scope
windowClass: 'modal-error-window'
return self
] | 2807 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
App.service 'ModalError', [
'$uibModal'
'$rootScope'
($uibModal, $rootScope) ->
self = @
$scope = $rootScope.$new()
self.open = (err) ->
$scope.title = err.title || 'ERROR'
$scope.message = err.message
$scope.date = err.date
$uibModal.open
templateUrl: 'app/views/directives/error-modal.html'
scope: $scope
windowClass: 'modal-error-window'
return self
] | true | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <PI:EMAIL:<EMAIL>END_PI>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
App.service 'ModalError', [
'$uibModal'
'$rootScope'
($uibModal, $rootScope) ->
self = @
$scope = $rootScope.$new()
self.open = (err) ->
$scope.title = err.title || 'ERROR'
$scope.message = err.message
$scope.date = err.date
$uibModal.open
templateUrl: 'app/views/directives/error-modal.html'
scope: $scope
windowClass: 'modal-error-window'
return self
] |
[
{
"context": "fields: [\n 'name'\n 'firstName'\n 'occupation'\n 'em",
"end": 7809,
"score": 0.9502477049827576,
"start": 7800,
"tag": "NAME",
"value": "firstName"
}
] | test/suite/13_test_join.coffee | smbape/node-dblayer | 0 | logger = log4js.getLogger __filename.replace /^(?:.+[\\\/])?([^.\\\/]+)(?:.[^.]+)?$/, '$1'
async = require 'async'
_ = require 'lodash'
{PersistenceManager, squel} = require '../../'
describe 'join', ->
it 'should generate select with join query', ->
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'INNER JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'default'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'INNER JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'outer'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'OUTER JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'left'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'LEFT JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'right'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'RIGHT JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'CROSS JOIN'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'CROSS JOIN'
assertThrows ->
globals.pMgr.getSelectQuery 'User', {
join:
translation:
entity: 'Translation'
condition: squel.expr().and '{xxxxx, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
return
, 'TABLE_UNDEF'
assertThrows ->
globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: {}
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
return
, 'JOIN_TYPE'
return
it 'should join', (done)->
connector = globals.pools.reader.createConnector()
countryCode = 'CAMEROUN'
options =
connector: connector
fields: [
'id'
'country:property:code'
]
where: [
'{LNG, key} = ' + connector.escape 'FR'
'{country:property:code} = ' + connector.escape countryCode
]
join:
translation:
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
LNG:
entity: 'Language'
type: 'left'
condition: '{LNG, id} = {translation, language}'
fields: [
'code'
'key'
]
limit: 5
twaterfall connector, [
(next)-> globals.pMgr.list 'User', options, next
(models, next)->
assert.ok models.length > 0
assert.ok models.length <= options.limit
for model in models
assert.strictEqual countryCode, model.get('country').get('property').get('code')
assert.strictEqual countryCode, model.get('translation').get('property').get('code')
# test count with fields and join
# Using LIMIT you will not limit the count or sum but only the returned rows
# http://stackoverflow.com/questions/17020842/mysql-count-with-limit#answers-header
options.count = true
globals.pMgr.list 'User', options, next
return
(count, next)->
# There are supposed to be 25 users matching the where field
assert.strictEqual count, 25
next()
return
], done
return
it 'should fix list isues', (done)->
# Nested condition on non selected field cause crash
# Mixin parent causes inner join instead of left join for left join on child
# Select a was select a:*
connector = globals.pools.writer.createConnector()
countryCode = 'CAMEROUN'
options =
connector: connector
fields: ['id']
where: [
'{author:country:property:code} = ' + connector.escape countryCode
]
order: '{id}' # Important. For an unknown reason, second query is ordered
pModels = null
twaterfall connector, [
(next)-> globals.pMgr.list 'User', options, next
(models, next)->
assert.ok models.length > 0
pModels = models
options.fields = [
'id'
'author:country:property:*'
'author:language:property:*'
]
globals.pMgr.list 'User', options, next
return
(models, next)->
for model, index in models
assert.strictEqual countryCode, model.get('author').get('country').get('property').get('code')
assert.strictEqual model.get('id'), pModels[index].get('id')
next()
return
], done
return
it 'should fix issue: no field was considered as *', (done)->
connector = globals.pools.writer.createConnector()
countryCode = 'CAMEROUN'
options =
type: 'json'
connector: connector
fields: [
'name'
'firstName'
'occupation'
'email'
'country:property:code'
]
where: [
'{LNG, key} = ' + connector.escape 'FR'
'{country:property:code} = ' + connector.escape countryCode
]
join:
ctry:
entity: 'Translation'
condition: '{ctry, property} = {country:property}'
fields: [
'property:code'
]
LNG:
entity: 'Language'
type: 'left'
condition: '{LNG, id} = {ctry, language}'
twaterfall connector, [
(next)-> globals.pMgr.list 'User', options, next
(models, next)->
assert.ok models.length > 0
for model in models
assert.strictEqual countryCode, model.country.property.code
assert.strictEqual countryCode, model.ctry.property.code
next()
return
], done
return
return
| 208635 | logger = log4js.getLogger __filename.replace /^(?:.+[\\\/])?([^.\\\/]+)(?:.[^.]+)?$/, '$1'
async = require 'async'
_ = require 'lodash'
{PersistenceManager, squel} = require '../../'
describe 'join', ->
it 'should generate select with join query', ->
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'INNER JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'default'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'INNER JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'outer'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'OUTER JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'left'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'LEFT JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'right'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'RIGHT JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'CROSS JOIN'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'CROSS JOIN'
assertThrows ->
globals.pMgr.getSelectQuery 'User', {
join:
translation:
entity: 'Translation'
condition: squel.expr().and '{xxxxx, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
return
, 'TABLE_UNDEF'
assertThrows ->
globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: {}
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
return
, 'JOIN_TYPE'
return
it 'should join', (done)->
connector = globals.pools.reader.createConnector()
countryCode = 'CAMEROUN'
options =
connector: connector
fields: [
'id'
'country:property:code'
]
where: [
'{LNG, key} = ' + connector.escape 'FR'
'{country:property:code} = ' + connector.escape countryCode
]
join:
translation:
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
LNG:
entity: 'Language'
type: 'left'
condition: '{LNG, id} = {translation, language}'
fields: [
'code'
'key'
]
limit: 5
twaterfall connector, [
(next)-> globals.pMgr.list 'User', options, next
(models, next)->
assert.ok models.length > 0
assert.ok models.length <= options.limit
for model in models
assert.strictEqual countryCode, model.get('country').get('property').get('code')
assert.strictEqual countryCode, model.get('translation').get('property').get('code')
# test count with fields and join
# Using LIMIT you will not limit the count or sum but only the returned rows
# http://stackoverflow.com/questions/17020842/mysql-count-with-limit#answers-header
options.count = true
globals.pMgr.list 'User', options, next
return
(count, next)->
# There are supposed to be 25 users matching the where field
assert.strictEqual count, 25
next()
return
], done
return
it 'should fix list isues', (done)->
# Nested condition on non selected field cause crash
# Mixin parent causes inner join instead of left join for left join on child
# Select a was select a:*
connector = globals.pools.writer.createConnector()
countryCode = 'CAMEROUN'
options =
connector: connector
fields: ['id']
where: [
'{author:country:property:code} = ' + connector.escape countryCode
]
order: '{id}' # Important. For an unknown reason, second query is ordered
pModels = null
twaterfall connector, [
(next)-> globals.pMgr.list 'User', options, next
(models, next)->
assert.ok models.length > 0
pModels = models
options.fields = [
'id'
'author:country:property:*'
'author:language:property:*'
]
globals.pMgr.list 'User', options, next
return
(models, next)->
for model, index in models
assert.strictEqual countryCode, model.get('author').get('country').get('property').get('code')
assert.strictEqual model.get('id'), pModels[index].get('id')
next()
return
], done
return
it 'should fix issue: no field was considered as *', (done)->
connector = globals.pools.writer.createConnector()
countryCode = 'CAMEROUN'
options =
type: 'json'
connector: connector
fields: [
'name'
'<NAME>'
'occupation'
'email'
'country:property:code'
]
where: [
'{LNG, key} = ' + connector.escape 'FR'
'{country:property:code} = ' + connector.escape countryCode
]
join:
ctry:
entity: 'Translation'
condition: '{ctry, property} = {country:property}'
fields: [
'property:code'
]
LNG:
entity: 'Language'
type: 'left'
condition: '{LNG, id} = {ctry, language}'
twaterfall connector, [
(next)-> globals.pMgr.list 'User', options, next
(models, next)->
assert.ok models.length > 0
for model in models
assert.strictEqual countryCode, model.country.property.code
assert.strictEqual countryCode, model.ctry.property.code
next()
return
], done
return
return
| true | logger = log4js.getLogger __filename.replace /^(?:.+[\\\/])?([^.\\\/]+)(?:.[^.]+)?$/, '$1'
async = require 'async'
_ = require 'lodash'
{PersistenceManager, squel} = require '../../'
describe 'join', ->
it 'should generate select with join query', ->
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'INNER JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'default'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'INNER JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'outer'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'OUTER JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'left'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'LEFT JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'right'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'RIGHT JOIN'
query = globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: 'CROSS JOIN'
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
assert.include query.toString(), 'CROSS JOIN'
assertThrows ->
globals.pMgr.getSelectQuery 'User', {
join:
translation:
entity: 'Translation'
condition: squel.expr().and '{xxxxx, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
return
, 'TABLE_UNDEF'
assertThrows ->
globals.pMgr.getSelectQuery 'User', {
join:
translation:
type: {}
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
}
return
, 'JOIN_TYPE'
return
it 'should join', (done)->
connector = globals.pools.reader.createConnector()
countryCode = 'CAMEROUN'
options =
connector: connector
fields: [
'id'
'country:property:code'
]
where: [
'{LNG, key} = ' + connector.escape 'FR'
'{country:property:code} = ' + connector.escape countryCode
]
join:
translation:
entity: 'Translation'
condition: squel.expr().and '{translation, property} = {country:property}'
fields: [
'value'
'property:code'
]
LNG:
entity: 'Language'
type: 'left'
condition: '{LNG, id} = {translation, language}'
fields: [
'code'
'key'
]
limit: 5
twaterfall connector, [
(next)-> globals.pMgr.list 'User', options, next
(models, next)->
assert.ok models.length > 0
assert.ok models.length <= options.limit
for model in models
assert.strictEqual countryCode, model.get('country').get('property').get('code')
assert.strictEqual countryCode, model.get('translation').get('property').get('code')
# test count with fields and join
# Using LIMIT you will not limit the count or sum but only the returned rows
# http://stackoverflow.com/questions/17020842/mysql-count-with-limit#answers-header
options.count = true
globals.pMgr.list 'User', options, next
return
(count, next)->
# There are supposed to be 25 users matching the where field
assert.strictEqual count, 25
next()
return
], done
return
it 'should fix list isues', (done)->
# Nested condition on non selected field cause crash
# Mixin parent causes inner join instead of left join for left join on child
# Select a was select a:*
connector = globals.pools.writer.createConnector()
countryCode = 'CAMEROUN'
options =
connector: connector
fields: ['id']
where: [
'{author:country:property:code} = ' + connector.escape countryCode
]
order: '{id}' # Important. For an unknown reason, second query is ordered
pModels = null
twaterfall connector, [
(next)-> globals.pMgr.list 'User', options, next
(models, next)->
assert.ok models.length > 0
pModels = models
options.fields = [
'id'
'author:country:property:*'
'author:language:property:*'
]
globals.pMgr.list 'User', options, next
return
(models, next)->
for model, index in models
assert.strictEqual countryCode, model.get('author').get('country').get('property').get('code')
assert.strictEqual model.get('id'), pModels[index].get('id')
next()
return
], done
return
it 'should fix issue: no field was considered as *', (done)->
connector = globals.pools.writer.createConnector()
countryCode = 'CAMEROUN'
options =
type: 'json'
connector: connector
fields: [
'name'
'PI:NAME:<NAME>END_PI'
'occupation'
'email'
'country:property:code'
]
where: [
'{LNG, key} = ' + connector.escape 'FR'
'{country:property:code} = ' + connector.escape countryCode
]
join:
ctry:
entity: 'Translation'
condition: '{ctry, property} = {country:property}'
fields: [
'property:code'
]
LNG:
entity: 'Language'
type: 'left'
condition: '{LNG, id} = {ctry, language}'
twaterfall connector, [
(next)-> globals.pMgr.list 'User', options, next
(models, next)->
assert.ok models.length > 0
for model in models
assert.strictEqual countryCode, model.country.property.code
assert.strictEqual countryCode, model.ctry.property.code
next()
return
], done
return
return
|
[
{
"context": "emModel\n item = Item.create\n id: 1\n name: 'Test'\n console.log item\n console.log item.set('optio",
"end": 398,
"score": 0.998866081237793,
"start": 394,
"tag": "NAME",
"value": "Test"
}
] | javascripts/descriptor.coffee | moongift/hifive-examples | 0 | $ ->
manager = h5.core.data.createManager('SampleManager', 'sample');
ItemModel =
name: "Item"
schema:
id:
id: true
type: 'integer'
name:
type: 'string'
constraint:
notNull: true
option:
type: 'enum'
enumValue: [1, 'a', 'c']
Item = manager.createModel ItemModel
item = Item.create
id: 1
name: 'Test'
console.log item
console.log item.set('option', 1)
| 140724 | $ ->
manager = h5.core.data.createManager('SampleManager', 'sample');
ItemModel =
name: "Item"
schema:
id:
id: true
type: 'integer'
name:
type: 'string'
constraint:
notNull: true
option:
type: 'enum'
enumValue: [1, 'a', 'c']
Item = manager.createModel ItemModel
item = Item.create
id: 1
name: '<NAME>'
console.log item
console.log item.set('option', 1)
| true | $ ->
manager = h5.core.data.createManager('SampleManager', 'sample');
ItemModel =
name: "Item"
schema:
id:
id: true
type: 'integer'
name:
type: 'string'
constraint:
notNull: true
option:
type: 'enum'
enumValue: [1, 'a', 'c']
Item = manager.createModel ItemModel
item = Item.create
id: 1
name: 'PI:NAME:<NAME>END_PI'
console.log item
console.log item.set('option', 1)
|
[
{
"context": "e.js 1.8.3 http://underscorejs.org\n# (c) 2009-2015 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Edit",
"end": 582,
"score": 0.9998729825019836,
"start": 567,
"tag": "NAME",
"value": "Jeremy Ashkenas"
}
] | src/lib/Utils.coffee | rrothenb/noflo | 0 | # NoFlo - Flow-Based Programming for JavaScript
# (c) 2014-2017 Flowhub UG
# NoFlo may be freely distributed under the MIT license
# Guess language from filename
guessLanguageFromFilename = (filename) ->
return 'coffeescript' if /.*\.coffee$/.test filename
return 'javascript'
isArray = (obj) ->
return Array.isArray(obj) if Array.isArray
return Object.prototype.toString.call(arg) == '[object Array]'
# the following functions are from http://underscorejs.org/docs/underscore.html
# Underscore.js 1.8.3 http://underscorejs.org
# (c) 2009-2015 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
# Underscore may be freely distributed under the MIT license.
# Internal function that returns an efficient (for current engines)
# version of the passed-in callback,
# to be repeatedly applied in other Underscore functions.
optimizeCb = (func, context, argCount) ->
if context == undefined
return func
switch (if argCount == null then 3 else argCount)
when 1
return (value) ->
func.call context, value
when 2
return (value, other) ->
func.call context, value, other
when 3
return (value, index, collection) ->
func.call context, value, index, collection
when 4
return (accumulator, value, index, collection) ->
func.call context, accumulator, value, index, collection
->
func.apply context, arguments
# Create a reducing function iterating left or right.
# Optimized iterator function as using arguments.length in the main function
# will deoptimize the, see #1991.
# Build a left/right reduce implementation; `dir` is +1 (left-to-right)
# or -1 (right-to-left).
createReduce = (dir) ->
  # Core loop kept separate so the outer function stays small and
  # engine-friendly (see the Underscore notes above).
  iterator = (obj, iteratee, memo, keys, index, length) ->
    while index >= 0 and index < length
      currentKey = if keys then keys[index] else index
      memo = iteratee(memo, obj[currentKey], currentKey, obj)
      index += dir
    memo
  return (obj, iteratee, memo, context) ->
    iteratee = optimizeCb(iteratee, context, 4)
    # NOTE(review): unlike upstream Underscore there is no array fast
    # path here — `keys` is always built, so arrays are walked via their
    # index strings. Confirm this is intentional before "fixing" it.
    keys = Object.keys obj
    length = (keys or obj).length
    index = if dir > 0 then 0 else length - 1
    # Fewer than 3 call arguments means no seed was given: use the first
    # visited element as the initial memo and step past it.
    if arguments.length < 3
      memo = obj[if keys then keys[index] else index]
      index += dir
    iterator obj, iteratee, memo, keys, index, length
# Right-to-left reduce — the only direction this module exports.
reduceRight = createReduce(-1)
# Returns a function, that, as long as it continues to be invoked,
# will not be triggered.
# The function will be called after it stops being called for N milliseconds.
# If immediate is passed, trigger the function on the leading edge,
# instead of the trailing.
# Returns a debounced wrapper around `func`: it only fires after calls
# have stopped for `wait` milliseconds. With `immediate` set, `func`
# fires on the leading edge of the burst instead of the trailing edge.
debounce = (func, wait, immediate) ->
  timeout = undefined
  args = undefined
  context = undefined
  timestamp = undefined
  result = undefined
  # Timer callback: re-arm if the wrapper was called again recently,
  # otherwise (for trailing-edge mode) finally invoke `func`.
  later = ->
    # BUG FIX: `Date.now` is a function and must be *called*; the old
    # code computed `Date.now - timestamp`, which is NaN and broke the
    # debounce window entirely.
    last = Date.now() - timestamp
    if last < wait and last >= 0
      timeout = setTimeout(later, wait - last)
    else
      timeout = null
      if !immediate
        result = func.apply(context, args)
        if !timeout
          context = args = null
    return
  # The debounced wrapper (implicitly returned as the last expression).
  ->
    context = this
    args = arguments
    # BUG FIX: store the current time, not the Date.now function itself.
    timestamp = Date.now()
    callNow = immediate and !timeout
    if !timeout
      timeout = setTimeout(later, wait)
    if callNow
      result = func.apply(context, args)
      context = args = null
    result
# Public API of this utilities module.
exports.guessLanguageFromFilename = guessLanguageFromFilename
exports.reduceRight = reduceRight
exports.debounce = debounce
exports.isArray = isArray
| 17896 | # NoFlo - Flow-Based Programming for JavaScript
# (c) 2014-2017 Flowhub UG
# NoFlo may be freely distributed under the MIT license
# Guess language from filename
guessLanguageFromFilename = (filename) ->
return 'coffeescript' if /.*\.coffee$/.test filename
return 'javascript'
isArray = (obj) ->
return Array.isArray(obj) if Array.isArray
return Object.prototype.toString.call(arg) == '[object Array]'
# the following functions are from http://underscorejs.org/docs/underscore.html
# Underscore.js 1.8.3 http://underscorejs.org
# (c) 2009-2015 <NAME>, DocumentCloud and Investigative Reporters & Editors
# Underscore may be freely distributed under the MIT license.
# Internal function that returns an efficient (for current engines)
# version of the passed-in callback,
# to be repeatedly applied in other Underscore functions.
optimizeCb = (func, context, argCount) ->
if context == undefined
return func
switch (if argCount == null then 3 else argCount)
when 1
return (value) ->
func.call context, value
when 2
return (value, other) ->
func.call context, value, other
when 3
return (value, index, collection) ->
func.call context, value, index, collection
when 4
return (accumulator, value, index, collection) ->
func.call context, accumulator, value, index, collection
->
func.apply context, arguments
# Create a reducing function iterating left or right.
# Optimized iterator function as using arguments.length in the main function
# will deoptimize the, see #1991.
createReduce = (dir) ->
iterator = (obj, iteratee, memo, keys, index, length) ->
while index >= 0 and index < length
currentKey = if keys then keys[index] else index
memo = iteratee(memo, obj[currentKey], currentKey, obj)
index += dir
memo
return (obj, iteratee, memo, context) ->
iteratee = optimizeCb(iteratee, context, 4)
keys = Object.keys obj
length = (keys or obj).length
index = if dir > 0 then 0 else length - 1
if arguments.length < 3
memo = obj[if keys then keys[index] else index]
index += dir
iterator obj, iteratee, memo, keys, index, length
reduceRight = createReduce(-1)
# Returns a function, that, as long as it continues to be invoked,
# will not be triggered.
# The function will be called after it stops being called for N milliseconds.
# If immediate is passed, trigger the function on the leading edge,
# instead of the trailing.
debounce = (func, wait, immediate) ->
timeout = undefined
args = undefined
context = undefined
timestamp = undefined
result = undefined
later = ->
last = Date.now - timestamp
if last < wait and last >= 0
timeout = setTimeout(later, wait - last)
else
timeout = null
if !immediate
result = func.apply(context, args)
if !timeout
context = args = null
return
->
context = this
args = arguments
timestamp = Date.now
callNow = immediate and !timeout
if !timeout
timeout = setTimeout(later, wait)
if callNow
result = func.apply(context, args)
context = args = null
result
exports.guessLanguageFromFilename = guessLanguageFromFilename
exports.reduceRight = reduceRight
exports.debounce = debounce
exports.isArray = isArray
| true | # NoFlo - Flow-Based Programming for JavaScript
# (c) 2014-2017 Flowhub UG
# NoFlo may be freely distributed under the MIT license
# Guess language from filename
guessLanguageFromFilename = (filename) ->
return 'coffeescript' if /.*\.coffee$/.test filename
return 'javascript'
isArray = (obj) ->
return Array.isArray(obj) if Array.isArray
return Object.prototype.toString.call(arg) == '[object Array]'
# the following functions are from http://underscorejs.org/docs/underscore.html
# Underscore.js 1.8.3 http://underscorejs.org
# (c) 2009-2015 PI:NAME:<NAME>END_PI, DocumentCloud and Investigative Reporters & Editors
# Underscore may be freely distributed under the MIT license.
# Internal function that returns an efficient (for current engines)
# version of the passed-in callback,
# to be repeatedly applied in other Underscore functions.
optimizeCb = (func, context, argCount) ->
if context == undefined
return func
switch (if argCount == null then 3 else argCount)
when 1
return (value) ->
func.call context, value
when 2
return (value, other) ->
func.call context, value, other
when 3
return (value, index, collection) ->
func.call context, value, index, collection
when 4
return (accumulator, value, index, collection) ->
func.call context, accumulator, value, index, collection
->
func.apply context, arguments
# Create a reducing function iterating left or right.
# Optimized iterator function as using arguments.length in the main function
# will deoptimize the, see #1991.
createReduce = (dir) ->
iterator = (obj, iteratee, memo, keys, index, length) ->
while index >= 0 and index < length
currentKey = if keys then keys[index] else index
memo = iteratee(memo, obj[currentKey], currentKey, obj)
index += dir
memo
return (obj, iteratee, memo, context) ->
iteratee = optimizeCb(iteratee, context, 4)
keys = Object.keys obj
length = (keys or obj).length
index = if dir > 0 then 0 else length - 1
if arguments.length < 3
memo = obj[if keys then keys[index] else index]
index += dir
iterator obj, iteratee, memo, keys, index, length
reduceRight = createReduce(-1)
# Returns a function, that, as long as it continues to be invoked,
# will not be triggered.
# The function will be called after it stops being called for N milliseconds.
# If immediate is passed, trigger the function on the leading edge,
# instead of the trailing.
debounce = (func, wait, immediate) ->
timeout = undefined
args = undefined
context = undefined
timestamp = undefined
result = undefined
later = ->
last = Date.now - timestamp
if last < wait and last >= 0
timeout = setTimeout(later, wait - last)
else
timeout = null
if !immediate
result = func.apply(context, args)
if !timeout
context = args = null
return
->
context = this
args = arguments
timestamp = Date.now
callNow = immediate and !timeout
if !timeout
timeout = setTimeout(later, wait)
if callNow
result = func.apply(context, args)
context = args = null
result
exports.guessLanguageFromFilename = guessLanguageFromFilename
exports.reduceRight = reduceRight
exports.debounce = debounce
exports.isArray = isArray
|
[
{
"context": "ession settings\n\tapp.use session \n\t\tname: 'connect.sid'\n\t\tsecret: app.env.SECRET + ' '\n\t\tcookie:\n\t\t\tmaxA",
"end": 1287,
"score": 0.5299859642982483,
"start": 1284,
"tag": "USERNAME",
"value": "sid"
},
{
"context": "RT\n\t\t\tuser: app.env.DATABASE_USERNAM... | boot/config.coffee | jrdbnntt/theMenu | 0 | ###
# Loads module dependencies and configures app.
###
# Module dependencies
validator = require 'express-validator'
session = require 'express-session'
dotenv = require 'dotenv'
Q = require 'q'
Mariasql = require 'mariasql'
moment = require 'moment'
#JS utility libraries
util = require 'util'
vsprintf = require('sprintf-js').vsprintf
bcrypt = require 'bcrypt'
qt = require 'quickthumb'
fs = require 'fs-extra'
formidable = require 'formidable'
# Local lib
autoload = require '../lib/autoload'
# Configuration
# Configure the Express app: utility libraries, controllers, environment,
# view engine, sessions, and the MariaDB connection factories.
module.exports = (app) ->
	# Expose commonly used utility libraries on the app object so
	# controllers and models can reach them without re-requiring.
	app.util = util
	app.vsprintf = vsprintf
	app.bcrypt = bcrypt
	app.moment = moment
	app.Q = Q
	app.fs = fs
	app.formidable = formidable
	app.qt = qt

	# Autoload controllers
	autoload 'app/controllers', app

	# Load .env into process.env and keep a reference handy.
	dotenv.load()
	app.env = process.env

	# Configure app settings
	env = app.env.NODE_ENV || 'development'
	app.set 'port', if app.env.DEV_MODE then app.env.PORT_DEV else app.env.PORT_LIVE
	app.set 'views', __dirname + '/../app/views'
	app.set 'view engine', 'jade'
	app.use require('express').static __dirname + '/../public'
	app.use validator()

	# Development settings
	if (env == 'development')
		app.locals.pretty = true

	# Session settings
	app.use session
		name: 'connect.sid'
		secret: app.env.SECRET + ' '
		cookie:
			maxAge: 172800000 # 2 days
		saveUninitialized: false
		resave: false
	# Make the session visible to the view layer.
	app.use (req, res, next) ->
		res.locals.session = req.session
		next()

	# Database setup, including factories for persistent connections.
	app.db =
		Client: Mariasql
		setup:
			host: app.env.DATABASE_HOSTNAME
			port: app.env.DATABASE_PORT
			user: app.env.DATABASE_USERNAME
			password: app.env.DATABASE_PASSWORD
			db: app.env.DATABASE_NAME

	# Attach shared lifecycle logging to a connection and return it.
	# This was previously duplicated verbatim in newCon and newMultiCon.
	wireConnectionEvents = (con) ->
		con.on 'connect', ()->
			this.tId = this.threadId # copy so it isnt deleted
		.on 'error', (err)->
			console.log '> DB: Error on threadId ' + this.tId + '= ' + err
		.on 'close', (hadError)->
			if hadError
				console.log '> DB: Connection closed with old threadId ' + this.tId + ' WITH ERROR!'
		con

	# Open a new single-statement connection.
	app.db.newCon = ()->
		con = new app.db.Client()
		con.connect app.db.setup
		wireConnectionEvents con

	# Open a new connection that allows multiple statements per query.
	app.db.newMultiCon = ()->
		# BUG FIX: this used to set multiStatements on the shared
		# app.db.setup object, silently enabling multi-statement mode for
		# every later newCon() as well. Work on a copy of the settings.
		config = {}
		config[key] = value for key, value of app.db.setup
		config.multiStatements = true
		con = new app.db.Client()
		con.connect config
		wireConnectionEvents con

	# Setup models (must setup db first)
	app.models = {}
	autoload 'app/models', app
| 63858 | ###
# Loads module dependencies and configures app.
###
# Module dependencies
validator = require 'express-validator'
session = require 'express-session'
dotenv = require 'dotenv'
Q = require 'q'
Mariasql = require 'mariasql'
moment = require 'moment'
#JS utility libraries
util = require 'util'
vsprintf = require('sprintf-js').vsprintf
bcrypt = require 'bcrypt'
qt = require 'quickthumb'
fs = require 'fs-extra'
formidable = require 'formidable'
# Local lib
autoload = require '../lib/autoload'
# Configuration
module.exports = (app) ->
# Load random utility libraries
app.util = util
app.vsprintf = vsprintf
app.bcrypt = bcrypt
app.moment = moment
app.Q = Q
app.fs = fs
app.formidable = formidable
app.qt = qt
# Autoload controllers
autoload 'app/controllers', app
# Load .env
dotenv.load()
app.env = process.env
# Configure app settings
env = app.env.NODE_ENV || 'development'
app.set 'port', if app.env.DEV_MODE then app.env.PORT_DEV else app.env.PORT_LIVE
app.set 'views', __dirname + '/../app/views'
app.set 'view engine', 'jade'
app.use require('express').static __dirname + '/../public'
app.use validator()
# Development settings
if (env == 'development')
app.locals.pretty = true
#Session settings
app.use session
name: 'connect.sid'
secret: app.env.SECRET + ' '
cookie:
maxAge: 172800000 #2 days
saveUninitialized: false
resave: false
app.use (req,res,next) ->
res.locals.session = req.session;
next();
#setup database, including a global persistent connection
app.db =
Client: Mariasql
setup:
host: app.env.DATABASE_HOSTNAME
port: app.env.DATABASE_PORT
user: app.env.DATABASE_USERNAME
password: <PASSWORD>.DATABASE_PASSWORD
db: app.env.DATABASE_NAME
app.db.newCon = ()->
con = new app.db.Client()
con.connect app.db.setup
con.on 'connect', ()->
this.tId = this.threadId #so it isnt deleted
# console.log '> DB: New connection established with threadId ' + this.tId
.on 'error', (err)->
console.log '> DB: Error on threadId ' + this.tId + '= ' + err
.on 'close', (hadError)->
if hadError
console.log '> DB: Connection closed with old threadId ' + this.tId + ' WITH ERROR!'
else
# console.log '> DB: Connection closed with old threadId ' + this.tId + ' without error'
return con
app.db.newMultiCon = ()->
config = app.db.setup
config.multiStatements = true
con = new app.db.Client()
con.connect config
con.on 'connect', ()->
this.tId = this.threadId #so it isnt deleted
# console.log '> DB: New connection established with threadId ' + this.tId
.on 'error', (err)->
console.log '> DB: Error on threadId ' + this.tId + '= ' + err
.on 'close', (hadError)->
if hadError
console.log '> DB: Connection closed with old threadId ' + this.tId + ' WITH ERROR!'
else
# console.log '> DB: Connection closed with old threadId ' + this.tId + ' without error'
return con
#setup models (must setup db first)
app.models = {}
autoload 'app/models', app
| true | ###
# Loads module dependencies and configures app.
###
# Module dependencies
validator = require 'express-validator'
session = require 'express-session'
dotenv = require 'dotenv'
Q = require 'q'
Mariasql = require 'mariasql'
moment = require 'moment'
#JS utility libraries
util = require 'util'
vsprintf = require('sprintf-js').vsprintf
bcrypt = require 'bcrypt'
qt = require 'quickthumb'
fs = require 'fs-extra'
formidable = require 'formidable'
# Local lib
autoload = require '../lib/autoload'
# Configuration
module.exports = (app) ->
# Load random utility libraries
app.util = util
app.vsprintf = vsprintf
app.bcrypt = bcrypt
app.moment = moment
app.Q = Q
app.fs = fs
app.formidable = formidable
app.qt = qt
# Autoload controllers
autoload 'app/controllers', app
# Load .env
dotenv.load()
app.env = process.env
# Configure app settings
env = app.env.NODE_ENV || 'development'
app.set 'port', if app.env.DEV_MODE then app.env.PORT_DEV else app.env.PORT_LIVE
app.set 'views', __dirname + '/../app/views'
app.set 'view engine', 'jade'
app.use require('express').static __dirname + '/../public'
app.use validator()
# Development settings
if (env == 'development')
app.locals.pretty = true
#Session settings
app.use session
name: 'connect.sid'
secret: app.env.SECRET + ' '
cookie:
maxAge: 172800000 #2 days
saveUninitialized: false
resave: false
app.use (req,res,next) ->
res.locals.session = req.session;
next();
#setup database, including a global persistent connection
app.db =
Client: Mariasql
setup:
host: app.env.DATABASE_HOSTNAME
port: app.env.DATABASE_PORT
user: app.env.DATABASE_USERNAME
password: PI:PASSWORD:<PASSWORD>END_PI.DATABASE_PASSWORD
db: app.env.DATABASE_NAME
app.db.newCon = ()->
con = new app.db.Client()
con.connect app.db.setup
con.on 'connect', ()->
this.tId = this.threadId #so it isnt deleted
# console.log '> DB: New connection established with threadId ' + this.tId
.on 'error', (err)->
console.log '> DB: Error on threadId ' + this.tId + '= ' + err
.on 'close', (hadError)->
if hadError
console.log '> DB: Connection closed with old threadId ' + this.tId + ' WITH ERROR!'
else
# console.log '> DB: Connection closed with old threadId ' + this.tId + ' without error'
return con
app.db.newMultiCon = ()->
config = app.db.setup
config.multiStatements = true
con = new app.db.Client()
con.connect config
con.on 'connect', ()->
this.tId = this.threadId #so it isnt deleted
# console.log '> DB: New connection established with threadId ' + this.tId
.on 'error', (err)->
console.log '> DB: Error on threadId ' + this.tId + '= ' + err
.on 'close', (hadError)->
if hadError
console.log '> DB: Connection closed with old threadId ' + this.tId + ' WITH ERROR!'
else
# console.log '> DB: Connection closed with old threadId ' + this.tId + ' without error'
return con
#setup models (must setup db first)
app.models = {}
autoload 'app/models', app
|
[
{
"context": "ns =\n reduce: false\n key: ['uid', store.inboxID, 10]\n Message.rawRequest '",
"end": 2252,
"score": 0.9376524090766907,
"start": 2249,
"tag": "KEY",
"value": "uid"
}
] | tests/02_account_synchro.coffee | gelnior/cozy-emails | 58 | should = require 'should'
# Integration suite: mutate mailboxes directly on the IMAP server,
# trigger an account refresh, then verify the changes are mirrored in
# the Cozy models. NOTE: the `it` blocks are strictly order-dependent.
describe 'Account Synchronizations', ->
  it "Get initial Inbox count", (done) ->
    client.get "/mailbox/#{store.inboxID}", (err, res, payload) =>
      payload.should.have.property 'count'
      # Later assertions are made relative to this starting count.
      store.initialInboxCount = payload.count
      done()

  it "When I move a message on the IMAP server", (done) ->
    @timeout 10000
    helpers.getImapServerRawConnection done, ->
      @openBox 'INBOX', =>
        @move '8', 'Test Folder', =>
          @closeBox =>
            @end()

  it "And refresh the account", (done) ->
    @timeout 10000
    client.get "/refresh", done

  it "Message have moved", (done) ->
    client.get "/mailbox/#{store.inboxID}", (err, res, payload) =>
      payload.should.have.property 'count', store.initialInboxCount - 1
      client.get "/mailbox/#{store.testBoxID}", (err, res, payload) =>
        payload.should.have.property 'count', 4
        done()

  it "When I copy a message on the IMAP server", (done) ->
    @timeout 10000
    helpers.getImapServerRawConnection done, ->
      @openBox 'INBOX', =>
        @copy '9', 'Test Folder', =>
          @end()

  it "And refresh the account", (done) ->
    @timeout 10000
    client.get "/refresh", done

  it "Message have been copied", (done) ->
    client.get "/mailbox/#{store.inboxID}", (err, res, payload) =>
      payload.should.have.property 'count', store.initialInboxCount - 1
      client.get "/mailbox/#{store.testBoxID}", (err, res, payload) =>
        payload.should.have.property 'count', 5
        done()

  it "When I read a message on the IMAP server", (done) ->
    @timeout 10000
    helpers.getImapServerRawConnection done, ->
      @openBox 'INBOX', =>
        @addFlags '10', ['\\Seen'], =>
          @end()

  it "And refresh the account", (done) ->
    @timeout 10000
    client.get "/refresh", done

  it "Message have been mark as read in cozy", (done) ->
    Message = require appPath + 'server/models/message'
    # Look the message up in the by-mailbox view and check its flags.
    queryOptions =
      reduce: false
      key: ['uid', store.inboxID, 10]
    Message.rawRequest 'byMailboxRequest', queryOptions, (err, rows) ->
      return done err if err
      rows[0].value.should.containEql '\\Seen'
      done null

  it "When the server changes one UIDValidity", (done) ->
    @timeout 30000
    DovecotTesting.changeSentUIDValidity done

  it "And refresh the account", (done) ->
    @timeout 15000
    client.get "/refresh", done

  it "Then the mailbox has been updated", (done) ->
    Mailbox = require appPath + 'server/models/mailbox'
    Mailbox.find store.sentBoxID, (err, sentBox) ->
      return done err if err
      sentBox.should.have.property 'uidvalidity', 1337
      done null

  it "When the server add one mailbox", (done) ->
    @timeout 10000
    helpers.getImapServerRawConnection done, ->
      @addBox 'Yolo', =>
        @end()

  it "And refresh the account", (done) ->
    @timeout 10000
    client.get "/refresh", done

  it "Then the mailbox has been created in couch", (done) ->
    Mailbox = require appPath + 'server/models/mailbox'
    Mailbox.rawRequest 'treeMap', include_docs: true, (err, rows) ->
      return done err if err
      store.yoloID = row.id for row in rows when row.doc.path is 'Yolo'
      should.exist store.yoloID
      done null

  it "Then the mailbox has been created in store", (done) ->
    ramStore = require appPath + 'server/models/store_account_and_boxes'
    accountBoxes = ramStore.getMailboxesByAccount store.accountID
    store.yoloID = box.id for box in accountBoxes when box.path is 'Yolo'
    should.exist store.yoloID
    done null

  it "When the server remove one mailbox", (done) ->
    @timeout 10000
    helpers.getImapServerRawConnection done, ->
      @delBox 'Yolo', =>
        @end()

  it "And refresh the account", (done) ->
    @timeout 10000
    client.get "/refresh", done

  it "Then the mailbox has been deleted", (done) ->
    Mailbox = require appPath + 'server/models/mailbox'
    Mailbox.find store.yoloID, (err, found) ->
      should.not.exist found
      done()
| 86316 | should = require 'should'
describe 'Account Synchronizations', ->
it "Get initial Inbox count", (done) ->
client.get "/mailbox/#{store.inboxID}", (err, res, body) =>
body.should.have.property 'count'
store.initialInboxCount = body.count
done()
it "When I move a message on the IMAP server", (done) ->
@timeout 10000
imap = helpers.getImapServerRawConnection done, ->
@openBox 'INBOX', =>
@move '8', 'Test Folder', =>
@closeBox =>
@end()
it "And refresh the account", (done) ->
@timeout 10000
client.get "/refresh", done
it "Message have moved", (done) ->
client.get "/mailbox/#{store.inboxID}", (err, res, body) =>
body.should.have.property 'count', store.initialInboxCount - 1
client.get "/mailbox/#{store.testBoxID}", (err, res, body) =>
body.should.have.property 'count', 4
done()
it "When I copy a message on the IMAP server", (done) ->
@timeout 10000
imap = helpers.getImapServerRawConnection done, ->
@openBox 'INBOX', =>
@copy '9', 'Test Folder', =>
@end()
it "And refresh the account", (done) ->
@timeout 10000
client.get "/refresh", done
it "Message have been copied", (done) ->
client.get "/mailbox/#{store.inboxID}", (err, res, body) =>
body.should.have.property 'count', store.initialInboxCount - 1
client.get "/mailbox/#{store.testBoxID}", (err, res, body) =>
body.should.have.property 'count', 5
done()
it "When I read a message on the IMAP server", (done) ->
@timeout 10000
imap = helpers.getImapServerRawConnection done, ->
@openBox 'INBOX', =>
@addFlags '10', ['\\Seen'], =>
@end()
it "And refresh the account", (done) ->
@timeout 10000
client.get "/refresh", done
it "Message have been mark as read in cozy", (done) ->
Message = require appPath + 'server/models/message'
options =
reduce: false
key: ['<KEY>', store.inboxID, 10]
Message.rawRequest 'byMailboxRequest', options, (err, rows)->
return done err if err
rows[0].value.should.containEql '\\Seen'
done null
it "When the server changes one UIDValidity", (done) ->
@timeout 30000
DovecotTesting.changeSentUIDValidity done
it "And refresh the account", (done) ->
@timeout 15000
client.get "/refresh", done
it "Then the mailbox has been updated", (done) ->
Mailbox = require appPath + 'server/models/mailbox'
Mailbox.find store.sentBoxID, (err, sentBox) ->
return done err if err
sentBox.should.have.property 'uidvalidity', 1337
done null
it "When the server add one mailbox", (done) ->
@timeout 10000
helpers.getImapServerRawConnection done, ->
@addBox 'Yolo', =>
@end()
it "And refresh the account", (done) ->
@timeout 10000
client.get "/refresh", done
it "Then the mailbox has been created in couch", (done) ->
Mailbox = require appPath + 'server/models/mailbox'
Mailbox.rawRequest 'treeMap', include_docs: true, (err, rows) ->
return done err if err
for row in rows when row.doc.path is 'Yolo'
store.yoloID = row.id
should.exist store.yoloID
done null
it "Then the mailbox has been created in store", (done) ->
ramStore = require appPath + 'server/models/store_account_and_boxes'
boxes = ramStore.getMailboxesByAccount store.accountID
for box in boxes when box.path is 'Yolo'
store.yoloID = box.id
should.exist store.yoloID
done null
it "When the server remove one mailbox", (done) ->
@timeout 10000
helpers.getImapServerRawConnection done, ->
@delBox 'Yolo', =>
@end()
it "And refresh the account", (done) ->
@timeout 10000
client.get "/refresh", done
it "Then the mailbox has been deleted", (done) ->
Mailbox = require appPath + 'server/models/mailbox'
Mailbox.find store.yoloID, (err, found) ->
should.not.exist found
done()
| true | should = require 'should'
describe 'Account Synchronizations', ->
it "Get initial Inbox count", (done) ->
client.get "/mailbox/#{store.inboxID}", (err, res, body) =>
body.should.have.property 'count'
store.initialInboxCount = body.count
done()
it "When I move a message on the IMAP server", (done) ->
@timeout 10000
imap = helpers.getImapServerRawConnection done, ->
@openBox 'INBOX', =>
@move '8', 'Test Folder', =>
@closeBox =>
@end()
it "And refresh the account", (done) ->
@timeout 10000
client.get "/refresh", done
it "Message have moved", (done) ->
client.get "/mailbox/#{store.inboxID}", (err, res, body) =>
body.should.have.property 'count', store.initialInboxCount - 1
client.get "/mailbox/#{store.testBoxID}", (err, res, body) =>
body.should.have.property 'count', 4
done()
it "When I copy a message on the IMAP server", (done) ->
@timeout 10000
imap = helpers.getImapServerRawConnection done, ->
@openBox 'INBOX', =>
@copy '9', 'Test Folder', =>
@end()
it "And refresh the account", (done) ->
@timeout 10000
client.get "/refresh", done
it "Message have been copied", (done) ->
client.get "/mailbox/#{store.inboxID}", (err, res, body) =>
body.should.have.property 'count', store.initialInboxCount - 1
client.get "/mailbox/#{store.testBoxID}", (err, res, body) =>
body.should.have.property 'count', 5
done()
it "When I read a message on the IMAP server", (done) ->
@timeout 10000
imap = helpers.getImapServerRawConnection done, ->
@openBox 'INBOX', =>
@addFlags '10', ['\\Seen'], =>
@end()
it "And refresh the account", (done) ->
@timeout 10000
client.get "/refresh", done
it "Message have been mark as read in cozy", (done) ->
Message = require appPath + 'server/models/message'
options =
reduce: false
key: ['PI:KEY:<KEY>END_PI', store.inboxID, 10]
Message.rawRequest 'byMailboxRequest', options, (err, rows)->
return done err if err
rows[0].value.should.containEql '\\Seen'
done null
it "When the server changes one UIDValidity", (done) ->
@timeout 30000
DovecotTesting.changeSentUIDValidity done
it "And refresh the account", (done) ->
@timeout 15000
client.get "/refresh", done
it "Then the mailbox has been updated", (done) ->
Mailbox = require appPath + 'server/models/mailbox'
Mailbox.find store.sentBoxID, (err, sentBox) ->
return done err if err
sentBox.should.have.property 'uidvalidity', 1337
done null
it "When the server add one mailbox", (done) ->
@timeout 10000
helpers.getImapServerRawConnection done, ->
@addBox 'Yolo', =>
@end()
it "And refresh the account", (done) ->
@timeout 10000
client.get "/refresh", done
it "Then the mailbox has been created in couch", (done) ->
Mailbox = require appPath + 'server/models/mailbox'
Mailbox.rawRequest 'treeMap', include_docs: true, (err, rows) ->
return done err if err
for row in rows when row.doc.path is 'Yolo'
store.yoloID = row.id
should.exist store.yoloID
done null
it "Then the mailbox has been created in store", (done) ->
ramStore = require appPath + 'server/models/store_account_and_boxes'
boxes = ramStore.getMailboxesByAccount store.accountID
for box in boxes when box.path is 'Yolo'
store.yoloID = box.id
should.exist store.yoloID
done null
it "When the server remove one mailbox", (done) ->
@timeout 10000
helpers.getImapServerRawConnection done, ->
@delBox 'Yolo', =>
@end()
it "And refresh the account", (done) ->
@timeout 10000
client.get "/refresh", done
it "Then the mailbox has been deleted", (done) ->
Mailbox = require appPath + 'server/models/mailbox'
Mailbox.find store.yoloID, (err, found) ->
should.not.exist found
done()
|
[
{
"context": " Flow-Based Programming for Node.js\n# (c) 2011 Henri Bergius, Nemein\n# NoFlo may be freely distributed und",
"end": 77,
"score": 0.9998401403427124,
"start": 64,
"tag": "NAME",
"value": "Henri Bergius"
},
{
"context": "gramming for Node.js\n# (c) 2011 Henr... | src/lib/Graph.coffee | rybesh/noflo | 0 | # NoFlo - Flow-Based Programming for Node.js
# (c) 2011 Henri Bergius, Nemein
# NoFlo may be freely distributed under the MIT license
fs = require 'fs'
events = require 'events'
fbp = require './Fbp'
# # NoFlo network graph
#
# This class represents an abstract NoFlo graph containing nodes
# connected to each other with edges.
#
# These graphs can be used for visualization and sketching, but
# also are the way to start a NoFlo network.
class Graph extends events.EventEmitter
  # Prototype-level defaults; per-instance arrays are created in the
  # constructor so separate graphs never share state.
  name: ''
  nodes: []
  edges: []
  initializers: []

  # ## Creating new graphs
  #
  # Graphs are created by simply instantiating the Graph class
  # and giving it a name:
  #
  #     myGraph = new Graph 'My very cool graph'
  constructor: (@name) ->
    @nodes = []
    @edges = []
    @initializers = []

  # ## Adding a node to the graph
  #
  # Nodes are identified by an ID unique to the graph. Additionally,
  # a node may contain information on what NoFlo component it is and
  # possible display coordinates, for example:
  #
  #     myGraph.addNode 'Read', 'ReadFile',
  #       x: 91
  #       y: 154
  #
  # Addition of a node will emit the `addNode` event.
  addNode: (id, component, display) ->
    node =
      id: id
      component: component
      display: display
    @nodes.push node
    @emit 'addNode', node

  # ## Removing a node from the graph
  #
  # Existing nodes can be removed from a graph by their ID. This will
  # remove the node and all edges and IIPs connected to it:
  #
  #     myGraph.removeNode 'Read'
  #
  # Emits `removeEdge` for each removed connection, then `removeNode`.
  removeNode: (id) ->
    # BUG FIX: the old implementation matched against a freshly-built
    # `{id: id}` stub, called removeEdge with a whole edge object instead
    # of (node, port) arguments, and — because `@nodes.indexOf node isnt -1`
    # parses as `indexOf(node isnt -1)` — spliced index -1, i.e. removed
    # the *last* node in the graph. Filter the arrays explicitly instead.
    node = @getNode id
    remainingEdges = []
    for edge in @edges
      if edge.from.node is id or edge.to.node is id
        @emit 'removeEdge', edge
      else
        remainingEdges.push edge
    @edges = remainingEdges
    remainingInitializers = []
    for initializer in @initializers
      if initializer.to.node is id
        @emit 'removeEdge', initializer
      else
        remainingInitializers.push initializer
    @initializers = remainingInitializers
    @emit 'removeNode', node
    index = @nodes.indexOf node
    @nodes.splice index, 1 if index isnt -1

  # ## Getting a node
  #
  # Node objects can be retrieved from the graph by their ID:
  #
  #     myNode = myGraph.getNode 'Read'
  #
  # Returns null when no node with that ID exists.
  getNode: (id) ->
    for node in @nodes
      return node if node.id is id
    # BUG FIX: without an explicit value here, CoffeeScript implicitly
    # returned the loop's accumulated results (a truthy array) on a miss.
    null

  # ## Connecting nodes
  #
  # Nodes can be connected by adding edges between a node's outport
  # and another node's inport:
  #
  #     myGraph.addEdge 'Read', 'out', 'Display', 'in'
  #
  # Adding an edge will emit the `addEdge` event.
  addEdge: (outNode, outPort, inNode, inPort) ->
    edge =
      from:
        node: outNode
        port: outPort
      to:
        node: inNode
        port: inPort
    @edges.push edge
    @emit 'addEdge', edge

  # ## Disconnecting nodes
  #
  # Connections between nodes can be removed by providing either
  # endpoint of the connection:
  #
  #     myGraph.removeEdge 'Read', 'out'
  #
  # or:
  #
  #     myGraph.removeEdge 'Display', 'in'
  #
  # Matching IIPs (by their target endpoint) are removed as well.
  # Emits `removeEdge` for every removed connection.
  removeEdge: (node, port) ->
    # BUG FIX: the old version spliced @edges/@initializers while
    # iterating over them, which skips the element following each
    # removal. Filter into fresh arrays instead.
    remainingEdges = []
    for edge in @edges
      if (edge.from.node is node and edge.from.port is port) or (edge.to.node is node and edge.to.port is port)
        @emit 'removeEdge', edge
      else
        remainingEdges.push edge
    @edges = remainingEdges
    remainingInitializers = []
    for initializer in @initializers
      if initializer.to.node is node and initializer.to.port is port
        @emit 'removeEdge', initializer
      else
        remainingInitializers.push initializer
    @initializers = remainingInitializers

  # ## Adding Initial Information Packets
  #
  # IIPs deliver data to a node inport without a sending node instance,
  # typically for start-up configuration (filenames to read, network
  # ports to listen to, ...):
  #
  #     myGraph.addInitial 'somefile.txt', 'Read', 'source'
  #
  # Adding an IIP will emit an `addEdge` event.
  addInitial: (data, node, port) ->
    initializer =
      from:
        data: data
      to:
        node: node
        port: port
    @initializers.push initializer
    @emit 'addEdge', initializer

  # Render the graph as a GraphViz DOT digraph string.
  toDOT: ->
    # DOT identifiers may not contain whitespace; port labels drop dots.
    cleanID = (id) ->
      id.replace /\s*/g, ""
    cleanPort = (port) ->
      port.replace /\./g, ""
    dot = "digraph {\n"
    for node in @nodes
      dot += "  #{cleanID(node.id)} [shape=box]\n"
    for initializer, id in @initializers
      dot += "  data#{id} -> #{cleanID(initializer.to.node)} [label='#{cleanPort(initializer.to.port)}']\n"
    for edge in @edges
      dot += "  #{cleanID(edge.from.node)} -> #{cleanID(edge.to.node)}[label='#{cleanPort(edge.from.port)}']\n"
    dot += "}"
    return dot

  # Render the graph in yUML diagram syntax (edges joined by commas).
  toYUML: ->
    yuml = []
    for initializer in @initializers
      yuml.push "(start)[#{initializer.to.port}]->(#{initializer.to.node})"
    for edge in @edges
      yuml.push "(#{edge.from.node})[#{edge.from.port}]->(#{edge.to.node})"
    yuml.join ","

  # Serialize the graph into the NoFlo JSON graph format: properties,
  # a processes map, and a connections list (IIPs carry a `data` key).
  toJSON: ->
    json =
      properties:
        name: @name
      processes: {}
      connections: []
    for node in @nodes
      json.processes[node.id] =
        component: node.component
      if node.display
        json.processes[node.id].display = node.display
    for edge in @edges
      json.connections.push
        src:
          process: edge.from.node
          port: edge.from.port
        tgt:
          process: edge.to.node
          port: edge.to.port
    for initializer in @initializers
      json.connections.push
        data: initializer.from.data
        tgt:
          process: initializer.to.node
          port: initializer.to.port
    json

  # Serialize and write the graph to `<file>.json`, then call
  # `success file`. Write errors are thrown inside the callback.
  save: (file, success) ->
    json = JSON.stringify @toJSON(), null, 4
    fs.writeFile "#{file}.json", json, "utf-8", (err, data) ->
      throw err if err
      success file
exports.Graph = Graph
exports.createGraph = (name) ->
new Graph name
exports.loadJSON = (definition, success) ->
graph = new Graph definition.properties.name
for id, def of definition.processes
graph.addNode id, def.component, def.display
for conn in definition.connections
if conn.data
graph.addInitial conn.data, conn.tgt.process, conn.tgt.port.toLowerCase()
continue
graph.addEdge conn.src.process, conn.src.port.toLowerCase(), conn.tgt.process, conn.tgt.port.toLowerCase()
success graph
exports.loadFile = (file, success) ->
fs.readFile file, "utf-8", (err, data) ->
throw err if err
if file.split('.').pop() is 'fbp'
return exports.loadFBP data, success
definition = JSON.parse data
exports.loadJSON definition, success
exports.loadFBP = (fbpData, success) ->
parser = new fbp.Fbp
definition = parser.parse fbpData
exports.loadJSON definition, success
| 198593 | # NoFlo - Flow-Based Programming for Node.js
# (c) 2011 <NAME>, <NAME>
# NoFlo may be freely distributed under the MIT license
fs = require 'fs'
events = require 'events'
fbp = require './Fbp'
# # NoFlo network graph
#
# This class represents an abstract NoFlo graph containing nodes
# connected to each other with edges.
#
# These graphs can be used for visualization and sketching, but
# also are the way to start a NoFlo network.
class Graph extends events.EventEmitter
name: ''
nodes: []
edges: []
initializers: []
# ## Creating new graphs
#
# Graphs are created by simply instantiating the Graph class
# and giving it a name:
#
# myGraph = new Graph 'My very cool graph'
constructor: (@name) ->
@nodes = []
@edges = []
@initializers = []
# ## Adding a node to the graph
#
# Nodes are identified by an ID unique to the graph. Additionally,
# a node may contain information on what NoFlo component it is and
# possible display coordinates.
#
# For example:
#
# myGraph.addNode 'Read, 'ReadFile',
# x: 91
# y: 154
#
# Addition of a node will emit the `addNode` event.
addNode: (id, component, display) ->
node =
id: id
component: component
display: display
@nodes.push node
@emit 'addNode', node
# ## Removing a node from the graph
#
# Existing nodes can be removed from a graph by their ID. This
# will remove the node and also remove all edges connected to it.
#
# myGraph.removeNode 'Read'
#
# Once the node has been removed, the `removeNode` event will be
# emitted.
removeNode: (id) ->
node =
id: id
for edge in @edges
if edge.from.node is node.id
@removeEdge edge
if edge.to.node is node.id
@removeEdge edge
for initializer in @initializers
if initializer.to.node is node.id
@removeEdge initializer.to.node, initializer.to.port
@emit 'removeNode', node
if @nodes.indexOf node isnt -1
@nodes.splice @nodes.indexOf(node), 1
# ## Getting a node
#
# Nodes objects can be retrieved from the graph by their ID:
#
# myNode = myGraph.getNode 'Read'
getNode: (id) ->
for node in @nodes
return node if node.id is id
# ## Connecting nodes
#
# Nodes can be connected by adding edges between a node's outport
# and another node's inport:
#
# myGraph.addEdge 'Read', 'out', 'Display', 'in'
#
# Adding an edge will emit the `addEdge` event.
addEdge: (outNode, outPort, inNode, inPort) ->
edge =
from:
node: outNode
port: outPort
to:
node: inNode
port: inPort
@edges.push edge
@emit 'addEdge', edge
# ## Disconnected nodes
#
# Connections between nodes can be removed by providing the
# node and port to disconnect. The specified node and port can
# be either the outport or the inport of the connection:
#
# myGraph.removeEdge 'Read', 'out'
#
# or:
#
# myGraph.removeEdge 'Display', 'in'
#
# Removing a connection will emit the `removeEdge` event.
removeEdge: (node, port) ->
for edge,index in @edges
if edge.from.node is node and edge.from.port is port
@emit 'removeEdge', edge
@edges.splice index, 1
if edge.to.node is node and edge.to.port is port
@emit 'removeEdge', edge
@edges.splice index, 1
for edge,index in @initializers
if edge.to.node is node and edge.to.port is port
@emit 'removeEdge', edge
@initializers.splice index, 1
# ## Adding Initial Information Packets
#
# Initial Information Packets (IIPs) can be used for sending data
# to specified node inports without a sending node instance.
#
# IIPs are especially useful for sending configuration information
# to components at NoFlo network start-up time. This could include
# filenames to read, or network ports to listen to.
#
# myGraph.addInitial 'somefile.txt', 'Read', 'source'
#
# Adding an IIP will emit a `addEdge` event.
addInitial: (data, node, port) ->
initializer =
from:
data: data
to:
node: node
port: port
@initializers.push initializer
@emit 'addEdge', initializer
toDOT: ->
cleanID = (id) ->
id.replace /\s*/g, ""
cleanPort = (port) ->
port.replace /\./g, ""
dot = "digraph {\n"
for node in @nodes
dot += " #{cleanID(node.id)} [shape=box]\n"
for initializer, id in @initializers
dot += " data#{id} -> #{cleanID(initializer.to.node)} [label='#{cleanPort(initializer.to.port)}']\n"
for edge in @edges
dot += " #{cleanID(edge.from.node)} -> #{cleanID(edge.to.node)}[label='#{cleanPort(edge.from.port)}']\n"
dot += "}"
return dot
toYUML: ->
yuml = []
for initializer in @initializers
yuml.push "(start)[#{initializer.to.port}]->(#{initializer.to.node})";
for edge in @edges
yuml.push "(#{edge.from.node})[#{edge.from.port}]->(#{edge.to.node})"
yuml.join ","
toJSON: ->
json =
properties:
name: @name
processes: {}
connections: []
for node in @nodes
json.processes[node.id] =
component: node.component
if node.display
json.processes[node.id].display = node.display
for edge in @edges
json.connections.push
src:
process: edge.from.node
port: edge.from.port
tgt:
process: edge.to.node
port: edge.to.port
for initializer in @initializers
json.connections.push
data: initializer.from.data
tgt:
process: initializer.to.node
port: initializer.to.port
json
save: (file, success) ->
json = JSON.stringify @toJSON(), null, 4
fs.writeFile "#{file}.json", json, "utf-8", (err, data) ->
throw err if err
success file
exports.Graph = Graph
exports.createGraph = (name) ->
new Graph name
exports.loadJSON = (definition, success) ->
graph = new Graph definition.properties.name
for id, def of definition.processes
graph.addNode id, def.component, def.display
for conn in definition.connections
if conn.data
graph.addInitial conn.data, conn.tgt.process, conn.tgt.port.toLowerCase()
continue
graph.addEdge conn.src.process, conn.src.port.toLowerCase(), conn.tgt.process, conn.tgt.port.toLowerCase()
success graph
exports.loadFile = (file, success) ->
fs.readFile file, "utf-8", (err, data) ->
throw err if err
if file.split('.').pop() is 'fbp'
return exports.loadFBP data, success
definition = JSON.parse data
exports.loadJSON definition, success
exports.loadFBP = (fbpData, success) ->
parser = new fbp.Fbp
definition = parser.parse fbpData
exports.loadJSON definition, success
| true | # NoFlo - Flow-Based Programming for Node.js
# (c) 2011 PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI
# NoFlo may be freely distributed under the MIT license
fs = require 'fs'
events = require 'events'
fbp = require './Fbp'
# # NoFlo network graph
#
# This class represents an abstract NoFlo graph containing nodes
# connected to each other with edges.
#
# These graphs can be used for visualization and sketching, but
# also are the way to start a NoFlo network.
class Graph extends events.EventEmitter
name: ''
nodes: []
edges: []
initializers: []
# ## Creating new graphs
#
# Graphs are created by simply instantiating the Graph class
# and giving it a name:
#
# myGraph = new Graph 'My very cool graph'
constructor: (@name) ->
@nodes = []
@edges = []
@initializers = []
# ## Adding a node to the graph
#
# Nodes are identified by an ID unique to the graph. Additionally,
# a node may contain information on what NoFlo component it is and
# possible display coordinates.
#
# For example:
#
# myGraph.addNode 'Read, 'ReadFile',
# x: 91
# y: 154
#
# Addition of a node will emit the `addNode` event.
addNode: (id, component, display) ->
node =
id: id
component: component
display: display
@nodes.push node
@emit 'addNode', node
# ## Removing a node from the graph
#
# Existing nodes can be removed from a graph by their ID. This
# will remove the node and also remove all edges connected to it.
#
# myGraph.removeNode 'Read'
#
# Once the node has been removed, the `removeNode` event will be
# emitted.
removeNode: (id) ->
node =
id: id
for edge in @edges
if edge.from.node is node.id
@removeEdge edge
if edge.to.node is node.id
@removeEdge edge
for initializer in @initializers
if initializer.to.node is node.id
@removeEdge initializer.to.node, initializer.to.port
@emit 'removeNode', node
if @nodes.indexOf node isnt -1
@nodes.splice @nodes.indexOf(node), 1
# ## Getting a node
#
# Nodes objects can be retrieved from the graph by their ID:
#
# myNode = myGraph.getNode 'Read'
getNode: (id) ->
for node in @nodes
return node if node.id is id
# ## Connecting nodes
#
# Nodes can be connected by adding edges between a node's outport
# and another node's inport:
#
# myGraph.addEdge 'Read', 'out', 'Display', 'in'
#
# Adding an edge will emit the `addEdge` event.
addEdge: (outNode, outPort, inNode, inPort) ->
edge =
from:
node: outNode
port: outPort
to:
node: inNode
port: inPort
@edges.push edge
@emit 'addEdge', edge
# ## Disconnected nodes
#
# Connections between nodes can be removed by providing the
# node and port to disconnect. The specified node and port can
# be either the outport or the inport of the connection:
#
# myGraph.removeEdge 'Read', 'out'
#
# or:
#
# myGraph.removeEdge 'Display', 'in'
#
# Removing a connection will emit the `removeEdge` event.
removeEdge: (node, port) ->
for edge,index in @edges
if edge.from.node is node and edge.from.port is port
@emit 'removeEdge', edge
@edges.splice index, 1
if edge.to.node is node and edge.to.port is port
@emit 'removeEdge', edge
@edges.splice index, 1
for edge,index in @initializers
if edge.to.node is node and edge.to.port is port
@emit 'removeEdge', edge
@initializers.splice index, 1
# ## Adding Initial Information Packets
#
# Initial Information Packets (IIPs) can be used for sending data
# to specified node inports without a sending node instance.
#
# IIPs are especially useful for sending configuration information
# to components at NoFlo network start-up time. This could include
# filenames to read, or network ports to listen to.
#
# myGraph.addInitial 'somefile.txt', 'Read', 'source'
#
# Adding an IIP will emit a `addEdge` event.
addInitial: (data, node, port) ->
initializer =
from:
data: data
to:
node: node
port: port
@initializers.push initializer
@emit 'addEdge', initializer
toDOT: ->
cleanID = (id) ->
id.replace /\s*/g, ""
cleanPort = (port) ->
port.replace /\./g, ""
dot = "digraph {\n"
for node in @nodes
dot += " #{cleanID(node.id)} [shape=box]\n"
for initializer, id in @initializers
dot += " data#{id} -> #{cleanID(initializer.to.node)} [label='#{cleanPort(initializer.to.port)}']\n"
for edge in @edges
dot += " #{cleanID(edge.from.node)} -> #{cleanID(edge.to.node)}[label='#{cleanPort(edge.from.port)}']\n"
dot += "}"
return dot
toYUML: ->
yuml = []
for initializer in @initializers
yuml.push "(start)[#{initializer.to.port}]->(#{initializer.to.node})";
for edge in @edges
yuml.push "(#{edge.from.node})[#{edge.from.port}]->(#{edge.to.node})"
yuml.join ","
toJSON: ->
json =
properties:
name: @name
processes: {}
connections: []
for node in @nodes
json.processes[node.id] =
component: node.component
if node.display
json.processes[node.id].display = node.display
for edge in @edges
json.connections.push
src:
process: edge.from.node
port: edge.from.port
tgt:
process: edge.to.node
port: edge.to.port
for initializer in @initializers
json.connections.push
data: initializer.from.data
tgt:
process: initializer.to.node
port: initializer.to.port
json
save: (file, success) ->
json = JSON.stringify @toJSON(), null, 4
fs.writeFile "#{file}.json", json, "utf-8", (err, data) ->
throw err if err
success file
exports.Graph = Graph
exports.createGraph = (name) ->
new Graph name
exports.loadJSON = (definition, success) ->
graph = new Graph definition.properties.name
for id, def of definition.processes
graph.addNode id, def.component, def.display
for conn in definition.connections
if conn.data
graph.addInitial conn.data, conn.tgt.process, conn.tgt.port.toLowerCase()
continue
graph.addEdge conn.src.process, conn.src.port.toLowerCase(), conn.tgt.process, conn.tgt.port.toLowerCase()
success graph
exports.loadFile = (file, success) ->
fs.readFile file, "utf-8", (err, data) ->
throw err if err
if file.split('.').pop() is 'fbp'
return exports.loadFBP data, success
definition = JSON.parse data
exports.loadJSON definition, success
exports.loadFBP = (fbpData, success) ->
parser = new fbp.Fbp
definition = parser.parse fbpData
exports.loadJSON definition, success
|
[
{
"context": "###\n# Author: iTonyYo <ceo@holaever.com> (https://github.com/iTonyYo)\n#",
"end": 21,
"score": 0.9986777305603027,
"start": 14,
"tag": "USERNAME",
"value": "iTonyYo"
},
{
"context": "###\n# Author: iTonyYo <ceo@holaever.com> (https://github.com/iTonyYo)\n# Last Update ... | node_modules/node-find-folder/gulp/watch.coffee | long-grass/mikey | 0 | ###
# Author: iTonyYo <ceo@holaever.com> (https://github.com/iTonyYo)
# Last Update (author): iTonyYo <ceo@holaever.com> (https://github.com/iTonyYo)
###
'use strict'
cfg = require '../config.json'
gulp = require 'gulp'
$ = require('gulp-load-plugins')()
gulp.task 'watch', ->
gulp.watch '*.coffee',
cwd: 'dev'
, ['coffeescript']
return
| 161499 | ###
# Author: iTonyYo <<EMAIL>> (https://github.com/iTonyYo)
# Last Update (author): iTonyYo <<EMAIL>> (https://github.com/iTonyYo)
###
'use strict'
cfg = require '../config.json'
gulp = require 'gulp'
$ = require('gulp-load-plugins')()
gulp.task 'watch', ->
gulp.watch '*.coffee',
cwd: 'dev'
, ['coffeescript']
return
| true | ###
# Author: iTonyYo <PI:EMAIL:<EMAIL>END_PI> (https://github.com/iTonyYo)
# Last Update (author): iTonyYo <PI:EMAIL:<EMAIL>END_PI> (https://github.com/iTonyYo)
###
'use strict'
cfg = require '../config.json'
gulp = require 'gulp'
$ = require('gulp-load-plugins')()
gulp.task 'watch', ->
gulp.watch '*.coffee',
cwd: 'dev'
, ['coffeescript']
return
|
[
{
"context": "ogic =\n\n data: () ->\n bannedLeaders: [\n 'Lord Chompy Bits'\n 'Dismounted McCabe'\n ]\n\n computed:\n\n ",
"end": 73,
"score": 0.9956026673316956,
"start": 57,
"tag": "NAME",
"value": "Lord Chompy Bits"
},
{
"context": " bannedLeaders: [\n 'Lo... | src/vues/Mixin/ModelLogic.coffee | AppSynergy/malifaux-crew-explorer | 0 |
ModelLogic =
data: () ->
bannedLeaders: [
'Lord Chompy Bits'
'Dismounted McCabe'
]
computed:
availableLeaders: () ->
if @faction != null && @encounterSize != null
_.filter @models, (model) =>
_.all [
@hasFaction model
not _.contains @bannedLeaders, model.name
_.intersection(model.attributes, @encounterSize.leaders).length > 0
]
.map (x) => @filterLeaderAttributes x
else []
availableCrew: () ->
if @faction != null && @leader != null
_.filter @models, (model) =>
_.all [
@hasFaction(model) || @hasAttribute(model, 'Mercenary')
not @hasAttribute model, 'Master'
]
else []
methods:
hasFaction: (model, faction = false) ->
key = if faction then faction.key else @faction.key
_.contains model.factions, key
hasAttribute: (model, attribute) ->
_.contains model.attributes, attribute
filterLeaderAttributes: (model) ->
atts = model.attributes
reject = (x) -> x.substring(0,4).match /[WR]a[rv]e/
replace = (x) -> x.replace(/([A-Z])/g, ' $1').trim()
model.attributes = _.reject(model.attributes, reject).map(replace)
return model
export default ModelLogic
| 127147 |
ModelLogic =
data: () ->
bannedLeaders: [
'<NAME>'
'<NAME>'
]
computed:
availableLeaders: () ->
if @faction != null && @encounterSize != null
_.filter @models, (model) =>
_.all [
@hasFaction model
not _.contains @bannedLeaders, model.name
_.intersection(model.attributes, @encounterSize.leaders).length > 0
]
.map (x) => @filterLeaderAttributes x
else []
availableCrew: () ->
if @faction != null && @leader != null
_.filter @models, (model) =>
_.all [
@hasFaction(model) || @hasAttribute(model, 'Mercenary')
not @hasAttribute model, 'Master'
]
else []
methods:
hasFaction: (model, faction = false) ->
key = if faction then faction.key else @faction.key
_.contains model.factions, key
hasAttribute: (model, attribute) ->
_.contains model.attributes, attribute
filterLeaderAttributes: (model) ->
atts = model.attributes
reject = (x) -> x.substring(0,4).match /[WR]a[rv]e/
replace = (x) -> x.replace(/([A-Z])/g, ' $1').trim()
model.attributes = _.reject(model.attributes, reject).map(replace)
return model
export default ModelLogic
| true |
ModelLogic =
data: () ->
bannedLeaders: [
'PI:NAME:<NAME>END_PI'
'PI:NAME:<NAME>END_PI'
]
computed:
availableLeaders: () ->
if @faction != null && @encounterSize != null
_.filter @models, (model) =>
_.all [
@hasFaction model
not _.contains @bannedLeaders, model.name
_.intersection(model.attributes, @encounterSize.leaders).length > 0
]
.map (x) => @filterLeaderAttributes x
else []
availableCrew: () ->
if @faction != null && @leader != null
_.filter @models, (model) =>
_.all [
@hasFaction(model) || @hasAttribute(model, 'Mercenary')
not @hasAttribute model, 'Master'
]
else []
methods:
hasFaction: (model, faction = false) ->
key = if faction then faction.key else @faction.key
_.contains model.factions, key
hasAttribute: (model, attribute) ->
_.contains model.attributes, attribute
filterLeaderAttributes: (model) ->
atts = model.attributes
reject = (x) -> x.substring(0,4).match /[WR]a[rv]e/
replace = (x) -> x.replace(/([A-Z])/g, ' $1').trim()
model.attributes = _.reject(model.attributes, reject).map(replace)
return model
export default ModelLogic
|
[
{
"context": " valid', ->\n beforeEach ->\n @email = 'parachute@failure.io'\n @password = 'happenstance'\n @sut.",
"end": 1123,
"score": 0.9999082684516907,
"start": 1103,
"tag": "EMAIL",
"value": "parachute@failure.io"
},
{
"context": "ail = 'parachute@fail... | test/app/signup-controller-spec.coffee | octoblu/email-password-site | 1 | describe 'SignupController', ->
beforeEach ->
module 'email-password'
inject ($controller, $q, $rootScope, $window) ->
@q = $q
@rootScope = $rootScope
@scope = $rootScope.$new()
@routeParams = {}
@window = location: sinon.stub()
@AuthenticatorService = register: sinon.stub().returns @q.when()
@sut = $controller 'SignupController',
AuthenticatorService: @AuthenticatorService
$routeParams: @routeParams
$scope: @scope
$window: @window
@sut.signupForm =
email: { $setTouched: => }
password: { $setTouched: => }
confirmPassword: { $error: {}, $setTouched: => }
it 'should exist', ->
expect(@sut).to.exist
it 'should have a loginPath function', ->
expect(@sut.loginPath).to.equal '/?callback=https%3A%2F%2Fapp.octoblu.com%2Fapi%2Fsession'
it 'should have a signup function', ->
expect(@sut.signup).to.exist
describe 'when the signup function is called', ->
describe 'when email, password and confirmPassword are valid', ->
beforeEach ->
@email = 'parachute@failure.io'
@password = 'happenstance'
@sut.formIsValid = sinon.stub().returns true
@confirmPassword = 'happenstance'
@sut.signup @email, @password, @confirmPassword
@rootScope.$digest()
it 'should call formIsValid', ->
expect(@sut.formIsValid).to.have.been.called
it 'should call the signup service', ->
expect(@AuthenticatorService.register).to.have.been.calledWith @email, @password, 'https://app.octoblu.com/api/session'
describe 'when AuthenticatorService resolves the promise', ->
beforeEach ->
@email = "faulty@machinery"
@password = "execution"
@uuid = "failing"
@token = "tree"
@sut.formIsValid = sinon.stub().returns true
@AuthenticatorService.register.returns @q.when('http://foo.blarg')
@sut.signup @email, @password, @password
@rootScope.$digest()
it 'should call formIsValid', ->
expect(@sut.formIsValid).to.have.been.called
it 'should return a uuid and token', ->
expect(@AuthenticatorService.register).to.have.been.calledWith @email, @password
it 'should redirect to the callback url with that uuid and token', ->
expect(@window.location).to.deep.equal 'http://foo.blarg'
describe 'when AuthenticatorService rejects the promise', ->
beforeEach ->
@email = "faulty@machinery"
@password = "execution"
@sut.formIsValid = sinon.stub().returns true
@AuthenticatorService.register.returns @q.reject(error: new Error('oh no'))
@sut.signup @email, @password, @password
@rootScope.$digest()
it 'should call formIsValid', ->
expect(@sut.formIsValid).to.have.been.called
it 'should set the errorMessage', ->
expect(@sut.errorMessage).to.deep.equal new Error('oh no')
| 220052 | describe 'SignupController', ->
beforeEach ->
module 'email-password'
inject ($controller, $q, $rootScope, $window) ->
@q = $q
@rootScope = $rootScope
@scope = $rootScope.$new()
@routeParams = {}
@window = location: sinon.stub()
@AuthenticatorService = register: sinon.stub().returns @q.when()
@sut = $controller 'SignupController',
AuthenticatorService: @AuthenticatorService
$routeParams: @routeParams
$scope: @scope
$window: @window
@sut.signupForm =
email: { $setTouched: => }
password: { $setTouched: => }
confirmPassword: { $error: {}, $setTouched: => }
it 'should exist', ->
expect(@sut).to.exist
it 'should have a loginPath function', ->
expect(@sut.loginPath).to.equal '/?callback=https%3A%2F%2Fapp.octoblu.com%2Fapi%2Fsession'
it 'should have a signup function', ->
expect(@sut.signup).to.exist
describe 'when the signup function is called', ->
describe 'when email, password and confirmPassword are valid', ->
beforeEach ->
@email = '<EMAIL>'
@password = '<PASSWORD>'
@sut.formIsValid = sinon.stub().returns true
@confirmPassword = '<PASSWORD>'
@sut.signup @email, @password, @confirmPassword
@rootScope.$digest()
it 'should call formIsValid', ->
expect(@sut.formIsValid).to.have.been.called
it 'should call the signup service', ->
expect(@AuthenticatorService.register).to.have.been.calledWith @email, @password, 'https://app.octoblu.com/api/session'
describe 'when AuthenticatorService resolves the promise', ->
beforeEach ->
@email = "<EMAIL>"
@password = "<PASSWORD>"
@uuid = "failing"
@token = "tree"
@sut.formIsValid = sinon.stub().returns true
@AuthenticatorService.register.returns @q.when('http://foo.blarg')
@sut.signup @email, @password, @password
@rootScope.$digest()
it 'should call formIsValid', ->
expect(@sut.formIsValid).to.have.been.called
it 'should return a uuid and token', ->
expect(@AuthenticatorService.register).to.have.been.calledWith @email, @password
it 'should redirect to the callback url with that uuid and token', ->
expect(@window.location).to.deep.equal 'http://foo.blarg'
describe 'when AuthenticatorService rejects the promise', ->
beforeEach ->
@email = "<EMAIL>"
@password = "<PASSWORD>"
@sut.formIsValid = sinon.stub().returns true
@AuthenticatorService.register.returns @q.reject(error: new Error('oh no'))
@sut.signup @email, @password, @password
@rootScope.$digest()
it 'should call formIsValid', ->
expect(@sut.formIsValid).to.have.been.called
it 'should set the errorMessage', ->
expect(@sut.errorMessage).to.deep.equal new Error('oh no')
| true | describe 'SignupController', ->
beforeEach ->
module 'email-password'
inject ($controller, $q, $rootScope, $window) ->
@q = $q
@rootScope = $rootScope
@scope = $rootScope.$new()
@routeParams = {}
@window = location: sinon.stub()
@AuthenticatorService = register: sinon.stub().returns @q.when()
@sut = $controller 'SignupController',
AuthenticatorService: @AuthenticatorService
$routeParams: @routeParams
$scope: @scope
$window: @window
@sut.signupForm =
email: { $setTouched: => }
password: { $setTouched: => }
confirmPassword: { $error: {}, $setTouched: => }
it 'should exist', ->
expect(@sut).to.exist
it 'should have a loginPath function', ->
expect(@sut.loginPath).to.equal '/?callback=https%3A%2F%2Fapp.octoblu.com%2Fapi%2Fsession'
it 'should have a signup function', ->
expect(@sut.signup).to.exist
describe 'when the signup function is called', ->
describe 'when email, password and confirmPassword are valid', ->
beforeEach ->
@email = 'PI:EMAIL:<EMAIL>END_PI'
@password = 'PI:PASSWORD:<PASSWORD>END_PI'
@sut.formIsValid = sinon.stub().returns true
@confirmPassword = 'PI:PASSWORD:<PASSWORD>END_PI'
@sut.signup @email, @password, @confirmPassword
@rootScope.$digest()
it 'should call formIsValid', ->
expect(@sut.formIsValid).to.have.been.called
it 'should call the signup service', ->
expect(@AuthenticatorService.register).to.have.been.calledWith @email, @password, 'https://app.octoblu.com/api/session'
describe 'when AuthenticatorService resolves the promise', ->
beforeEach ->
@email = "PI:EMAIL:<EMAIL>END_PI"
@password = "PI:PASSWORD:<PASSWORD>END_PI"
@uuid = "failing"
@token = "tree"
@sut.formIsValid = sinon.stub().returns true
@AuthenticatorService.register.returns @q.when('http://foo.blarg')
@sut.signup @email, @password, @password
@rootScope.$digest()
it 'should call formIsValid', ->
expect(@sut.formIsValid).to.have.been.called
it 'should return a uuid and token', ->
expect(@AuthenticatorService.register).to.have.been.calledWith @email, @password
it 'should redirect to the callback url with that uuid and token', ->
expect(@window.location).to.deep.equal 'http://foo.blarg'
describe 'when AuthenticatorService rejects the promise', ->
beforeEach ->
@email = "PI:EMAIL:<EMAIL>END_PI"
@password = "PI:PASSWORD:<PASSWORD>END_PI"
@sut.formIsValid = sinon.stub().returns true
@AuthenticatorService.register.returns @q.reject(error: new Error('oh no'))
@sut.signup @email, @password, @password
@rootScope.$digest()
it 'should call formIsValid', ->
expect(@sut.formIsValid).to.have.been.called
it 'should set the errorMessage', ->
expect(@sut.errorMessage).to.deep.equal new Error('oh no')
|
[
{
"context": "key: 'line-break'\n\npatterns: [\n\n # Matches a trailing + preceded ",
"end": 16,
"score": 0.9951799511909485,
"start": 6,
"tag": "KEY",
"value": "line-break"
}
] | grammars/repositories/partials/line-break-grammar.cson | andrewcarver/atom-language-asciidoc | 45 | key: 'line-break'
patterns: [
# Matches a trailing + preceded by at least one space character
# at the end of a line of non-empty content, which forces a
# hard line break (<br> tag in HTML output).
#
# Examples
#
# Foo +
# Bar
#
match: '(?<=\\S)\\p{Blank}+(\\+)$'
captures:
1: name: 'variable.line-break.asciidoc'
]
| 146655 | key: '<KEY>'
patterns: [
# Matches a trailing + preceded by at least one space character
# at the end of a line of non-empty content, which forces a
# hard line break (<br> tag in HTML output).
#
# Examples
#
# Foo +
# Bar
#
match: '(?<=\\S)\\p{Blank}+(\\+)$'
captures:
1: name: 'variable.line-break.asciidoc'
]
| true | key: 'PI:KEY:<KEY>END_PI'
patterns: [
# Matches a trailing + preceded by at least one space character
# at the end of a line of non-empty content, which forces a
# hard line break (<br> tag in HTML output).
#
# Examples
#
# Foo +
# Bar
#
match: '(?<=\\S)\\p{Blank}+(\\+)$'
captures:
1: name: 'variable.line-break.asciidoc'
]
|
[
{
"context": "号\",\n \"field_6\": \"“导员”3号\",\n \"creator_name\": \"sailxjx\",\n \"created_at\": \"2015-01-06T02:36:44Z\",\n \"",
"end": 394,
"score": 0.9990318417549133,
"start": 387,
"tag": "USERNAME",
"value": "sailxjx"
},
{
"context": "\": \"2015-01-06T02:36:44Z\",\n ... | test/services/jinshuju.coffee | jianliaoim/talk-services | 40 | should = require 'should'
loader = require '../../src/loader'
{req} = require '../util'
$jinshuju = loader.load 'jinshuju'
payload = {
"form": "PXmEGx",
"entry": {
"serial_number": 2,
"field_1": "“学霸”6号",
"field_2": [
"“小姐”2号",
"“导员”3号",
"“阿呆“5号"
],
"field_4": " “超人”1号",
"field_5": " “小姐”2号",
"field_6": "“导员”3号",
"creator_name": "sailxjx",
"created_at": "2015-01-06T02:36:44Z",
"updated_at": "2015-01-06T02:36:44Z",
"info_remote_ip": "101.231.114.44"
}
}
describe 'Jinshuju#Webhook', ->
req.integration = _id: 1
it 'should create new message when receive jinshuju webhook', (done) ->
req.body = payload
$jinshuju.then (jinshuju) -> jinshuju.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql "sailxjx 添加了新的数据"
message.attachments[0].data.redirectUrl.should.eql "https://jinshuju.net/forms/PXmEGx/entries"
.nodeify done
| 67711 | should = require 'should'
loader = require '../../src/loader'
{req} = require '../util'
$jinshuju = loader.load 'jinshuju'
payload = {
"form": "PXmEGx",
"entry": {
"serial_number": 2,
"field_1": "“学霸”6号",
"field_2": [
"“小姐”2号",
"“导员”3号",
"“阿呆“5号"
],
"field_4": " “超人”1号",
"field_5": " “小姐”2号",
"field_6": "“导员”3号",
"creator_name": "sailxjx",
"created_at": "2015-01-06T02:36:44Z",
"updated_at": "2015-01-06T02:36:44Z",
"info_remote_ip": "172.16.58.3"
}
}
describe 'Jinshuju#Webhook', ->
req.integration = _id: 1
it 'should create new message when receive jinshuju webhook', (done) ->
req.body = payload
$jinshuju.then (jinshuju) -> jinshuju.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql "sailxjx 添加了新的数据"
message.attachments[0].data.redirectUrl.should.eql "https://jinshuju.net/forms/PXmEGx/entries"
.nodeify done
| true | should = require 'should'
loader = require '../../src/loader'
{req} = require '../util'
$jinshuju = loader.load 'jinshuju'
payload = {
"form": "PXmEGx",
"entry": {
"serial_number": 2,
"field_1": "“学霸”6号",
"field_2": [
"“小姐”2号",
"“导员”3号",
"“阿呆“5号"
],
"field_4": " “超人”1号",
"field_5": " “小姐”2号",
"field_6": "“导员”3号",
"creator_name": "sailxjx",
"created_at": "2015-01-06T02:36:44Z",
"updated_at": "2015-01-06T02:36:44Z",
"info_remote_ip": "PI:IP_ADDRESS:172.16.58.3END_PI"
}
}
describe 'Jinshuju#Webhook', ->
req.integration = _id: 1
it 'should create new message when receive jinshuju webhook', (done) ->
req.body = payload
$jinshuju.then (jinshuju) -> jinshuju.receiveEvent 'service.webhook', req
.then (message) ->
message.attachments[0].data.text.should.eql "sailxjx 添加了新的数据"
message.attachments[0].data.redirectUrl.should.eql "https://jinshuju.net/forms/PXmEGx/entries"
.nodeify done
|
[
{
"context": "\n {\n content:\n $like: \"Dummy\"\n },\n {\n featured:\n ",
"end": 9092,
"score": 0.9890199899673462,
"start": 9087,
"tag": "NAME",
"value": "Dummy"
},
{
"context": ", false\n\n\n it \"$any and $all\", ->\n a ... | test/suite.coffee | davidgtonge/query-predicate | 4 | require "coffee-script"
assert = require "assert"
_ = require "underscore"
_collection = [
{title:"Home", colors:["red","yellow","blue"], likes:12, featured:true, content: "Dummy content about coffeescript", score: 0}
{title:"About", colors:["red"], likes:2, featured:true, content: "dummy content about javascript", score: 5}
{title:"Contact", colors:["red","blue"], likes:20, content: "Dummy content about PHP", score: -1, total:NaN}
]
create = -> _.clone(_collection)
module.exports = (_query) ->
it "Equals query - single result", ->
a = create()
result = _query a, title:"Home"
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "Equals query - 2 results", ->
a = create()
result = _query a, colors: "blue"
assert.equal result.length, 2
# it "Equals query with array value", ->
# a = create()
# result = _query a, colors: ["red", "blue"]
# assert.equal result.length, 1
it "Simple equals query (no results)", ->
a = create()
result = _query a, title:"Homes"
assert.equal result.length, 0
it "equal null doesn't match 0", ->
a = create()
result = _query a, score:null
assert.equal result.length, 0
it "equal NaN matches NaNs", ->
a = create()
result = _query a, total:NaN
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "Simple equals query with explicit $equal", ->
a = create()
result = _query a, title: {$equal: "About"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$contains operator", ->
a = create()
result = _query a, colors: {$contains: "blue"}
assert.equal result.length, 2
it "$ne operator", ->
a = create()
result = _query a, title: {$ne: "Home"}
assert.equal result.length, 2
it "$lt operator", ->
a = create()
result = _query a, likes: {$lt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$lt operator", ->
a = create()
assert.throws ->
result = _query a, score: {$lt: null}
it "$lte operator", ->
a = create()
result = _query a, likes: {$lte: 12}
assert.equal result.length, 2
it "$lte operator", ->
a = create()
assert.throws ->
result = _query a, score: {$lte: null}
it "$gt operator", ->
a = create()
result = _query a, likes: {$gt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$gt null", ->
a = create()
assert.throws ->
result = _query a, likes: {$gt: null}
it "$gte operator", ->
a = create()
result = _query a, likes: {$gte: 12}
assert.equal result.length, 2
it "$gte null", ->
a = create()
assert.throws ->
result = _query a, likes: {$gte: null}
it "$between operator", ->
a = create()
result = _query a, likes: {$between: [1,5]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$between operator is exclusive", ->
a = create()
result = _query a, likes: {$between: [1,2]}
assert.equal result.length, 0
it "$between operator with null", ->
a = create()
assert.throws ->
result = _query a, likes: {$between: [null, 5]}
it "$betweene operator is inclusive", ->
a = create()
result = _query a, likes: {$betweene: [1,2]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$betweene operator with null", ->
a = create()
assert.throws ->
result = _query a, likes: {$betweene: [null, 10]}
it "$mod operator", ->
a = create()
result = _query a, likes: {$mod: [3,0]}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$mod operator with null", ->
a = create()
result = _query a, likes: {$mod: [null, 5]}
assert.equal result.length, 0
result = _query a, likes: {$mod: [3, null]}
assert.equal result.length, 0
it "$in operator", ->
a = create()
result = _query a, title: {$in: ["Home","About"]}
assert.equal result.length, 2
it "$in operator with wrong query value", ->
a = create()
assert.throws ->
_query a, title: {$in: "Home"}
it "$nin operator", ->
a = create()
result = _query a, title: {$nin: ["Home","About"]}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$all operator", ->
a = create()
result = _query a, colors: {$all: ["red","blue"]}
assert.equal result.length, 2
it "$all operator (wrong values)", ->
a = create()
result = _query a, title: {$all: ["red","blue"]}
assert.equal result.length, 0
assert.throws ->
_query a, colors: {$all: "red"}
it "$any operator", ->
a = create()
result = _query a, colors: {$any: ["red","blue"]}
assert.equal result.length, 3
result = _query a, colors: {$any: ["yellow","blue"]}
assert.equal result.length, 2
it "$size operator", ->
a = create()
result = _query a, colors: {$size: 3}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$exists operator", ->
a = create()
result = _query a, featured: {$exists: true}
assert.equal result.length, 2
it "$has operator", ->
a = create()
result = _query a, featured: {$exists: false}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$like operator", ->
a = create()
result = _query a, content: {$like: "javascript"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$like operator 2", ->
a = create()
result = _query a, content: {$like: "content"}
assert.equal result.length, 3
it "$likeI operator", ->
a = create()
result = _query a, content: {$likeI: "dummy"}
assert.equal result.length, 3
result = _query a, content: {$like: "dummy"}
assert.equal result.length, 1
it "$startsWith operator", ->
a = create()
result = _query a, title: {$startsWith: "Ho"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$endsWith operator", ->
a = create()
result = _query a, title: {$endsWith: "me"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$regex", ->
a = create()
result = _query a, content: {$regex: /javascript/gi}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$regex2", ->
a = create()
result = _query a, content: {$regex: /dummy/}
assert.equal result.length, 1
it "$regex3", ->
a = create()
result = _query a, content: {$regex: /dummy/i}
assert.equal result.length, 3
it "$regex4", ->
a = create()
result = _query a, content: /javascript/i
assert.equal result.length, 1
it "$regex with object", ->
a = create()
result = _query a, content: {$regex: 'dummy'}
assert.equal result.length, 1
it "$regex with object+options", ->
a = create()
result = _query a, content: {$regex: 'dummy', $options: 'i'}
assert.equal result.length, 3
it "Dynamic equals query", ->
a = create()
result = _query a, title:()->"Homes"
assert.equal result.length, 0
result = _query a, title:()->"Home"
assert.equal result.length, 1
it "ensure dynamic query not cached", ->
a = create()
count = 12 - a.length
queryObj = likes: $lt: -> count += 1
result = _query(a, queryObj)
assert.equal (result).length, 1
result = _query(a, queryObj)
assert.equal (result).length, 2
it "$and operator", ->
a = create()
result = _query a, likes: {$gt: 5}, colors: {$contains: "yellow"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$and operator (explicit)", ->
a = create()
result = _query a, $and: [{likes: {$gt: 5}, colors: {$contains: "yellow"}}]
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$or operator", ->
a = create()
result = _query a, $or: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 2
it "$or2 operator", ->
a = create()
result = _query a, $or: [{likes: {$gt: 5}}, {featured: true}]
assert.equal result.length, 3
it "$or with multiple params in a condition", ->
dataset = [{x: 1, y: 2}, {x: 1.25, y: 3}, {x: 1.5, y: 3}, {x: 2, y: 4}]
result = _query(dataset, {
$or: [
{
x: { $gt: 1 },
y: { $lt: 4 }
}, {
foo: 1
}
]
})
assert.equal result.length, 2
it "$nor operator", ->
a = create()
result = _query a, $nor: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "Compound Queries", ->
a = create()
result = _query a, $and: [{likes: {$gt: 5}}], $or: [{content: {$like: "PHP"}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 2
result = _query a,
$and: [
likes: $lt: 15
]
$or: [
{
content:
$like: "Dummy"
},
{
featured:
$exists:true
}
]
$not:
colors: $contains: "yellow"
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$not operator", ->
a = create()
result = _query a, {$not: {likes: {$lt: 12}}}
assert.equal result.length, 2
#These tests fail, but would pass if it $not worked parallel to MongoDB
it "$not operator", ->
a = create()
result = _query a, {likes: {$not: {$lt: 12}}}
assert.equal result.length, 2
it "$not operator", ->
a = create()
result = _query a, likes: {$not: 12}
assert.equal result.length, 2
it "$not $equal operator", ->
a = create()
result = _query a, likes: {$not: {$equal: 12}}
assert.equal result.length, 2
it "$not $equal operator", ->
a = create()
result = _query a, likes: {$not: {$ne: 12}}
assert.equal result.length, 1
it "$elemMatch", ->
a = [
{title: "Home", comments:[
{text:"I like this post"}
{text:"I love this post"}
{text:"I hate this post"}
]}
{title: "About", comments:[
{text:"I like this page"}
{text:"I love this page"}
{text:"I really like this page"}
]}
]
b = [
{foo: [
{shape: "square", color: "purple", thick: false}
{shape: "circle", color: "red", thick: true}
]}
{foo: [
{shape: "square", color: "red", thick: true}
{shape: "circle", color: "purple", thick: false}
]}
]
text_search = {$likeI: "love"}
result = _query a, $or: [
{
comments:
$elemMatch:
text: text_search
},
{title: text_search}
]
assert.equal result.length, 2
result = _query a, $or: [
comments:
$elemMatch:
text: /post/
]
assert.equal result.length, 1
result = _query a, $or: [
{
comments:
$elemMatch:
text: /post/
},
{title: /about/i}
]
assert.equal result.length, 2
result = _query a, $or: [
comments:
$elemMatch:
text: /really/
]
assert.equal result.length, 1
result = _query b,
foo:
$elemMatch:
shape:"square"
color:"purple"
assert.equal result.length, 1
assert.equal result[0].foo[0].shape, "square"
assert.equal result[0].foo[0].color, "purple"
assert.equal result[0].foo[0].thick, false
it "$any and $all", ->
a = name: "test", tags1: ["red","yellow"], tags2: ["orange", "green", "red", "blue"]
b = name: "test1", tags1: ["purple","blue"], tags2: ["orange", "red", "blue"]
c = name: "test2", tags1: ["black","yellow"], tags2: ["green", "orange", "blue"]
d = name: "test3", tags1: ["red","yellow","blue"], tags2: ["green"]
e = [a,b,c,d]
result = _query e,
tags1: $any: ["red","purple"] # should match a, b, d
tags2: $all: ["orange","green"] # should match a, c
assert.equal result.length, 1
assert.equal result[0].name, "test"
it "$elemMatch - compound queries", ->
a = [
{title: "Home", comments:[
{text:"I like this post"}
{text:"I love this post"}
{text:"I hate this post"}
]}
{title: "About", comments:[
{text:"I like this page"}
{text:"I love this page"}
{text:"I really like this page"}
]}
]
result = _query a,
comments:
$elemMatch:
$not:
text:/page/
assert.equal result.length, 1
# Test from RobW - https://github.com/Rob--W
it "Explicit $and combined with matching $or must return the correct number of items", ->
Col = [
{equ:'ok', same: 'ok'},
{equ:'ok', same: 'ok'}
]
result = _query Col,
$and: [ # Matches both items
{equ: 'ok'}, # Matches both items
$or:
same: 'ok'
]
assert.equal result.length, 2
# Test from RobW - https://github.com/Rob--W
it "Implicit $and consisting of non-matching subquery and $or must return empty list", ->
Col = [
{equ:'ok', same: 'ok'},
{equ:'ok', same: 'ok'}
]
result = _query Col,
$and: [{equ: 'bogus'}] # Matches nothing
$or: [
same: 'ok' # Matches all items, but due to implicit $and, this subquery should not affect the result
]
assert.equal result.length, 0
it "Testing nested compound operators", ->
a = create()
result = _query a,
$and: [
{colors: $contains: "blue"} # Matches 1,3
$or: [
{featured:true} # Matches 1,2
{likes:12} # Matches 1
]
]
# And only matches 1
$or:[
{content:$like:"dummy"} # Matches 2
{content:$like:"Dummy"} # Matches 1,3
]
# Or matches 3
assert.equal result.length, 1
result = _query a,
$and: [
{colors: $contains: "blue"} # Matches 1,3
$or: [
{featured:true} # Matches 1,2
{likes:20} # Matches 3
]
]
# And only matches 2
$or:[
{content:$like:"dummy"} # Matches 2
{content:$like:"Dummy"} # Matches 1,3
]
# Or matches 3
assert.equal result.length, 2
it "works with queries supplied as arrays", ->
a = create()
result = _query a,
$or: [
{title:"Home"}
{title:"About"}
]
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
it "works with dot notation", ->
collection = [
{title:"Home", stats:{likes:10, views:{a:{b:500}}}}
{title:"About", stats:{likes:5, views:{a:{b:234}}}}
{title:"Code", stats:{likes:25, views:{a:{b:796}}}}
]
result = _query collection, {"stats.likes":5}
assert.equal result.length, 1
assert.equal result[0].title, "About"
result = _query collection, {"stats.views.a.b":796}
assert.equal result.length, 1
assert.equal result[0].title, "Code"
it "Handles multiple inequalities", ->
a = create()
result = _query a, likes: { $gt: 2, $lt: 20 }
assert.equal result.length, 1
assert.equal result[0].title, "Home"
result = _query a, likes: { $gte: 2, $lt: 20 }
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
result = _query a, likes: { $gt: 2, $lte: 20 }
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "Contact"
result = _query a, likes: { $gte: 2, $lte: 20 }
assert.equal result.length, 3
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
assert.equal result[2].title, "Contact"
result = _query a, likes: { $gte: 2, $lte: 20, $ne: 12 }
assert.equal result.length, 2
assert.equal result[0].title, "About"
assert.equal result[1].title, "Contact"
it "Handles nested multiple inequalities", ->
a = create()
result = _query a, $and: [likes: { $gt: 2, $lt: 20 }]
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "combination of $gt and $lt - mongo style", ->
a = create()
result = _query a, {likes: { $gt: 2, $lt: 20}}
assert.equal result.length, 1
it "$not combination of $gt and $lt - mongo style", ->
a = create()
result = _query a, {likes: {$not: { $gt: 2, $lt: 20}}}
assert.equal result.length, 2
it "$nor combination of $gt and $lt - expressions ", ->
a = create()
result = _query a, {$nor: [{likes: { $gt: 2}}, {likes: { $lt: 20}}]}
assert.equal result.length, 0
it "compound $ands (underscore-query #29)", ->
a = create()
res = _query(a, {
$and: [{
$and: [ likes: {$gt: 5 } ]
}]
})
assert.equal(res.length, 2)
# This query is not a valid MongoDB query, but if it were one would expect it to yield an empty set
# it "$nor combination of $gt and $lt - values", ->
# a = create()
# result = _query a, {likes: {$nor: [{ $gt: 2}, {$lt: 20}]}}
# assert.equal result.length, 0
# I wont support this query - I don't like the ambiguity of $not being an operator and a compound
# it "combination of $gt and $not", ->
# a = create()
# result = _query a, {likes: { $not: 2, $lt: 20}}
# assert.equal result.length, 1
# I don't really like the ambiguity of this query. It can be achieved through a $contains I think
# it.only "equal within an array (#21)", ->
# tweets = [{
# "entities": {
# "user_mentions": [{
# "id_str": "10228271"
# }]
# }
# }, {
# "entities": {
# "user_mentions": [{
# "id_str": "10228272"
# }]
# }
# }]
#
# res = _query tweets, {"entities.user_mentions.id_str": "10228272"}
# assert.equal(res.length, 1)
# res = _query tweets, {"entities.user_mentions.id_str": "10228273"}
# assert.equal(res.length, 0)
| 92432 | require "coffee-script"
assert = require "assert"
_ = require "underscore"
_collection = [
{title:"Home", colors:["red","yellow","blue"], likes:12, featured:true, content: "Dummy content about coffeescript", score: 0}
{title:"About", colors:["red"], likes:2, featured:true, content: "dummy content about javascript", score: 5}
{title:"Contact", colors:["red","blue"], likes:20, content: "Dummy content about PHP", score: -1, total:NaN}
]
create = -> _.clone(_collection)
module.exports = (_query) ->
it "Equals query - single result", ->
a = create()
result = _query a, title:"Home"
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "Equals query - 2 results", ->
a = create()
result = _query a, colors: "blue"
assert.equal result.length, 2
# it "Equals query with array value", ->
# a = create()
# result = _query a, colors: ["red", "blue"]
# assert.equal result.length, 1
it "Simple equals query (no results)", ->
a = create()
result = _query a, title:"Homes"
assert.equal result.length, 0
it "equal null doesn't match 0", ->
a = create()
result = _query a, score:null
assert.equal result.length, 0
it "equal NaN matches NaNs", ->
a = create()
result = _query a, total:NaN
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "Simple equals query with explicit $equal", ->
a = create()
result = _query a, title: {$equal: "About"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$contains operator", ->
a = create()
result = _query a, colors: {$contains: "blue"}
assert.equal result.length, 2
it "$ne operator", ->
a = create()
result = _query a, title: {$ne: "Home"}
assert.equal result.length, 2
it "$lt operator", ->
a = create()
result = _query a, likes: {$lt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$lt operator", ->
a = create()
assert.throws ->
result = _query a, score: {$lt: null}
it "$lte operator", ->
a = create()
result = _query a, likes: {$lte: 12}
assert.equal result.length, 2
it "$lte operator", ->
a = create()
assert.throws ->
result = _query a, score: {$lte: null}
it "$gt operator", ->
a = create()
result = _query a, likes: {$gt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$gt null", ->
a = create()
assert.throws ->
result = _query a, likes: {$gt: null}
it "$gte operator", ->
a = create()
result = _query a, likes: {$gte: 12}
assert.equal result.length, 2
it "$gte null", ->
a = create()
assert.throws ->
result = _query a, likes: {$gte: null}
it "$between operator", ->
a = create()
result = _query a, likes: {$between: [1,5]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$between operator is exclusive", ->
a = create()
result = _query a, likes: {$between: [1,2]}
assert.equal result.length, 0
it "$between operator with null", ->
a = create()
assert.throws ->
result = _query a, likes: {$between: [null, 5]}
it "$betweene operator is inclusive", ->
a = create()
result = _query a, likes: {$betweene: [1,2]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$betweene operator with null", ->
a = create()
assert.throws ->
result = _query a, likes: {$betweene: [null, 10]}
it "$mod operator", ->
a = create()
result = _query a, likes: {$mod: [3,0]}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$mod operator with null", ->
a = create()
result = _query a, likes: {$mod: [null, 5]}
assert.equal result.length, 0
result = _query a, likes: {$mod: [3, null]}
assert.equal result.length, 0
it "$in operator", ->
a = create()
result = _query a, title: {$in: ["Home","About"]}
assert.equal result.length, 2
it "$in operator with wrong query value", ->
a = create()
assert.throws ->
_query a, title: {$in: "Home"}
it "$nin operator", ->
a = create()
result = _query a, title: {$nin: ["Home","About"]}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$all operator", ->
a = create()
result = _query a, colors: {$all: ["red","blue"]}
assert.equal result.length, 2
it "$all operator (wrong values)", ->
a = create()
result = _query a, title: {$all: ["red","blue"]}
assert.equal result.length, 0
assert.throws ->
_query a, colors: {$all: "red"}
it "$any operator", ->
a = create()
result = _query a, colors: {$any: ["red","blue"]}
assert.equal result.length, 3
result = _query a, colors: {$any: ["yellow","blue"]}
assert.equal result.length, 2
it "$size operator", ->
a = create()
result = _query a, colors: {$size: 3}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$exists operator", ->
a = create()
result = _query a, featured: {$exists: true}
assert.equal result.length, 2
it "$has operator", ->
a = create()
result = _query a, featured: {$exists: false}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$like operator", ->
a = create()
result = _query a, content: {$like: "javascript"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$like operator 2", ->
a = create()
result = _query a, content: {$like: "content"}
assert.equal result.length, 3
it "$likeI operator", ->
a = create()
result = _query a, content: {$likeI: "dummy"}
assert.equal result.length, 3
result = _query a, content: {$like: "dummy"}
assert.equal result.length, 1
it "$startsWith operator", ->
a = create()
result = _query a, title: {$startsWith: "Ho"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$endsWith operator", ->
a = create()
result = _query a, title: {$endsWith: "me"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$regex", ->
a = create()
result = _query a, content: {$regex: /javascript/gi}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$regex2", ->
a = create()
result = _query a, content: {$regex: /dummy/}
assert.equal result.length, 1
it "$regex3", ->
a = create()
result = _query a, content: {$regex: /dummy/i}
assert.equal result.length, 3
it "$regex4", ->
a = create()
result = _query a, content: /javascript/i
assert.equal result.length, 1
it "$regex with object", ->
a = create()
result = _query a, content: {$regex: 'dummy'}
assert.equal result.length, 1
it "$regex with object+options", ->
a = create()
result = _query a, content: {$regex: 'dummy', $options: 'i'}
assert.equal result.length, 3
it "Dynamic equals query", ->
a = create()
result = _query a, title:()->"Homes"
assert.equal result.length, 0
result = _query a, title:()->"Home"
assert.equal result.length, 1
it "ensure dynamic query not cached", ->
a = create()
count = 12 - a.length
queryObj = likes: $lt: -> count += 1
result = _query(a, queryObj)
assert.equal (result).length, 1
result = _query(a, queryObj)
assert.equal (result).length, 2
it "$and operator", ->
a = create()
result = _query a, likes: {$gt: 5}, colors: {$contains: "yellow"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$and operator (explicit)", ->
a = create()
result = _query a, $and: [{likes: {$gt: 5}, colors: {$contains: "yellow"}}]
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$or operator", ->
a = create()
result = _query a, $or: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 2
it "$or2 operator", ->
a = create()
result = _query a, $or: [{likes: {$gt: 5}}, {featured: true}]
assert.equal result.length, 3
it "$or with multiple params in a condition", ->
dataset = [{x: 1, y: 2}, {x: 1.25, y: 3}, {x: 1.5, y: 3}, {x: 2, y: 4}]
result = _query(dataset, {
$or: [
{
x: { $gt: 1 },
y: { $lt: 4 }
}, {
foo: 1
}
]
})
assert.equal result.length, 2
it "$nor operator", ->
a = create()
result = _query a, $nor: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "Compound Queries", ->
a = create()
result = _query a, $and: [{likes: {$gt: 5}}], $or: [{content: {$like: "PHP"}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 2
result = _query a,
$and: [
likes: $lt: 15
]
$or: [
{
content:
$like: "<NAME>"
},
{
featured:
$exists:true
}
]
$not:
colors: $contains: "yellow"
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$not operator", ->
a = create()
result = _query a, {$not: {likes: {$lt: 12}}}
assert.equal result.length, 2
#These tests fail, but would pass if it $not worked parallel to MongoDB
it "$not operator", ->
a = create()
result = _query a, {likes: {$not: {$lt: 12}}}
assert.equal result.length, 2
it "$not operator", ->
a = create()
result = _query a, likes: {$not: 12}
assert.equal result.length, 2
it "$not $equal operator", ->
a = create()
result = _query a, likes: {$not: {$equal: 12}}
assert.equal result.length, 2
it "$not $equal operator", ->
a = create()
result = _query a, likes: {$not: {$ne: 12}}
assert.equal result.length, 1
it "$elemMatch", ->
a = [
{title: "Home", comments:[
{text:"I like this post"}
{text:"I love this post"}
{text:"I hate this post"}
]}
{title: "About", comments:[
{text:"I like this page"}
{text:"I love this page"}
{text:"I really like this page"}
]}
]
b = [
{foo: [
{shape: "square", color: "purple", thick: false}
{shape: "circle", color: "red", thick: true}
]}
{foo: [
{shape: "square", color: "red", thick: true}
{shape: "circle", color: "purple", thick: false}
]}
]
text_search = {$likeI: "love"}
result = _query a, $or: [
{
comments:
$elemMatch:
text: text_search
},
{title: text_search}
]
assert.equal result.length, 2
result = _query a, $or: [
comments:
$elemMatch:
text: /post/
]
assert.equal result.length, 1
result = _query a, $or: [
{
comments:
$elemMatch:
text: /post/
},
{title: /about/i}
]
assert.equal result.length, 2
result = _query a, $or: [
comments:
$elemMatch:
text: /really/
]
assert.equal result.length, 1
result = _query b,
foo:
$elemMatch:
shape:"square"
color:"purple"
assert.equal result.length, 1
assert.equal result[0].foo[0].shape, "square"
assert.equal result[0].foo[0].color, "purple"
assert.equal result[0].foo[0].thick, false
it "$any and $all", ->
a = name: "<NAME>", tags1: ["red","yellow"], tags2: ["orange", "green", "red", "blue"]
b = name: "<NAME>", tags1: ["purple","blue"], tags2: ["orange", "red", "blue"]
c = name: "<NAME>", tags1: ["black","yellow"], tags2: ["green", "orange", "blue"]
d = name: "test3", tags1: ["red","yellow","blue"], tags2: ["green"]
e = [a,b,c,d]
result = _query e,
tags1: $any: ["red","purple"] # should match a, b, d
tags2: $all: ["orange","green"] # should match a, c
assert.equal result.length, 1
assert.equal result[0].name, "test"
it "$elemMatch - compound queries", ->
a = [
{title: "Home", comments:[
{text:"I like this post"}
{text:"I love this post"}
{text:"I hate this post"}
]}
{title: "About", comments:[
{text:"I like this page"}
{text:"I love this page"}
{text:"I really like this page"}
]}
]
result = _query a,
comments:
$elemMatch:
$not:
text:/page/
assert.equal result.length, 1
# Test from RobW - https://github.com/Rob--W
it "Explicit $and combined with matching $or must return the correct number of items", ->
Col = [
{equ:'ok', same: 'ok'},
{equ:'ok', same: 'ok'}
]
result = _query Col,
$and: [ # Matches both items
{equ: 'ok'}, # Matches both items
$or:
same: 'ok'
]
assert.equal result.length, 2
# Test from RobW - https://github.com/Rob--W
it "Implicit $and consisting of non-matching subquery and $or must return empty list", ->
Col = [
{equ:'ok', same: 'ok'},
{equ:'ok', same: 'ok'}
]
result = _query Col,
$and: [{equ: 'bogus'}] # Matches nothing
$or: [
same: 'ok' # Matches all items, but due to implicit $and, this subquery should not affect the result
]
assert.equal result.length, 0
it "Testing nested compound operators", ->
a = create()
result = _query a,
$and: [
{colors: $contains: "blue"} # Matches 1,3
$or: [
{featured:true} # Matches 1,2
{likes:12} # Matches 1
]
]
# And only matches 1
$or:[
{content:$like:"dummy"} # Matches 2
{content:$like:"Dummy"} # Matches 1,3
]
# Or matches 3
assert.equal result.length, 1
result = _query a,
$and: [
{colors: $contains: "blue"} # Matches 1,3
$or: [
{featured:true} # Matches 1,2
{likes:20} # Matches 3
]
]
# And only matches 2
$or:[
{content:$like:"dummy"} # Matches 2
{content:$like:"Dummy"} # Matches 1,3
]
# Or matches 3
assert.equal result.length, 2
it "works with queries supplied as arrays", ->
a = create()
result = _query a,
$or: [
{title:"Home"}
{title:"About"}
]
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
it "works with dot notation", ->
collection = [
{title:"Home", stats:{likes:10, views:{a:{b:500}}}}
{title:"About", stats:{likes:5, views:{a:{b:234}}}}
{title:"Code", stats:{likes:25, views:{a:{b:796}}}}
]
result = _query collection, {"stats.likes":5}
assert.equal result.length, 1
assert.equal result[0].title, "About"
result = _query collection, {"stats.views.a.b":796}
assert.equal result.length, 1
assert.equal result[0].title, "Code"
it "Handles multiple inequalities", ->
a = create()
result = _query a, likes: { $gt: 2, $lt: 20 }
assert.equal result.length, 1
assert.equal result[0].title, "Home"
result = _query a, likes: { $gte: 2, $lt: 20 }
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
result = _query a, likes: { $gt: 2, $lte: 20 }
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "Contact"
result = _query a, likes: { $gte: 2, $lte: 20 }
assert.equal result.length, 3
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
assert.equal result[2].title, "Contact"
result = _query a, likes: { $gte: 2, $lte: 20, $ne: 12 }
assert.equal result.length, 2
assert.equal result[0].title, "About"
assert.equal result[1].title, "Contact"
it "Handles nested multiple inequalities", ->
a = create()
result = _query a, $and: [likes: { $gt: 2, $lt: 20 }]
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "combination of $gt and $lt - mongo style", ->
a = create()
result = _query a, {likes: { $gt: 2, $lt: 20}}
assert.equal result.length, 1
it "$not combination of $gt and $lt - mongo style", ->
a = create()
result = _query a, {likes: {$not: { $gt: 2, $lt: 20}}}
assert.equal result.length, 2
it "$nor combination of $gt and $lt - expressions ", ->
a = create()
result = _query a, {$nor: [{likes: { $gt: 2}}, {likes: { $lt: 20}}]}
assert.equal result.length, 0
it "compound $ands (underscore-query #29)", ->
a = create()
res = _query(a, {
$and: [{
$and: [ likes: {$gt: 5 } ]
}]
})
assert.equal(res.length, 2)
# This query is not a valid MongoDB query, but if it were one would expect it to yield an empty set
# it "$nor combination of $gt and $lt - values", ->
# a = create()
# result = _query a, {likes: {$nor: [{ $gt: 2}, {$lt: 20}]}}
# assert.equal result.length, 0
# I wont support this query - I don't like the ambiguity of $not being an operator and a compound
# it "combination of $gt and $not", ->
# a = create()
# result = _query a, {likes: { $not: 2, $lt: 20}}
# assert.equal result.length, 1
# I don't really like the ambiguity of this query. It can be achieved through a $contains I think
# it.only "equal within an array (#21)", ->
# tweets = [{
# "entities": {
# "user_mentions": [{
# "id_str": "10228271"
# }]
# }
# }, {
# "entities": {
# "user_mentions": [{
# "id_str": "10228272"
# }]
# }
# }]
#
# res = _query tweets, {"entities.user_mentions.id_str": "10228272"}
# assert.equal(res.length, 1)
# res = _query tweets, {"entities.user_mentions.id_str": "10228273"}
# assert.equal(res.length, 0)
| true | require "coffee-script"
assert = require "assert"
_ = require "underscore"
_collection = [
{title:"Home", colors:["red","yellow","blue"], likes:12, featured:true, content: "Dummy content about coffeescript", score: 0}
{title:"About", colors:["red"], likes:2, featured:true, content: "dummy content about javascript", score: 5}
{title:"Contact", colors:["red","blue"], likes:20, content: "Dummy content about PHP", score: -1, total:NaN}
]
create = -> _.clone(_collection)
module.exports = (_query) ->
it "Equals query - single result", ->
a = create()
result = _query a, title:"Home"
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "Equals query - 2 results", ->
a = create()
result = _query a, colors: "blue"
assert.equal result.length, 2
# it "Equals query with array value", ->
# a = create()
# result = _query a, colors: ["red", "blue"]
# assert.equal result.length, 1
it "Simple equals query (no results)", ->
a = create()
result = _query a, title:"Homes"
assert.equal result.length, 0
it "equal null doesn't match 0", ->
a = create()
result = _query a, score:null
assert.equal result.length, 0
it "equal NaN matches NaNs", ->
a = create()
result = _query a, total:NaN
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "Simple equals query with explicit $equal", ->
a = create()
result = _query a, title: {$equal: "About"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$contains operator", ->
a = create()
result = _query a, colors: {$contains: "blue"}
assert.equal result.length, 2
it "$ne operator", ->
a = create()
result = _query a, title: {$ne: "Home"}
assert.equal result.length, 2
it "$lt operator", ->
a = create()
result = _query a, likes: {$lt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$lt operator", ->
a = create()
assert.throws ->
result = _query a, score: {$lt: null}
it "$lte operator", ->
a = create()
result = _query a, likes: {$lte: 12}
assert.equal result.length, 2
it "$lte operator", ->
a = create()
assert.throws ->
result = _query a, score: {$lte: null}
it "$gt operator", ->
a = create()
result = _query a, likes: {$gt: 12}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$gt null", ->
a = create()
assert.throws ->
result = _query a, likes: {$gt: null}
it "$gte operator", ->
a = create()
result = _query a, likes: {$gte: 12}
assert.equal result.length, 2
it "$gte null", ->
a = create()
assert.throws ->
result = _query a, likes: {$gte: null}
it "$between operator", ->
a = create()
result = _query a, likes: {$between: [1,5]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$between operator is exclusive", ->
a = create()
result = _query a, likes: {$between: [1,2]}
assert.equal result.length, 0
it "$between operator with null", ->
a = create()
assert.throws ->
result = _query a, likes: {$between: [null, 5]}
it "$betweene operator is inclusive", ->
a = create()
result = _query a, likes: {$betweene: [1,2]}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$betweene operator with null", ->
a = create()
assert.throws ->
result = _query a, likes: {$betweene: [null, 10]}
it "$mod operator", ->
a = create()
result = _query a, likes: {$mod: [3,0]}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$mod operator with null", ->
a = create()
result = _query a, likes: {$mod: [null, 5]}
assert.equal result.length, 0
result = _query a, likes: {$mod: [3, null]}
assert.equal result.length, 0
it "$in operator", ->
a = create()
result = _query a, title: {$in: ["Home","About"]}
assert.equal result.length, 2
it "$in operator with wrong query value", ->
a = create()
assert.throws ->
_query a, title: {$in: "Home"}
it "$nin operator", ->
a = create()
result = _query a, title: {$nin: ["Home","About"]}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$all operator", ->
a = create()
result = _query a, colors: {$all: ["red","blue"]}
assert.equal result.length, 2
it "$all operator (wrong values)", ->
a = create()
result = _query a, title: {$all: ["red","blue"]}
assert.equal result.length, 0
assert.throws ->
_query a, colors: {$all: "red"}
it "$any operator", ->
a = create()
result = _query a, colors: {$any: ["red","blue"]}
assert.equal result.length, 3
result = _query a, colors: {$any: ["yellow","blue"]}
assert.equal result.length, 2
it "$size operator", ->
a = create()
result = _query a, colors: {$size: 3}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$exists operator", ->
a = create()
result = _query a, featured: {$exists: true}
assert.equal result.length, 2
it "$has operator", ->
a = create()
result = _query a, featured: {$exists: false}
assert.equal result.length, 1
assert.equal result[0].title, "Contact"
it "$like operator", ->
a = create()
result = _query a, content: {$like: "javascript"}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$like operator 2", ->
a = create()
result = _query a, content: {$like: "content"}
assert.equal result.length, 3
it "$likeI operator", ->
a = create()
result = _query a, content: {$likeI: "dummy"}
assert.equal result.length, 3
result = _query a, content: {$like: "dummy"}
assert.equal result.length, 1
it "$startsWith operator", ->
a = create()
result = _query a, title: {$startsWith: "Ho"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$endsWith operator", ->
a = create()
result = _query a, title: {$endsWith: "me"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$regex", ->
a = create()
result = _query a, content: {$regex: /javascript/gi}
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$regex2", ->
a = create()
result = _query a, content: {$regex: /dummy/}
assert.equal result.length, 1
it "$regex3", ->
a = create()
result = _query a, content: {$regex: /dummy/i}
assert.equal result.length, 3
it "$regex4", ->
a = create()
result = _query a, content: /javascript/i
assert.equal result.length, 1
it "$regex with object", ->
a = create()
result = _query a, content: {$regex: 'dummy'}
assert.equal result.length, 1
it "$regex with object+options", ->
a = create()
result = _query a, content: {$regex: 'dummy', $options: 'i'}
assert.equal result.length, 3
it "Dynamic equals query", ->
a = create()
result = _query a, title:()->"Homes"
assert.equal result.length, 0
result = _query a, title:()->"Home"
assert.equal result.length, 1
it "ensure dynamic query not cached", ->
a = create()
count = 12 - a.length
queryObj = likes: $lt: -> count += 1
result = _query(a, queryObj)
assert.equal (result).length, 1
result = _query(a, queryObj)
assert.equal (result).length, 2
it "$and operator", ->
a = create()
result = _query a, likes: {$gt: 5}, colors: {$contains: "yellow"}
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$and operator (explicit)", ->
a = create()
result = _query a, $and: [{likes: {$gt: 5}, colors: {$contains: "yellow"}}]
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "$or operator", ->
a = create()
result = _query a, $or: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 2
it "$or2 operator", ->
a = create()
result = _query a, $or: [{likes: {$gt: 5}}, {featured: true}]
assert.equal result.length, 3
it "$or with multiple params in a condition", ->
dataset = [{x: 1, y: 2}, {x: 1.25, y: 3}, {x: 1.5, y: 3}, {x: 2, y: 4}]
result = _query(dataset, {
$or: [
{
x: { $gt: 1 },
y: { $lt: 4 }
}, {
foo: 1
}
]
})
assert.equal result.length, 2
it "$nor operator", ->
a = create()
result = _query a, $nor: [{likes: {$gt: 5}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "Compound Queries", ->
a = create()
result = _query a, $and: [{likes: {$gt: 5}}], $or: [{content: {$like: "PHP"}}, {colors: {$contains: "yellow"}}]
assert.equal result.length, 2
result = _query a,
$and: [
likes: $lt: 15
]
$or: [
{
content:
$like: "PI:NAME:<NAME>END_PI"
},
{
featured:
$exists:true
}
]
$not:
colors: $contains: "yellow"
assert.equal result.length, 1
assert.equal result[0].title, "About"
it "$not operator", ->
a = create()
result = _query a, {$not: {likes: {$lt: 12}}}
assert.equal result.length, 2
#These tests fail, but would pass if it $not worked parallel to MongoDB
it "$not operator", ->
a = create()
result = _query a, {likes: {$not: {$lt: 12}}}
assert.equal result.length, 2
it "$not operator", ->
a = create()
result = _query a, likes: {$not: 12}
assert.equal result.length, 2
it "$not $equal operator", ->
a = create()
result = _query a, likes: {$not: {$equal: 12}}
assert.equal result.length, 2
it "$not $equal operator", ->
a = create()
result = _query a, likes: {$not: {$ne: 12}}
assert.equal result.length, 1
it "$elemMatch", ->
a = [
{title: "Home", comments:[
{text:"I like this post"}
{text:"I love this post"}
{text:"I hate this post"}
]}
{title: "About", comments:[
{text:"I like this page"}
{text:"I love this page"}
{text:"I really like this page"}
]}
]
b = [
{foo: [
{shape: "square", color: "purple", thick: false}
{shape: "circle", color: "red", thick: true}
]}
{foo: [
{shape: "square", color: "red", thick: true}
{shape: "circle", color: "purple", thick: false}
]}
]
text_search = {$likeI: "love"}
result = _query a, $or: [
{
comments:
$elemMatch:
text: text_search
},
{title: text_search}
]
assert.equal result.length, 2
result = _query a, $or: [
comments:
$elemMatch:
text: /post/
]
assert.equal result.length, 1
result = _query a, $or: [
{
comments:
$elemMatch:
text: /post/
},
{title: /about/i}
]
assert.equal result.length, 2
result = _query a, $or: [
comments:
$elemMatch:
text: /really/
]
assert.equal result.length, 1
result = _query b,
foo:
$elemMatch:
shape:"square"
color:"purple"
assert.equal result.length, 1
assert.equal result[0].foo[0].shape, "square"
assert.equal result[0].foo[0].color, "purple"
assert.equal result[0].foo[0].thick, false
it "$any and $all", ->
a = name: "PI:NAME:<NAME>END_PI", tags1: ["red","yellow"], tags2: ["orange", "green", "red", "blue"]
b = name: "PI:NAME:<NAME>END_PI", tags1: ["purple","blue"], tags2: ["orange", "red", "blue"]
c = name: "PI:NAME:<NAME>END_PI", tags1: ["black","yellow"], tags2: ["green", "orange", "blue"]
d = name: "test3", tags1: ["red","yellow","blue"], tags2: ["green"]
e = [a,b,c,d]
result = _query e,
tags1: $any: ["red","purple"] # should match a, b, d
tags2: $all: ["orange","green"] # should match a, c
assert.equal result.length, 1
assert.equal result[0].name, "test"
it "$elemMatch - compound queries", ->
a = [
{title: "Home", comments:[
{text:"I like this post"}
{text:"I love this post"}
{text:"I hate this post"}
]}
{title: "About", comments:[
{text:"I like this page"}
{text:"I love this page"}
{text:"I really like this page"}
]}
]
result = _query a,
comments:
$elemMatch:
$not:
text:/page/
assert.equal result.length, 1
# Test from RobW - https://github.com/Rob--W
it "Explicit $and combined with matching $or must return the correct number of items", ->
Col = [
{equ:'ok', same: 'ok'},
{equ:'ok', same: 'ok'}
]
result = _query Col,
$and: [ # Matches both items
{equ: 'ok'}, # Matches both items
$or:
same: 'ok'
]
assert.equal result.length, 2
# Test from RobW - https://github.com/Rob--W
it "Implicit $and consisting of non-matching subquery and $or must return empty list", ->
Col = [
{equ:'ok', same: 'ok'},
{equ:'ok', same: 'ok'}
]
result = _query Col,
$and: [{equ: 'bogus'}] # Matches nothing
$or: [
same: 'ok' # Matches all items, but due to implicit $and, this subquery should not affect the result
]
assert.equal result.length, 0
it "Testing nested compound operators", ->
a = create()
result = _query a,
$and: [
{colors: $contains: "blue"} # Matches 1,3
$or: [
{featured:true} # Matches 1,2
{likes:12} # Matches 1
]
]
# And only matches 1
$or:[
{content:$like:"dummy"} # Matches 2
{content:$like:"Dummy"} # Matches 1,3
]
# Or matches 3
assert.equal result.length, 1
result = _query a,
$and: [
{colors: $contains: "blue"} # Matches 1,3
$or: [
{featured:true} # Matches 1,2
{likes:20} # Matches 3
]
]
# And only matches 2
$or:[
{content:$like:"dummy"} # Matches 2
{content:$like:"Dummy"} # Matches 1,3
]
# Or matches 3
assert.equal result.length, 2
it "works with queries supplied as arrays", ->
a = create()
result = _query a,
$or: [
{title:"Home"}
{title:"About"}
]
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
it "works with dot notation", ->
collection = [
{title:"Home", stats:{likes:10, views:{a:{b:500}}}}
{title:"About", stats:{likes:5, views:{a:{b:234}}}}
{title:"Code", stats:{likes:25, views:{a:{b:796}}}}
]
result = _query collection, {"stats.likes":5}
assert.equal result.length, 1
assert.equal result[0].title, "About"
result = _query collection, {"stats.views.a.b":796}
assert.equal result.length, 1
assert.equal result[0].title, "Code"
it "Handles multiple inequalities", ->
a = create()
result = _query a, likes: { $gt: 2, $lt: 20 }
assert.equal result.length, 1
assert.equal result[0].title, "Home"
result = _query a, likes: { $gte: 2, $lt: 20 }
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
result = _query a, likes: { $gt: 2, $lte: 20 }
assert.equal result.length, 2
assert.equal result[0].title, "Home"
assert.equal result[1].title, "Contact"
result = _query a, likes: { $gte: 2, $lte: 20 }
assert.equal result.length, 3
assert.equal result[0].title, "Home"
assert.equal result[1].title, "About"
assert.equal result[2].title, "Contact"
result = _query a, likes: { $gte: 2, $lte: 20, $ne: 12 }
assert.equal result.length, 2
assert.equal result[0].title, "About"
assert.equal result[1].title, "Contact"
it "Handles nested multiple inequalities", ->
a = create()
result = _query a, $and: [likes: { $gt: 2, $lt: 20 }]
assert.equal result.length, 1
assert.equal result[0].title, "Home"
it "combination of $gt and $lt - mongo style", ->
a = create()
result = _query a, {likes: { $gt: 2, $lt: 20}}
assert.equal result.length, 1
it "$not combination of $gt and $lt - mongo style", ->
a = create()
result = _query a, {likes: {$not: { $gt: 2, $lt: 20}}}
assert.equal result.length, 2
it "$nor combination of $gt and $lt - expressions ", ->
a = create()
result = _query a, {$nor: [{likes: { $gt: 2}}, {likes: { $lt: 20}}]}
assert.equal result.length, 0
it "compound $ands (underscore-query #29)", ->
a = create()
res = _query(a, {
$and: [{
$and: [ likes: {$gt: 5 } ]
}]
})
assert.equal(res.length, 2)
# This query is not a valid MongoDB query, but if it were one would expect it to yield an empty set
# it "$nor combination of $gt and $lt - values", ->
# a = create()
# result = _query a, {likes: {$nor: [{ $gt: 2}, {$lt: 20}]}}
# assert.equal result.length, 0
# I wont support this query - I don't like the ambiguity of $not being an operator and a compound
# it "combination of $gt and $not", ->
# a = create()
# result = _query a, {likes: { $not: 2, $lt: 20}}
# assert.equal result.length, 1
# I don't really like the ambiguity of this query. It can be achieved through a $contains I think
# it.only "equal within an array (#21)", ->
# tweets = [{
# "entities": {
# "user_mentions": [{
# "id_str": "10228271"
# }]
# }
# }, {
# "entities": {
# "user_mentions": [{
# "id_str": "10228272"
# }]
# }
# }]
#
# res = _query tweets, {"entities.user_mentions.id_str": "10228272"}
# assert.equal(res.length, 1)
# res = _query tweets, {"entities.user_mentions.id_str": "10228273"}
# assert.equal(res.length, 0)
|
[
{
"context": "d to parentWidget\n setting = {}\n setting.key = \"params-checkboxes\"\n\n setting.initialize = ->\n\n setting.toMetadata",
"end": 261,
"score": 0.9205004572868347,
"start": 244,
"tag": "KEY",
"value": "params-checkboxes"
}
] | src/components/widgets-settings/params-checkboxes/params-checkboxes.directive.coffee | agranado2k/impac-angular | 7 | module = angular.module('impac.components.widgets-settings.params-checkboxes',[])
module.controller('SettingParamsCheckboxesCtrl', ($scope) ->
w = $scope.parentWidget
# What will be passed to parentWidget
setting = {}
setting.key = "params-checkboxes"
setting.initialize = ->
setting.toMetadata = ->
param = {}
param[$scope.param] = _.map $scope.options, (opt) -> { id: opt.id, value: opt.value }
return param
w.settings.push(setting)
# Setting is ready: trigger load content
# ------------------------------------
$scope.deferred.resolve($scope.parentWidget)
)
module.directive('settingParamsCheckboxes', ($templateCache) ->
return {
restrict: 'A',
scope: {
parentWidget: '=',
deferred: '='
param: '@',
options: '=',
},
template: $templateCache.get('widgets-settings/params-checkboxes.tmpl.html'),
controller: 'SettingParamsCheckboxesCtrl'
}
)
| 102543 | module = angular.module('impac.components.widgets-settings.params-checkboxes',[])
module.controller('SettingParamsCheckboxesCtrl', ($scope) ->
w = $scope.parentWidget
# What will be passed to parentWidget
setting = {}
setting.key = "<KEY>"
setting.initialize = ->
setting.toMetadata = ->
param = {}
param[$scope.param] = _.map $scope.options, (opt) -> { id: opt.id, value: opt.value }
return param
w.settings.push(setting)
# Setting is ready: trigger load content
# ------------------------------------
$scope.deferred.resolve($scope.parentWidget)
)
module.directive('settingParamsCheckboxes', ($templateCache) ->
return {
restrict: 'A',
scope: {
parentWidget: '=',
deferred: '='
param: '@',
options: '=',
},
template: $templateCache.get('widgets-settings/params-checkboxes.tmpl.html'),
controller: 'SettingParamsCheckboxesCtrl'
}
)
| true | module = angular.module('impac.components.widgets-settings.params-checkboxes',[])
module.controller('SettingParamsCheckboxesCtrl', ($scope) ->
w = $scope.parentWidget
# What will be passed to parentWidget
setting = {}
setting.key = "PI:KEY:<KEY>END_PI"
setting.initialize = ->
setting.toMetadata = ->
param = {}
param[$scope.param] = _.map $scope.options, (opt) -> { id: opt.id, value: opt.value }
return param
w.settings.push(setting)
# Setting is ready: trigger load content
# ------------------------------------
$scope.deferred.resolve($scope.parentWidget)
)
module.directive('settingParamsCheckboxes', ($templateCache) ->
return {
restrict: 'A',
scope: {
parentWidget: '=',
deferred: '='
param: '@',
options: '=',
},
template: $templateCache.get('widgets-settings/params-checkboxes.tmpl.html'),
controller: 'SettingParamsCheckboxesCtrl'
}
)
|
[
{
"context": "=\n id: Number(Date.now())\n name: name\n paths: paths\n \n # Listen and serve inco",
"end": 3255,
"score": 0.9895871877670288,
"start": 3251,
"tag": "NAME",
"value": "name"
}
] | src/client/index.coffee | EngForDev/socketstream | 1 | # Client Asset Manager
# --------------------
# The Client Asset Manager allows you to define multiple single-page 'clients' which can be served on
# different URLs or to different devices. Note: The Client Asset Manager deliberately makes extensive use
# of synchronous code. This is because all operations only ever run once on startup (when packing the assets)
# unless you are running in dev mode
fs = require('fs')
path = require('path')
systemAssets = require('./system')
# Determine if assets should be (re)packed on startup
packAssets = process.env['SS_PACK']
# Set defaults
options =
packedAssets: packAssets || false
liveReload: ['code', 'css', 'static', 'templates', 'views']
dirs:
code: '/client/code'
css: '/client/css'
static: '/client/static'
assets: '/client/static/assets'
templates: '/client/templates'
views: '/client/views'
workers: '/client/workers'
# Store each client as an object
clients = {}
module.exports = (ss, router) ->
# Require sub modules
templateEngine = require('./template_engine')(ss)
formatters = require('./formatters')(ss)
http = require('./http')(ss, clients, options)
# Load default code formatters
formatters.add('javascript')
formatters.add('css')
formatters.add('html')
# Very basic check to see if we can find pre-packed assets
# TODO: Improve to test for complete set
determineLatestId = (client) ->
try
files = fs.readdirSync(path.join(ss.root, options.dirs.assets, client.name))
latestId = files.sort().pop()
id = latestId.split('.')[0]
throw ('Invalid Client ID length') unless id.length == 13
id
catch e
false
systemAssets.load()
# Return API
formatters: formatters
templateEngine: templateEngine
assets: systemAssets
options: options
# Merge optional options
set: (newOption) ->
throw new Error('ss.client.set() takes an object e.g. {liveReload: false}') unless typeof(newOption) == 'object'
for k, v of newOption
if v instanceof Object
options[k][x] = y for x, y of v
else
options[k] = v
# Tell the asset manager to pack and minimise all assets
packAssets: (opts) ->
throw new Error('Options passed to ss.client.packAssets() must be an object') if opts and typeof(opts) != 'object'
options.packedAssets = opts || true
# Define a new Single Page Client
define: (name, paths) ->
throw new Error("Client name '#{name}' has already been defined") if clients[name]?
throw new Error("You may only define one HTML view per single-page client. Please pass a filename as a string, not an Array") if typeof(paths.view) != 'string'
throw new Error("The '#{paths.view}' view must have a valid HTML extension (such as .html or .jade)") if paths.view.indexOf('.') == -1
# Alias 'templates' to 'tmpl'
paths.tmpl = paths.templates if paths.templates
# Force each into an array
['css','code','tmpl'].forEach (assetType) =>
paths[assetType] = [paths[assetType]] unless paths[assetType] instanceof Array
# Define new client object
clients[name] =
id: Number(Date.now())
name: name
paths: paths
# Listen and serve incoming asset requests
load: ->
# Cache instances of code formatters and template engines here
# This may change in the future as I don't like hanging system objects
# on the 'ss' internal API object, but for now it solves a problem
# we were having when repl.start() would erase vars cached inside a module
ss.client.formatters = formatters.load()
ss.client.templateEngines = templateEngine.load()
# Code to execute once everything is loaded
systemAssets.send('code', 'init', "require('/entry');")
if options.packedAssets
# Attempt to find and serve existing pre-packed assets
# If unsuccessful, assets will be re-packed automatically
unless packAssets
ss.log 'i'.green, "Attempting to find pre-packed assets... (force repack with SS_PACK=1)".grey
for name, client of clients
if id = options.packedAssets.id || determineLatestId(client)
client.id = id
ss.log '✓'.green, "Serving client '#{client.name}' from pre-packed assets ID #{client.id}".grey
else
ss.log '!'.red, "Unable to find pre-packed assets for '#{client.name}'. All assets will be repacked".grey
packAssets = true
# Pack Assets
if packAssets
pack = require('./pack')
pack(ss, client, options) for name, client of clients
# Else serve files and watch for changes to files in development
else
require('./serve/dev')(ss, router, options)
require('./live_reload')(ss, options) if options.liveReload
# Listen out for requests to async load new assets
require('./serve/ondemand')(ss, router, options)
| 33255 | # Client Asset Manager
# --------------------
# The Client Asset Manager allows you to define multiple single-page 'clients' which can be served on
# different URLs or to different devices. Note: The Client Asset Manager deliberately makes extensive use
# of synchronous code. This is because all operations only ever run once on startup (when packing the assets)
# unless you are running in dev mode
fs = require('fs')
path = require('path')
systemAssets = require('./system')
# Determine if assets should be (re)packed on startup
packAssets = process.env['SS_PACK']
# Set defaults
options =
packedAssets: packAssets || false
liveReload: ['code', 'css', 'static', 'templates', 'views']
dirs:
code: '/client/code'
css: '/client/css'
static: '/client/static'
assets: '/client/static/assets'
templates: '/client/templates'
views: '/client/views'
workers: '/client/workers'
# Store each client as an object
clients = {}
module.exports = (ss, router) ->
# Require sub modules
templateEngine = require('./template_engine')(ss)
formatters = require('./formatters')(ss)
http = require('./http')(ss, clients, options)
# Load default code formatters
formatters.add('javascript')
formatters.add('css')
formatters.add('html')
# Very basic check to see if we can find pre-packed assets
# TODO: Improve to test for complete set
determineLatestId = (client) ->
try
files = fs.readdirSync(path.join(ss.root, options.dirs.assets, client.name))
latestId = files.sort().pop()
id = latestId.split('.')[0]
throw ('Invalid Client ID length') unless id.length == 13
id
catch e
false
systemAssets.load()
# Return API
formatters: formatters
templateEngine: templateEngine
assets: systemAssets
options: options
# Merge optional options
set: (newOption) ->
throw new Error('ss.client.set() takes an object e.g. {liveReload: false}') unless typeof(newOption) == 'object'
for k, v of newOption
if v instanceof Object
options[k][x] = y for x, y of v
else
options[k] = v
# Tell the asset manager to pack and minimise all assets
packAssets: (opts) ->
throw new Error('Options passed to ss.client.packAssets() must be an object') if opts and typeof(opts) != 'object'
options.packedAssets = opts || true
# Define a new Single Page Client
define: (name, paths) ->
throw new Error("Client name '#{name}' has already been defined") if clients[name]?
throw new Error("You may only define one HTML view per single-page client. Please pass a filename as a string, not an Array") if typeof(paths.view) != 'string'
throw new Error("The '#{paths.view}' view must have a valid HTML extension (such as .html or .jade)") if paths.view.indexOf('.') == -1
# Alias 'templates' to 'tmpl'
paths.tmpl = paths.templates if paths.templates
# Force each into an array
['css','code','tmpl'].forEach (assetType) =>
paths[assetType] = [paths[assetType]] unless paths[assetType] instanceof Array
# Define new client object
clients[name] =
id: Number(Date.now())
name: <NAME>
paths: paths
# Listen and serve incoming asset requests
load: ->
# Cache instances of code formatters and template engines here
# This may change in the future as I don't like hanging system objects
# on the 'ss' internal API object, but for now it solves a problem
# we were having when repl.start() would erase vars cached inside a module
ss.client.formatters = formatters.load()
ss.client.templateEngines = templateEngine.load()
# Code to execute once everything is loaded
systemAssets.send('code', 'init', "require('/entry');")
if options.packedAssets
# Attempt to find and serve existing pre-packed assets
# If unsuccessful, assets will be re-packed automatically
unless packAssets
ss.log 'i'.green, "Attempting to find pre-packed assets... (force repack with SS_PACK=1)".grey
for name, client of clients
if id = options.packedAssets.id || determineLatestId(client)
client.id = id
ss.log '✓'.green, "Serving client '#{client.name}' from pre-packed assets ID #{client.id}".grey
else
ss.log '!'.red, "Unable to find pre-packed assets for '#{client.name}'. All assets will be repacked".grey
packAssets = true
# Pack Assets
if packAssets
pack = require('./pack')
pack(ss, client, options) for name, client of clients
# Else serve files and watch for changes to files in development
else
require('./serve/dev')(ss, router, options)
require('./live_reload')(ss, options) if options.liveReload
# Listen out for requests to async load new assets
require('./serve/ondemand')(ss, router, options)
| true | # Client Asset Manager
# --------------------
# The Client Asset Manager allows you to define multiple single-page 'clients' which can be served on
# different URLs or to different devices. Note: The Client Asset Manager deliberately makes extensive use
# of synchronous code. This is because all operations only ever run once on startup (when packing the assets)
# unless you are running in dev mode
fs = require('fs')
path = require('path')
systemAssets = require('./system')
# Determine if assets should be (re)packed on startup
packAssets = process.env['SS_PACK']
# Set defaults
options =
packedAssets: packAssets || false
liveReload: ['code', 'css', 'static', 'templates', 'views']
dirs:
code: '/client/code'
css: '/client/css'
static: '/client/static'
assets: '/client/static/assets'
templates: '/client/templates'
views: '/client/views'
workers: '/client/workers'
# Store each client as an object
clients = {}
module.exports = (ss, router) ->
# Require sub modules
templateEngine = require('./template_engine')(ss)
formatters = require('./formatters')(ss)
http = require('./http')(ss, clients, options)
# Load default code formatters
formatters.add('javascript')
formatters.add('css')
formatters.add('html')
# Very basic check to see if we can find pre-packed assets
# TODO: Improve to test for complete set
determineLatestId = (client) ->
try
files = fs.readdirSync(path.join(ss.root, options.dirs.assets, client.name))
latestId = files.sort().pop()
id = latestId.split('.')[0]
throw ('Invalid Client ID length') unless id.length == 13
id
catch e
false
systemAssets.load()
# Return API
formatters: formatters
templateEngine: templateEngine
assets: systemAssets
options: options
# Merge optional options
set: (newOption) ->
throw new Error('ss.client.set() takes an object e.g. {liveReload: false}') unless typeof(newOption) == 'object'
for k, v of newOption
if v instanceof Object
options[k][x] = y for x, y of v
else
options[k] = v
# Tell the asset manager to pack and minimise all assets
packAssets: (opts) ->
throw new Error('Options passed to ss.client.packAssets() must be an object') if opts and typeof(opts) != 'object'
options.packedAssets = opts || true
# Define a new Single Page Client
define: (name, paths) ->
throw new Error("Client name '#{name}' has already been defined") if clients[name]?
throw new Error("You may only define one HTML view per single-page client. Please pass a filename as a string, not an Array") if typeof(paths.view) != 'string'
throw new Error("The '#{paths.view}' view must have a valid HTML extension (such as .html or .jade)") if paths.view.indexOf('.') == -1
# Alias 'templates' to 'tmpl'
paths.tmpl = paths.templates if paths.templates
# Force each into an array
['css','code','tmpl'].forEach (assetType) =>
paths[assetType] = [paths[assetType]] unless paths[assetType] instanceof Array
# Define new client object
clients[name] =
id: Number(Date.now())
name: PI:NAME:<NAME>END_PI
paths: paths
# Listen and serve incoming asset requests
load: ->
# Cache instances of code formatters and template engines here
# This may change in the future as I don't like hanging system objects
# on the 'ss' internal API object, but for now it solves a problem
# we were having when repl.start() would erase vars cached inside a module
ss.client.formatters = formatters.load()
ss.client.templateEngines = templateEngine.load()
# Code to execute once everything is loaded
systemAssets.send('code', 'init', "require('/entry');")
if options.packedAssets
# Attempt to find and serve existing pre-packed assets
# If unsuccessful, assets will be re-packed automatically
unless packAssets
ss.log 'i'.green, "Attempting to find pre-packed assets... (force repack with SS_PACK=1)".grey
for name, client of clients
if id = options.packedAssets.id || determineLatestId(client)
client.id = id
ss.log '✓'.green, "Serving client '#{client.name}' from pre-packed assets ID #{client.id}".grey
else
ss.log '!'.red, "Unable to find pre-packed assets for '#{client.name}'. All assets will be repacked".grey
packAssets = true
# Pack Assets
if packAssets
pack = require('./pack')
pack(ss, client, options) for name, client of clients
# Else serve files and watch for changes to files in development
else
require('./serve/dev')(ss, router, options)
require('./live_reload')(ss, options) if options.liveReload
# Listen out for requests to async load new assets
require('./serve/ondemand')(ss, router, options)
|
[
{
"context": "# Good news everyone! <news> - Generates Professor Farnsworth\n#\n# khanify <text> - TEEEEEEEEEEEEEEEEEXT!\n#\n# No",
"end": 625,
"score": 0.7355307936668396,
"start": 615,
"tag": "NAME",
"value": "Farnsworth"
},
{
"context": "r.net/Instance_Create')\n .query\n ... | src/scripts/meme_generator.coffee | skanev/hubot-scripts | 1 | # Integrates with memegenerator.net
#
# Y U NO <text> - Generates the Y U NO GUY with the bottom caption
# of <text>
#
# I don't always <something> but when i do <text> - Generates The Most Interesting man in the World
#
# <text> ORLY? - Generates the ORLY? owl with the top caption of <text>
#
# <text> (SUCCESS|NAILED IT) - Generates success kid with the top caption of <text>
#
# <text> ALL the <things> - Generates ALL THE THINGS
#
# <text> TOO DAMN <high> - Generates THE RENT IS TOO DAMN HIGH guy
#
# Good news everyone! <news> - Generates Professor Farnsworth
#
# khanify <text> - TEEEEEEEEEEEEEEEEEXT!
#
# Not sure if <text> or <text> - Generates Futurama Fry
#
# Yo dawg <text> so <text> - Generates Yo Dawg
module.exports = (robot) ->
robot.respond /Y U NO (.+)/i, (msg) ->
caption = msg.match[1] || ""
memeGenerator msg, 2, 166088, "Y U NO", caption, (url) ->
msg.send url
robot.respond /(I DON'?T ALWAYS .*) (BUT WHEN I DO,? .*)/i, (msg) ->
memeGenerator msg, 74, 2485, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*)(O\s?RLY\??.*)/i, (msg) ->
memeGenerator msg, 920, 117049, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*)(SUCCESS|NAILED IT.*)/i, (msg) ->
memeGenerator msg, 121, 1031, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*) (ALL the .*)/i, (msg) ->
memeGenerator msg, 6013, 1121885, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*) (\w+\sTOO DAMN .*)/i, (msg) ->
memeGenerator msg, 998, 203665, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(GOOD NEWS EVERYONE[,.!]?) (.*)/i, (msg) ->
memeGenerator msg, 1591, 112464, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /khanify (.*)/i, (msg) ->
memeGenerator msg, 6443, 1123022, "", khanify(msg.match[1]), (url) ->
msg.send url
robot.respond /(NOT SURE IF .*) (OR .*)/i, (msg) ->
memeGenerator msg, 305, 84688, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(YO DAWG .*) (SO .*)/i, (msg) ->
memeGenerator msg, 79, 108785, msg.match[1], msg.match[2], (url) ->
msg.send url
memeGenerator = (msg, generatorID, imageID, text0, text1, callback) ->
username = process.env.HUBOT_MEMEGEN_USERNAME
password = process.env.HUBOT_MEMEGEN_PASSWORD
preferredDimensions = process.env.HUBOT_MEMEGEN_DIMENSIONS
unless username? and password?
msg.send "MemeGenerator account isn't setup. Sign up at http://memegenerator.net"
msg.send "Then ensure the HUBOT_MEMEGEN_USERNAME and HUBOT_MEMEGEN_PASSWORD environment variables are set"
return
msg.http('http://version1.api.memegenerator.net/Instance_Create')
.query
username: username,
password: password,
languageCode: 'en',
generatorID: generatorID,
imageID: imageID,
text0: text0,
text1: text1
.get() (err, res, body) ->
result = JSON.parse(body)['result']
if result? and result['instanceUrl']? and result['instanceImageUrl']? and result['instanceID']?
instanceID = result['instanceID']
instanceURL = result['instanceUrl']
img = result['instanceImageUrl']
msg.http(instanceURL).get() (err, res, body) ->
# Need to hit instanceURL so that image gets generated
if preferredDimensions?
callback "http://images.memegenerator.net/instances/#{preferredDimensions}/#{instanceID}.jpg"
else
callback "http://memegenerator.net#{img}"
else
msg.reply "Sorry, I couldn't generate that image."
khanify = (msg) ->
msg = msg.toUpperCase()
vowels = [ 'A', 'E', 'I', 'O', 'U' ]
index = -1
for v in vowels when msg.lastIndexOf(v) > index
index = msg.lastIndexOf(v)
"#{msg.slice 0, index}#{Array(10).join msg.charAt(index)}#{msg.slice index}!!!!!"
| 192371 | # Integrates with memegenerator.net
#
# Y U NO <text> - Generates the Y U NO GUY with the bottom caption
# of <text>
#
# I don't always <something> but when i do <text> - Generates The Most Interesting man in the World
#
# <text> ORLY? - Generates the ORLY? owl with the top caption of <text>
#
# <text> (SUCCESS|NAILED IT) - Generates success kid with the top caption of <text>
#
# <text> ALL the <things> - Generates ALL THE THINGS
#
# <text> TOO DAMN <high> - Generates THE RENT IS TOO DAMN HIGH guy
#
# Good news everyone! <news> - Generates Professor <NAME>
#
# khanify <text> - TEEEEEEEEEEEEEEEEEXT!
#
# Not sure if <text> or <text> - Generates Futurama Fry
#
# Yo dawg <text> so <text> - Generates Yo Dawg
module.exports = (robot) ->
robot.respond /Y U NO (.+)/i, (msg) ->
caption = msg.match[1] || ""
memeGenerator msg, 2, 166088, "Y U NO", caption, (url) ->
msg.send url
robot.respond /(I DON'?T ALWAYS .*) (BUT WHEN I DO,? .*)/i, (msg) ->
memeGenerator msg, 74, 2485, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*)(O\s?RLY\??.*)/i, (msg) ->
memeGenerator msg, 920, 117049, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*)(SUCCESS|NAILED IT.*)/i, (msg) ->
memeGenerator msg, 121, 1031, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*) (ALL the .*)/i, (msg) ->
memeGenerator msg, 6013, 1121885, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*) (\w+\sTOO DAMN .*)/i, (msg) ->
memeGenerator msg, 998, 203665, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(GOOD NEWS EVERYONE[,.!]?) (.*)/i, (msg) ->
memeGenerator msg, 1591, 112464, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /khanify (.*)/i, (msg) ->
memeGenerator msg, 6443, 1123022, "", khanify(msg.match[1]), (url) ->
msg.send url
robot.respond /(NOT SURE IF .*) (OR .*)/i, (msg) ->
memeGenerator msg, 305, 84688, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(YO DAWG .*) (SO .*)/i, (msg) ->
memeGenerator msg, 79, 108785, msg.match[1], msg.match[2], (url) ->
msg.send url
memeGenerator = (msg, generatorID, imageID, text0, text1, callback) ->
username = process.env.HUBOT_MEMEGEN_USERNAME
password = process.env.HUBOT_MEMEGEN_PASSWORD
preferredDimensions = process.env.HUBOT_MEMEGEN_DIMENSIONS
unless username? and password?
msg.send "MemeGenerator account isn't setup. Sign up at http://memegenerator.net"
msg.send "Then ensure the HUBOT_MEMEGEN_USERNAME and HUBOT_MEMEGEN_PASSWORD environment variables are set"
return
msg.http('http://version1.api.memegenerator.net/Instance_Create')
.query
username: username,
password: <PASSWORD>,
languageCode: 'en',
generatorID: generatorID,
imageID: imageID,
text0: text0,
text1: text1
.get() (err, res, body) ->
result = JSON.parse(body)['result']
if result? and result['instanceUrl']? and result['instanceImageUrl']? and result['instanceID']?
instanceID = result['instanceID']
instanceURL = result['instanceUrl']
img = result['instanceImageUrl']
msg.http(instanceURL).get() (err, res, body) ->
# Need to hit instanceURL so that image gets generated
if preferredDimensions?
callback "http://images.memegenerator.net/instances/#{preferredDimensions}/#{instanceID}.jpg"
else
callback "http://memegenerator.net#{img}"
else
msg.reply "Sorry, I couldn't generate that image."
khanify = (msg) ->
msg = msg.toUpperCase()
vowels = [ 'A', 'E', 'I', 'O', 'U' ]
index = -1
for v in vowels when msg.lastIndexOf(v) > index
index = msg.lastIndexOf(v)
"#{msg.slice 0, index}#{Array(10).join msg.charAt(index)}#{msg.slice index}!!!!!"
| true | # Integrates with memegenerator.net
#
# Y U NO <text> - Generates the Y U NO GUY with the bottom caption
# of <text>
#
# I don't always <something> but when i do <text> - Generates The Most Interesting man in the World
#
# <text> ORLY? - Generates the ORLY? owl with the top caption of <text>
#
# <text> (SUCCESS|NAILED IT) - Generates success kid with the top caption of <text>
#
# <text> ALL the <things> - Generates ALL THE THINGS
#
# <text> TOO DAMN <high> - Generates THE RENT IS TOO DAMN HIGH guy
#
# Good news everyone! <news> - Generates Professor PI:NAME:<NAME>END_PI
#
# khanify <text> - TEEEEEEEEEEEEEEEEEXT!
#
# Not sure if <text> or <text> - Generates Futurama Fry
#
# Yo dawg <text> so <text> - Generates Yo Dawg
module.exports = (robot) ->
robot.respond /Y U NO (.+)/i, (msg) ->
caption = msg.match[1] || ""
memeGenerator msg, 2, 166088, "Y U NO", caption, (url) ->
msg.send url
robot.respond /(I DON'?T ALWAYS .*) (BUT WHEN I DO,? .*)/i, (msg) ->
memeGenerator msg, 74, 2485, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*)(O\s?RLY\??.*)/i, (msg) ->
memeGenerator msg, 920, 117049, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*)(SUCCESS|NAILED IT.*)/i, (msg) ->
memeGenerator msg, 121, 1031, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*) (ALL the .*)/i, (msg) ->
memeGenerator msg, 6013, 1121885, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(.*) (\w+\sTOO DAMN .*)/i, (msg) ->
memeGenerator msg, 998, 203665, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(GOOD NEWS EVERYONE[,.!]?) (.*)/i, (msg) ->
memeGenerator msg, 1591, 112464, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /khanify (.*)/i, (msg) ->
memeGenerator msg, 6443, 1123022, "", khanify(msg.match[1]), (url) ->
msg.send url
robot.respond /(NOT SURE IF .*) (OR .*)/i, (msg) ->
memeGenerator msg, 305, 84688, msg.match[1], msg.match[2], (url) ->
msg.send url
robot.respond /(YO DAWG .*) (SO .*)/i, (msg) ->
memeGenerator msg, 79, 108785, msg.match[1], msg.match[2], (url) ->
msg.send url
memeGenerator = (msg, generatorID, imageID, text0, text1, callback) ->
username = process.env.HUBOT_MEMEGEN_USERNAME
password = process.env.HUBOT_MEMEGEN_PASSWORD
preferredDimensions = process.env.HUBOT_MEMEGEN_DIMENSIONS
unless username? and password?
msg.send "MemeGenerator account isn't setup. Sign up at http://memegenerator.net"
msg.send "Then ensure the HUBOT_MEMEGEN_USERNAME and HUBOT_MEMEGEN_PASSWORD environment variables are set"
return
msg.http('http://version1.api.memegenerator.net/Instance_Create')
.query
username: username,
password: PI:PASSWORD:<PASSWORD>END_PI,
languageCode: 'en',
generatorID: generatorID,
imageID: imageID,
text0: text0,
text1: text1
.get() (err, res, body) ->
result = JSON.parse(body)['result']
if result? and result['instanceUrl']? and result['instanceImageUrl']? and result['instanceID']?
instanceID = result['instanceID']
instanceURL = result['instanceUrl']
img = result['instanceImageUrl']
msg.http(instanceURL).get() (err, res, body) ->
# Need to hit instanceURL so that image gets generated
if preferredDimensions?
callback "http://images.memegenerator.net/instances/#{preferredDimensions}/#{instanceID}.jpg"
else
callback "http://memegenerator.net#{img}"
else
msg.reply "Sorry, I couldn't generate that image."
khanify = (msg) ->
msg = msg.toUpperCase()
vowels = [ 'A', 'E', 'I', 'O', 'U' ]
index = -1
for v in vowels when msg.lastIndexOf(v) > index
index = msg.lastIndexOf(v)
"#{msg.slice 0, index}#{Array(10).join msg.charAt(index)}#{msg.slice index}!!!!!"
|
[
{
"context": "um.*|.*Belvedere.*/i\n\n if organisationName is 'Stadt Wien'\n returnValue = 'Federal state'\n else i",
"end": 3918,
"score": 0.9858483672142029,
"start": 3908,
"tag": "NAME",
"value": "Stadt Wien"
},
{
"context": " {\n ... | packages/custom/transparency/coffee/server/controllers/transparencyServerCtrl.coffee | AnotherCodeArtist/medien-transparenz.at | 3 | 'use strict'
qfs = require 'q-io/fs'
fs = require 'fs'
config = require('meanio').loadConfig()
mongoose = require 'mongoose'
iconv = require 'iconv-lite'
_ = require 'lodash'
#mongooseWhen = require 'mongoose-when'
Q = require 'q'
#Promise = mongoose.Promise
sorty = require 'sorty'
#iconv.extendNodeEncodings()
Transfer = mongoose.model 'Transfer'
Event = mongoose.model 'Event'
Organisation = mongoose.model 'Organisation'
ZipCode = mongoose.model 'Zipcode'
Grouping = mongoose.model 'Grouping'
regex = /"?(.+?)"?;(\d{4})(\d);(\d{1,2});\d;"?(.+?)"?;(\d+(?:,\d{1,2})?).*/
#returns value for "others" / replaces promise
getTotalAmountOfTransfers = (entries) ->
amounts = (entry.total for entry in entries)
totalAmount = amounts.reduce(((total, num) ->
total + num), 0)
totalAmount
#matches media to federalState (due to lack of grouping)
mediaToFederalState = (mediaResult) ->
uniqueMedia= []
#console.log("Entries in result " +mediaResult.length)
for media in mediaResult
mediaNames = (name.organisation for name in uniqueMedia)
if media.organisation not in mediaNames
uniqueMedia.push(media)
else
# media is already there, add sum to media
#console.log (media.organisation + ' in media names')
for uniqueEntry in uniqueMedia
if uniqueEntry.organisation is media.organisation
#console.log(uniqueEntry.organisation + 'has already ' +uniqueEntry.total)
#console.log("The transfer adds "+ media.total)
uniqueEntry.total += media.total
#console.log("Entry has now " +uniqueEntry.total)
break
#console.log ("Entries after uniqueness: " + uniqueMedia.length)
uniqueMedia
#function for populate
getPopulateInformation = (sourceForPopulate, path) ->
#path: what to look for, select without id
populatePromise = Organisation.populate(sourceForPopulate, {path: path, select: '-_id'})
populatePromise
#Transfer of line to ZipCode
lineToZipCode = (line, numberOfZipCodes) ->
splittedLine = line.split(",")
if splittedLine.length != 2
throw new Error('Upload expects another file format')
#Skip first line
if splittedLine[0] != 'PLZ'
entry = new ZipCode()
entry.zipCode = splittedLine[0]
entry.federalState = splittedLine[1]
entry.save()
numberOfZipCodes++
numberOfZipCodes
# determines org type by name
determineOrganisationType = (organisationName) ->
#public: state (Land), city (Stadt), municipality (Gemeinde)
returnValue = 'Undetermined'
regexCompany = /(.+G(?:es|esellschaft)?\.?m\.?b\.?H\.?.?$)|.*Gesellschaft?.*|.*AG$|.*OG$|.*KG$|(.* d.o.o?.).*|.*s.r.o?.$|.*Sp.? z?.*|.*spol.r.s.o.|.*Sp.z.o.o..*|.* S\.R\.L\.$|.* in Liq.*|.*unternehmung|.*Limited.*|.*AD$|.*S.P.A.*|.*S.P.R.L.|.*Iberica SL|.*likvidaci.*|.*p\.l\.c\./i
regexIncorporatedCompany = /.* AG.*/
regexAssociation = /.*(Verband).*|.*(Verein).*/i
regexFoundation = /.*(Stiftung).*|.*(Holding)/i
regexCity = /^Stadt .+|.*Stadtwerke.*/i
regexMunicipality = /^(?:Markt)?gemeinde?.*|Stadtgemeinde .*|.*Sanitäts.*/i
regexState = /^Land .+/ #Stadt Wien -- provincial
regexMinistry = /^(?:Bundesministerium|Bundeskanzleramt)/
regexAgency = /.*(Bundesamt|Patentamt|Parlamentsdirektion|Präsidentschaftskanzlei|Verfassungsgerichtshof|Volksanwaltschaft|.*Agency.*|Arbeitsmarktservice|Agentur.*)/i #national - public agency
regexFund = /.*Fonds?.*/i
regexChamber = /.*?Kammer?.*/i
regexPolicyRelevant = /^(Alternativregion).*|.*BIFIE|.*FMA|.*Sprengel?.*|^Kleinregion .*|Arbeitsmarktservice|Verwaltungsgerichtshof/i
regexEducation = /.*(Alumni).*|.*(Universit).*|.*(Hochsch).*|.*Mittelschul.*|.*Schul.*|.*Päda.*/i
regexMuseum = /Albertina|.*Museum.*|.*Belvedere.*/i
if organisationName is 'Stadt Wien'
returnValue = 'Federal state'
else if organisationName.match regexCompany
returnValue = 'Company'
else if organisationName.match regexIncorporatedCompany
returnValue = 'Company'
else if organisationName.match regexAssociation
returnValue = 'Association'
else if organisationName.match regexChamber
returnValue = 'Chamber'
else if organisationName.match regexEducation
returnValue = 'Education'
else if organisationName.match regexFoundation
returnValue = 'Foundation'
else if organisationName.match regexMunicipality
returnValue = 'Municipality'
else if organisationName.match regexFund
returnValue = 'Fund'
else if organisationName.match regexPolicyRelevant
returnValue = 'Policy-relevant'
else if organisationName.match regexMinistry
returnValue = 'Ministry'
else if organisationName.match regexCity
returnValue = 'City'
else if organisationName.match regexState
returnValue = 'Federal state'
else if organisationName.match regexAgency
returnValue = 'Agency'
else if organisationName.match regexMuseum
returnValue = 'Museum'
console.log "Undetermined organisation type for: " + organisationName if returnValue is 'undetermined'
returnValue
#Transfer of line to Organisation
lineToOrganisation = (line, feedback) ->
if not feedback
console.log "THIS SHOULD NOT HAPPEN: Supposed to parse line #{line} but got no feedback object!"
splittedLine = line.split(";")
#Skip first and last lines
if splittedLine[0] != 'Bezeichnung des Rechtsträgers' and splittedLine[0] != ''
organisation = new Organisation()
organisation.name = splittedLine[0]
organisation.street = splittedLine[1]
organisation.zipCode = splittedLine[2]
organisation.city_de = splittedLine[3]
organisation.country_de = splittedLine[4]
# Setting the org type
organisation.type = determineOrganisationType splittedLine[0]
ZipCode.findOne({'zipCode': splittedLine[2]})
.then (results) ->
if results and organisation.country_de is 'Österreich'
organisation.federalState = results.federalState
else
organisation.federalState = 'Unknown'
organisation.save()
.then (ok) ->
feedback.entries++
feedback.notAustria++ if organisation.country_de != 'Österreich'
if organisation.federalState is 'Unknown' and organisation.country_de is 'Österreich'
feedback.unknownFederalState++
feedback.unknownFederalStateEntries.push organisation
# Feedback for org type
switch organisation.type
when 'Company' then feedback.organisationTypeCompany++
when 'Association' then feedback.organisationTypeAssociation++
when 'Chamber' then feedback.organisationTypeChamber++
when 'Education' then feedback.organisationTypeEducation++
when 'Foundation' then feedback.organisationTypeFoundation++
when 'Municipality' then feedback.organisationTypeMunicipality++
when 'Fund' then feedback.organisationTypeFund++
when 'Undetermined' then feedback.undeterminedOrganisationType++
when 'Policy-relevant' then feedback.organisationTypePolicyRelevant++
when 'Ministry' then feedback.organisationTypeMinistry++
when 'City' then feedback.organisationTypeCity++
when 'Federal state' then feedback.organisationTypeState++
when 'Agency' then feedback.organisationTypeAgency++
when 'Museum' then feedback.organisationTypeMuseum++
feedback
.catch (err) ->
feedback.errors+=1
feedback.errorEntries.push {organisation: organisation, errorMessage: err.errmsg, errorCode: err.code}
console.log "ERROR: Could not store organisation #{organisation.name}"
feedback
else
feedback.ignoredEntries++;
feedback
lineToTransfer = (line, feedback) ->
if not feedback
console.log "THIS SHOULD NOT HAPPEN: Supposed to parse line #{line} but got no feedback object!"
m = line.match regex
#console.log "Result: #{m} for line #{line}"
if m
transfer = new Transfer()
transfer.organisation = m[1].replace /""/g,'"'
transfer.year = parseInt m[2]
transfer.quarter = parseInt m[3]
transfer.transferType = parseInt m[4]
transfer.media = m[5].replace('""','"').replace(/http:\/\//i,'').replace('www.','').replace(/([\w\.-]+(?:\.at|\.com))/,(m)->m.toLowerCase())
transfer.period = parseInt(m[2] + m[3])
transfer.amount = parseFloat m[6].replace ',', '.'
transfer.organisationType = determineOrganisationType transfer.organisation
#Save reference
Organisation.findOne({ 'name': transfer.organisation }, 'name federalState')
.then (results) ->
if results
transfer.organisationReference = results._id
transfer.federalState = results.federalState
transfer.save()
else
console.log "WARNING: Could not find reference for #{transfer.organisation}!"
Organisation.findOne name: "Unknown"
.then (unknown) ->
if unknown
console.log "Setting org-reference for #{transfer.organisation} to 'Unknown' (#{unknown._id})"
transfer.federalState = 'Unknown'
transfer.organisationReference = unknown._id
unknownOrganisationNames = (org.organisation for org in feedback.unknownOrganisations)
feedback.unknownOrganisations.push {organisation: transfer.organisation} if transfer.organisation not in unknownOrganisationNames
transfer.save()
else
feedback.errors+=1
throw new Error("'Unknown' as placeholder was not found in organisation collection")
.then (ok) ->
feedback.quarter = transfer.quarter
feedback.year = transfer.year
feedback.entries++
feedback.paragraph2++ if transfer.transferType is 2
feedback.paragraph4++ if transfer.transferType is 4
feedback.paragraph31++ if transfer.transferType is 31
feedback.sumParagraph2 += transfer.amount if transfer.transferType is 2
feedback.sumParagraph4 += transfer.amount if transfer.transferType is 4
feedback.sumParagraph31 += transfer.amount if transfer.transferType is 31
feedback.sumTotal += transfer.amount
feedback
.catch (err) ->
feedback.errors+=1
feedback.errorEntries.push {errorMessage: err.errmsg, errorCode: err.code}
console.log "Error while importing data: #{JSON.stringify err}"
feedback
else feedback
mapEvent = (event,req) ->
event.name = req.body.name
event.startDate = req.body.startDate
event.numericStartDate = req.body.numericStartDate
event.endDate = req.body.endDate
event.predictable = req.body.predictable
if req.body.numericEndDate
event.numericEndDate = req.body.numericEndDate
event.tags = req.body.tags
event.region = req.body.region
event
handleGroupings = (groupings, transfers, limit) ->
console.log ("found " + groupings.length + " gropings");
console.log ("found " + transfers.length + " transfers");
transfersWithGrouping = transfers
for grouping in groupings
groupingTransfersAmount = (transfer.total for transfer in transfersWithGrouping when transfer.organisation in grouping.members)
groupingTransfersNames = (transfer.organisation for transfer in transfersWithGrouping when transfer.organisation in grouping.members)
groupingTotalAmount = groupingTransfersAmount.reduce ((total, sum) -> total + sum),0
#console.log("Grouping " + grouping.name + " with the member(s):"
#JSON.stringify(grouping.members)+ " has the sum of " + groupingTotalAmount+ "("+ groupingTransfersAmount.length+" transfer(s))")
#remove ALL transfers (filter) from results
transfersWithGrouping = transfersWithGrouping.filter((transfer) ->
transfer.organisation not in groupingTransfersNames
)
transfersWithGrouping.push({total: groupingTotalAmount, organisation: "(G) " + grouping.name, isGrouping: true})
#console.log( "Group entry added: " + JSON.stringify(transfersWithGrouping[transfersWithGrouping.length-1]))
#Sort array of transfers by total amount
sorty([{name: 'total', dir: 'desc', type: 'number'}], transfersWithGrouping)
transfersWithGrouping.splice(0,limit)
module.exports = (Transparency) ->
overview: (req, res) ->
queryPromise = Transfer.aggregate({$match: {}})
.group(
_id:
quarter: "$quarter"
year: "$year"
transferType: "$transferType"
entries: {$sum: 1}
total:
$sum: "$amount")
.project(quarter: "$_id.quarter", year: "$_id.year", transferType: "$_id.transferType", _id: 0, entries: 1, total: 1)
#.sort('-year -quarter transferType')
.group(
_id:
year: "$year"
quarters:
$addToSet: {quarter: "$quarter", transferType: "$transferType", entries: "$entries", total: "$total"}
total:
$sum: "$total")
.project(year: "$_id.year", _id: 0, quarters: 1, total: 1)
.sort("year")
.exec()
queryPromise.then(
(result) ->
res.send result
(err) ->
res.status(500).send "Could not load overview from Database: #{err}"
)
years: (req, res) ->
queryPromise = Transfer.aggregate($match: {})
.group(_id:
year: "$year")
.project(year: "$_id.year", _id: 0)
.sort("-year")
.exec()
queryPromise.then(
(result) ->
res.send years: result.map (e)->
e.year
(err) ->
res.status(500).send "Could not load overview from Database: #{err}"
)
upload: (req, res) ->
file = req.files.file;
feedback =
quarter: 0
year: 0
entries: 0
paragraph2: 0
sumParagraph2: 0
paragraph4: 0
sumParagraph4: 0
paragraph31: 0
sumParagraph31: 0
sumTotal: 0.0
unknownOrganisations: []
errors: 0
errorEntries: []
#qfs.read(file.path).then(
fs.readFile file.path, (err,data) ->
if err
res.send 500, "Error #{err.message}"
else
input = iconv.decode data,'latin1'
input.split("\n").reduce ((p,line) -> p.then((f) -> lineToTransfer line, f)), Q.fcall(->feedback)
.then (ok) ->
Transfer.count()
.then(
(transfersInDatabase) ->
feedback.savedInDatabase = transfersInDatabase
feedback.integrityCheck = true
res.status(200).send(feedback)
)
.catch (err) ->
res.send 500, "Error #{err.message}"
#Function for the upload of organisation-address-data
uploadOrganisation: (req, res) ->
file = req.files.file;
feedback =
entries: 0
ignoredEntries: 0
unknownFederalState: 0,
unknownFederalStateEntries: [],
undeterminedOrganisationType: 0,
organisationTypeCompany: 0,
organisationTypeAssociation: 0,
organisationTypeFoundation: 0,
organisationTypeMunicipality: 0,
organisationTypeState: 0,
organisationTypeCity: 0,
organisationTypeMinistry: 0,
organisationTypeAgency: 0,
organisationTypeFund: 0,
organisationTypeChamber: 0,
organisationTypePolicyRelevant: 0,
organisationTypeEducation: 0,
organisationTypeMuseum: 0,
notAustria: 0,
errors:0
errorEntries: []
fs.readFile file.path, (err,data) ->
if err
res.status(500).send("Error #{err.message}")
else
input = iconv.decode data, 'utf8'
input.split("\n").reduce ((p,line) -> p.then((f) -> lineToOrganisation line, f)), Q.fcall(->feedback)
.then (ok) ->
Organisation.count()
.then(
(organisationsInDatabase) ->
feedback.savedInDatabase = organisationsInDatabase
feedback.integrityCheck = true
res.status(200).send(feedback)
)
.catch (err) ->
res.send 500, "Error #{err.message}"
#Function for the upload of organisation-address-data
uploadZipCode: (req, res) ->
file = req.files.file;
response =
newZipCodes: 0
integrityCheck: false
savedInDatabase: 0
fs.readFile file.path, (err,data) ->
if err
res.status(500).send("Error #{err.message}")
else
input = iconv.decode data, 'utf8'
response.newZipCodes = lineToZipCode(line,response.newZipCodes) for line in input.split('\n')
ZipCode.count()
.then(
(codesInDatabase) ->
response.savedInDatabase = codesInDatabase
response.integrityCheck = true
res.status(200).send(response)
(error) ->
res.send 500, "Error #{error}"
)
periods: (req, res) ->
Transfer.aggregate(
$match: {}
)
.group(
_id:
year: "$year", quarter: "$quarter", period: "$period"
)
.project(
year: "$_id.year", quarter: "$_id.quarter", period: "$_id.period", _id: 0
)
.sort("-year -quarter")
.exec()
.then(
(data) ->
res.send data
(err) -> res.status(500).send("Could not load periods (#{err})!")
)
filteredflows: (req, res) ->
getOtherMedia = (organisations, media, period, paymentTypes, federalState) ->
result = []
if (organisations and organisations.length > 0) and (media and media.length > 0)
qry = {}
(qry.transferType = $in: paymentTypes.map (e)->
parseInt(e)) if paymentTypes.length > 0
(qry.organisation = $in: organisations) if organisations.length > 0
(qry.media = $nin: media) if media.length > 0
if period.$gte? or period.$lte?
qry.period = period
grp =
_id:
organisation: "$organisation"
organisationReference: "$organisationReference"
transferType: "$transferType"
amount:
$sum: "$amount"
Transfer.aggregate($match: qry)
.group grp
.exec()
.then (rslt) ->
for data in rslt
result.push {
amount: data.amount,
organisation: data._id.organisation,
transferType: data._id.transferType,
media: "Other media"
}
result
else
new Promise (resolve, reject) ->
resolve result
getOtherOrganisations = (organisations, media, period, paymentTypes, federalState) ->
result = []
if (media and media.length > 0) and (organisations and organisations.length > 0)
qry = {}
(qry.transferType = $in: paymentTypes.map (e)->
parseInt(e)) if paymentTypes.length > 0
(qry.organisation = $nin: organisations) if organisations.length > 0
(qry.media = $in: media) if media.length > 0
if period.$gte? or period.$lte?
qry.period = period
grp =
_id:
media: "$media"
transferType: "$transferType"
amount:
$sum: "$amount"
Transfer.aggregate($match: qry)
.group grp
.exec()
.then (rslt) ->
for data in rslt
result.push {
amount: data.amount,
media: data._id.media,
transferType: data._id.transferType,
organisation: "Other organisations"
}
result
else
new Promise (resolve, reject) ->
resolve result
try
maxLength = parseInt req.query.maxLength or "750"
federalState = req.query.federalState or ''
period = {}
period['$gte'] = parseInt(req.query.from) if req.query.from
period['$lte'] = parseInt(req.query.to) if req.query.to
paymentTypes = req.query.pType or []
paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
query = {}
(query.transferType =
$in: paymentTypes.map (e)->
parseInt(e)) if paymentTypes.length > 0
organisations = req.query.organisations or []
organisations = [organisations] if organisations not instanceof Array
media = req.query.media or []
media = [media] if media not instanceof Array
(query.organisation = $in: organisations) if organisations.length > 0
(query.media = $in: media) if media.length > 0
if period.$gte? or period.$lte?
query.period = period
group =
_id:
organisation: "$organisation"
organisationReference: "$organisationReference"
transferType: "$transferType"
media: "$media"
amount:
$sum: "$amount"
Transfer.aggregate($match: query)
.group(group)
.project(
organisation: "$_id.organisation",
organisationReference: "$_id.organisationReference",
transferType: "$_id.transferType",
media: "$_id.media"
_id: 0
amount: 1
)
.exec()
.then (result) ->
populatedPromise = getPopulateInformation(result, 'organisationReference')
.then(
(isPopulated) ->
if federalState
result = (transfer for transfer in result when transfer.organisationReference.federalState_en is federalState)
getOtherMedia(organisations, media, period, paymentTypes, "").then (otherMedia) ->
result = result.concat otherMedia
getOtherOrganisations(organisations, media, period, paymentTypes, "").then (otherOrganisations) ->
result = result.concat otherOrganisations
if result.length > maxLength
res.status(413).send {
error: "You query returns more then the specified maximum of #{maxLength}"
length: result.length
}
else
res.json result
)
.catch (err) ->
res.status(500).send error: "Could not load money flow: #{err}"
catch error
res.status(500).send error: "Could not load money flow: #{error}"
# GET: quarterly payment time series (nvd3 format) for the given source
# organisations (`source`) and target media (`target`).
flowdetail: (req, res) ->
  try
    # transfer paragraph types; defaults to §2 payments
    paymentTypes = req.query.pType or ['2']
    paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
    source = req.query.source or []
    target = req.query.target or []
    source = [source] if source not instanceof Array
    target = [target] if target not instanceof Array
    query = {}
    if source.length > 0 then query.organisation = $in: source;
    if target.length > 0 then query.media = $in: target;
    (query.transferType =
      $in: paymentTypes.map (e)->
        parseInt(e)) if paymentTypes.length > 0
    Transfer.find query, {}, {sort: {year: 1, quarter: 1}}, (err, transfers) ->
      result = {
        data:
          {
            key: "Zahlungen"
            values: []
          }
        tickvalues: []
      }
      i = 0
      tmpObj = {
      }
      #find all years
      Transfer.distinct 'year', (error, data) ->
        if !error
          years = data
          years.sort()
          tmpResult = {}
          tickvalues = []
          # one tick per quarter, encoded as year + quarter/4
          for year in years
            for quarter in [0...4]
              tmpObj[year + (quarter/4)] = 0
              tickvalues.push (year + (quarter/4))
          tickvalues.sort()
          # accumulate the matching transfers into their quarter bucket
          for transfer in transfers
            tmpObj[""+ (transfer.year + (transfer.quarter-1)/4)] += transfer.amount
          result.tickvalues = tickvalues
          for tickvalue in tickvalues
            result.data.values.push [tickvalue, tmpObj[tickvalue]]
          res.json result
        else
          res.status 500
            .send "Could not load years from database! #{error}"
  catch error
    res.status(500).send error: "Could not load money flow: #{error}"
# GET: per-year quarterly sums for the given source/target — one series
# per year (with a random colour) for an nvd3 multi-bar chart.
annualcomparison: (req, res) ->
  try
    paymentTypes = req.query.pType or ['2']
    paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
    source = req.query.source or []
    target = req.query.target or []
    source = [source] if source not instanceof Array
    target = [target] if target not instanceof Array
    query = {}
    if source.length > 0 then query.organisation = $in: source;
    if target.length > 0 then query.media = $in: target;
    (query.transferType =
      $in: paymentTypes.map (e)->
        parseInt(e)) if paymentTypes.length > 0
    years = []
    #find all years
    Transfer.distinct 'year', (error, data) ->
      if !error
        years = data
        years.sort()
        # pre-seed every year with four empty quarters
        tmpResult = {}
        for year in years
          tmpResult[""+year] = {
            quarters: {
              '1': 0
              '2': 0
              '3': 0
              '4': 0
            }
          }
      else
        res.status 500
          .send "Could not load years from database! #{error}"
      # NOTE(review): runs even when the distinct query failed above, in
      # which case `tmpResult` is undefined — verify the error path.
      Transfer.find query, {}, {sort: {year: 1, quarter: 1}, transferType: 1}, (err, transfers) ->
        for transfer in transfers
          tmpResult[""+transfer.year].quarters[""+transfer.quarter] += transfer.amount
        result = []
        for year, quarters of tmpResult
          quarterArr = []
          for quarter, amount of quarters.quarters
            quarterArr.push {
              x: (Number(quarter)-1)/4
              y: amount
            }
          result.push {
            key: year
            # NOTE(review): random hex colour may be shorter than 6 digits
            color: '#'+(Math.random()*0xFFFFFF<<0).toString(16)
            values: quarterArr
          }
        res.json result
  catch error
    res.status(500).send error: "Could not load money flow: #{error}"
# GET: aggregated money flows (organisation -> media) with optional
# period, payment-type, name, free-text and federal-state filters.
# Answers 413 when more than `maxLength` flows would be returned.
flows: (req, res) ->
  try
    maxLength = parseInt req.query.maxLength or "750"
    federalState = req.query.federalState if req.query.federalState
    period = {}
    period['$gte'] = parseInt(req.query.from) if req.query.from
    period['$lte'] = parseInt(req.query.to) if req.query.to
    paymentTypes = req.query.pType or []
    paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
    orgType = req.query.orgType or 'org'
    name = req.query.name
    query = {}
    (query.transferType =
      $in: paymentTypes.map (e)->
        parseInt(e)) if paymentTypes.length > 0
    # `name` filters either the paying organisation or the receiving medium
    query[if orgType is 'org' then 'organisation' else 'media'] = name if name
    if period.$gte? or period.$lte?
      query.period = period
    if req.query.filter
      filter = req.query.filter
      # case-insensitive substring match on either side of the flow
      query.$or = [
        {organisation: { $regex: ".*#{filter}.*", $options: "i"}}
        {media: { $regex: ".*#{filter}.*", $options: "i"}}
      ]
    if federalState?
      query.federalState = federalState
    group =
      _id:
        organisation: "$organisation"
        transferType: "$transferType"
        media: "$media"
      amount:
        $sum: "$amount"
    Transfer.aggregate($match: query)
      .group(group)
      .project(
        organisation: "$_id.organisation",
        transferType: "$_id.transferType",
        media: "$_id.media"
        _id: 0
        amount: 1
      )
      .exec()
      .then (result) ->
        if result.length > maxLength
          res.status(413).send {
            error: "You query returns more then the specified maximum of #{maxLength}"
            length: result.length
          }
        else
          res.json result
      .catch (err) ->
        res.status(500).send error: "Could not load money flow: #{err}"
  catch error
    res.status(500).send error: "Could not load money flow: #{error}"
# GET: top-X organisations/media by total amount, the overall total
# (computed via map-reduce) and, optionally, user-defined groupings
# merged into the top list (see handleGroupings).
topEntries: (req, res) ->
  promiseToFullfill = []
  federalState = req.query.federalState if req.query.federalState
  includeGroupings = req.query.groupings if req.query.groupings
  period = {}
  period['$gte'] = parseInt(req.query.from) if req.query.from
  period['$lte'] = parseInt(req.query.to) if req.query.to
  orgType = req.query.orgType or 'org'
  paymentTypes = req.query.pType or ['2']
  paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
  limitOfResults = parseInt(req.query.x or '10')
  orgCategories = req.query.orgCategories if req.query.orgCategories
  orgCategories = [orgCategories] if orgCategories not instanceof Array and req.query.orgCategories
  query = {}
  project =
    organisation: '$_id.organisation'
    _id: 0
    total: 1
  if period.$gte? or period.$lte?
    query.period = period
  query.transferType =
    $in: paymentTypes.map (e)->
      parseInt(e)
  if federalState?
    query.federalState = federalState
  if orgCategories?
    query.organisationType =
      $in: orgCategories
  group =
    _id:
      organisation: if orgType is 'org' then '$organisation' else '$media',
    total:
      $sum: '$amount'
  # map-reduce over the same query computes the grand total of all amounts
  options = {}
  options.map = () ->
    emit this.year, this.amount
  options.reduce = (key, vals) ->
    Array.sum vals
  options.query = query
  # with groupings the limit is applied later (after merging) in handleGroupings
  if not includeGroupings
    topPromise = Transfer.aggregate($match: query)
      .group(group)
      .sort('-total')
      .limit(limitOfResults)
      .project(project)
      .exec()
  else
    topPromise = Transfer.aggregate($match: query)
      .group(group)
      .sort('-total')
      .project(project)
      .exec()
  promiseToFullfill.push(topPromise)
  allPromise = Transfer.mapReduce options
  promiseToFullfill.push allPromise
  if includeGroupings
    groupingQuery = {}
    groupingQuery.isActive = true
    groupingQuery.type = orgType
    groupingQuery.region = if federalState then federalState else 'AT'
    groupingsPromise = Grouping.find(groupingQuery)
      .select('name owner members -_id')
      .exec()
    promiseToFullfill.push(groupingsPromise)
  allPromise.then (r) ->
    Q.all(promiseToFullfill)
    .then (results) ->
      try
        result =
          top: results[0]
          all: results[1].reduce(
            (sum, v)->
              sum + v.value
            0)
          groupings: results[2] if results[2]
        if result.groupings?
          result.top = handleGroupings(result.groupings, result.top, limitOfResults)
        res.send result
      catch error
        console.log error
        res.status(500).send("No Data was found!")
    .catch (err) ->
      console.log "Error in Promise.when"
      console.log err
      res.status(500).send("Error #{err.message}")
# GET: free-text search for organisations and/or media by name.
# `name` (required) is split on spaces; without `federalState` a record
# matches when ANY term matches case-insensitively ($or). Optional
# `orgType` restricts the search to one side ('org' or 'media').
search: (req,res) ->
  name = req.query.name
  federalState = req.query.federalState if req.query.federalState
  if not name
    res.status(400).send error: "'name' is required!"
    return
  types = if req.query.orgType then [req.query.orgType] else ['org','media']
  # builds {<field>: {$regex: /.*value.*/i}}
  buildRegex = (name,value) ->
    q={}
    q[name]= { $regex: ".*#{value}.*", $options: "i"}
    q
  # aggregates matches for one side ('org' or 'media')
  performQuery = (orgType) ->
    nameField = if orgType is 'org' then 'organisation' else 'media'
    group =
      _id:
        name: "$#{nameField}"
        type: orgType
      years:
        $addToSet: "$year"
      total: $sum: "$amount"
      transferTypes: $addToSet: "$transferType"
    project =
      name: '$_id.name'
      _id: 0
      years: 1
      total: 1
      transferTypes: 1
    if orgType is 'org'
      group._id.organisationType = '$organisationType'
      group._id.federalState = '$federalState'
      project.organisationType = '$_id.organisationType'
      project.federalState = '$_id.federalState'
    # one regex clause per search term (was a reduce with a dead `q={}` local)
    $or = name.split(' ').map (n) -> buildRegex(nameField, n)
    # NOTE(review): with a federalState the clauses are combined with $and,
    # i.e. every term must match — verify that asymmetry is intended
    if not federalState
      query = $or: $or
    else
      query = $and: $or
      query.$and.push {"federalState": federalState}
    Transfer.aggregate($match: query)
      .group(group)
      .project(project)
      .sort('name')
      .exec()
  all = Q.all types.map (t) ->
    performQuery t
  all.then (results) ->
    result = types.reduce ((r,t,index) -> r[t] = results[index];r),{}
    res.json result
  .catch (err) ->
    res.status(500).send error: "Could not perform search"
# GET: paged listing of all organisations and/or media with their
# active years, total amount and transfer types. Optional `orgType`
# restricts to one side, `federalState` narrows the match.
list: (req,res) ->
  types = if req.query.orgType then [req.query.orgType] else ['org','media']
  page = parseInt req.query.page or "0"
  size = parseInt req.query.size or "50"
  federalState = req.query.federalState
  # aggregates one page for one side ('org' or 'media')
  performQuery = (orgType) ->
    nameField = if orgType is 'org' then 'organisation' else 'media'
    query = {}
    if federalState?
      query.federalState = federalState
    # (removed a dead `project = {}` that was immediately overwritten)
    project =
      name: '$_id.name'
      _id: 0
      years: 1
      total: 1
      transferTypes: 1
    group =
      _id:
        name: "$#{nameField}"
        type: orgType
      years:
        $addToSet: "$year"
      total: $sum: "$amount"
      transferTypes: $addToSet: "$transferType"
    if orgType is 'org'
      group._id.organisationType= '$organisationType'
      project.organisationType = '$_id.organisationType'
    Transfer.aggregate($match: query)
      .group(group)
      .project(project)
      .sort('name').skip(page*size).limit(size)
      .exec()
  all = Q.all types.map (t) ->
    performQuery t
  all.then (results) ->
    result = types.reduce ((r,t,index) -> r[t] = results[index];r),{}
    res.json result
  .catch (err) ->
    res.status(500).send error: "Could not perform search #{err}"
# GET: number of distinct organisations or media (optionally restricted
# to one federal state); responds with the bare count.
count: (req,res) ->
  type = req.query.orgType or 'org'
  federalState = req.query.federalState if req.query.federalState
  performQuery = (orgType) ->
    nameField = if orgType is 'org' then 'organisation' else 'media'
    query = {}
    group =
      _id:
        name: "$#{nameField}"
    if federalState
      query.federalState = federalState
      # NOTE(review): groups on the literal state value, not '$federalState';
      # harmless since the query already filters, but verify the intent
      group._id.federalState = federalState
    Transfer.aggregate($match: query)
      .group(group)
      .exec()
  performQuery(type)
  .then (result) ->
    res.json result.length
  .catch (err) ->
    res.status(500).send error: "Could not determine number of items #{err}"
# GET: events by region, by id, or all of them.
getEvents: (req,res) ->
  # shared callback: 500 on error, 404 on empty result, JSON otherwise
  handleEventResponse = (err, data) ->
    if err
      res.status(500).send error: "Could not get events #{err}"
    else if !data or data.length is 0
      res.status(404).send()
    else
      res.json data
  #todo: insert parameter checking
  if req.query.region
    Event.find {region: req.query.region}, handleEventResponse
  else if req.query.id
    Event.findById req.query.id, handleEventResponse
  else
    Event.find {}, handleEventResponse
# POST: creates a new event from the request body (see mapEvent).
createEvent: (req,res) ->
  #todo: insert parameter checking
  event = new Event()
  event = mapEvent event, req
  event.save (err) ->
    if err
      res.status(500).send error: "Could not create event #{err}"
    else
      res.json event
# PUT: updates the event identified by req.body._id with the values
# from the request body (see mapEvent).
updateEvent: (req, res) ->
  #todo: insert parameter checking
  Event.findById req.body._id, (err, data) ->
    if err
      # previously fell through to the !data check and responded twice
      res.status(500).send error: "Could not update event #{err}"
    else if !data or data.length is 0
      res.status(500).send error: "Could not find event #{req.body._id}"
    else
      event = mapEvent data, req
      event.save (err) ->
        if err
          res.status(500).send error: "Could not create event #{err}"
        else
          res.json event
# DELETE: removes the event identified by req.query.id and echoes the
# removed document.
deleteEvent: (req, res) ->
  #todo: insert parameter checking
  Event.findById {_id: req.query.id}, (err, data) ->
    # previously the error path fell through to data.remove (crashing on a
    # null document) and could answer the request twice
    if err or !data
      res.status(500).send error: "Could not find event #{err}"
    else
      data.remove (removeErr) ->
        if removeErr
          res.status(500).send error: "Could not delete event #{removeErr}"
        else
          res.json data
# GET: the union of all tags over all events (duplicates removed).
getEventTags: (req, res) ->
  Event.find {}, (err, events) ->
    if err
      # previously `error "..."` (missing colon) called a non-existent
      # function and the loop below still ran over an undefined list
      res.status(500).send error: "Could not load events #{err}"
    else
      result = []
      for event in events
        if event.tags
          Array.prototype.push.apply result, event.tags
      res.json Array.from(new Set(result))
# GET: total amount per federal state (AT-1 .. AT-9); states without a
# matching transfer are padded in with amount 0.
federalstates: (req, res) ->
  try
    period = {}
    period['$gte'] = parseInt(req.query.from) if req.query.from
    period['$lte'] = parseInt(req.query.to) if req.query.to
    paymentTypes = req.query.pType or []
    paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
    organisationTypes = req.query.orgTypes or []
    organisationTypes = [organisationTypes] if organisationTypes not instanceof Array
    query = {}
    (query.transferType =
      $in: paymentTypes.map (e)->
        parseInt(e)) if paymentTypes.length > 0
    (query.organisationType =
      $in: organisationTypes.map (e)->
        (e)) if organisationTypes.length > 0
    if period.$gte? or period.$lte?
      query.period = period
    group =
      _id:
        federalState: "$federalState"
      amount:
        $sum: "$amount"
    Transfer.aggregate($match: query)
      .group(group)
      .project(
        federalState: "$_id.federalState",
        _id: 0
        amount: 1
      )
      .sort('federalState')
      .exec()
      .then (result) ->
        # pad the nine Austrian states that have no data with amount 0
        # (removed a redundant `found = false` dead store in the inner loop)
        for i in [1...10]
          found = false
          for r in result
            if r.federalState is "AT-" + i
              found = true
              break
          if !found
            result.push {
              amount: 0,
              federalState: "AT-" + i
            }
        res.status(200).send result
      .catch (error) ->
        console.log "Error query data for map: #{error}"
        res.status(500).send error: "Could not get data for map #{error}"
  catch error
    console.log error
    res.status(500).send("Error with query for map")
#Grouping
# GET: distinct organisation or media names (for 'org' also their
# federal state) usable as grouping members.
getPossibleGroupMembers: (req, res) ->
  type = req.query.orgType or 'org'
  nameField = if type is 'org' then 'organisation' else 'media'
  query = {}
  project =
    name: '$_id.name'
    _id: 0
  group =
    _id:
      name: "$#{nameField}"
  if type is 'org'
    group._id.federalState = '$federalState'
    project.federalState = '$_id.federalState'
  Transfer.aggregate($match: query)
    .group(group)
    .project(project)
    .sort('name')
    .exec()
    .then (result) ->
      res.status(200).send result
    .catch (error) ->
      console.log "Error query possible group members: #{error}"
      res.status(500).send error: "Could not get group members #{error}"
# POST: creates a grouping from req.body.params (owner is optional).
createGrouping: (req, res) ->
  grouping = new Grouping()
  grouping.name = req.body.params.name
  grouping.type = req.body.params.type
  grouping.region = req.body.params.region
  grouping.members = req.body.params.members
  grouping.isActive = req.body.params.isActive
  if req.body.params.owner?
    grouping.owner = req.body.params.owner
  grouping.save (err) ->
    if err
      res.status(500).send error: "Could not create grouping #{err}"
    else
      res.status(200).send grouping
# GET: groupings — a single one by id, or a page optionally filtered
# by type.
getGroupings: (req, res) ->
  query = {}
  if req.query.id?
    query._id = req.query.id
    page = parseInt "0"
    size = parseInt "1"
  else
    page = parseInt req.query.page or "0"
    size = parseInt req.query.size or "50"
    if req.query.type?
      query.type = req.query.type
  Grouping
    .find(query)
    .sort('name')
    .skip(page*size)
    .limit(parseInt(size))
    .exec()
    .then(
      (result) ->
        res.status(200).send result
    )
    .catch (
      (err) ->
        res.status(500).send error: "Could not read grouping(s) #{err}"
    )
# PUT: updates an existing grouping from req.body.params; an absent
# owner clears the stored owner.
updateGrouping: (req, res) ->
  if req.body.params._id?
    Grouping.findById(_id: req.body.params._id).exec()
    .then(
      (result) ->
        grouping = result
        grouping.name = req.body.params.name
        grouping.type = req.body.params.type
        grouping.region = req.body.params.region
        grouping.isActive = req.body.params.isActive
        grouping.members = req.body.params.members
        if req.body.params.owner?
          grouping.owner = req.body.params.owner
        else
          grouping.owner = ''
        grouping.save()
        .then (
          (updated) ->
            res.status(200).send updated
        )
    )
    .catch (
      (err) ->
        res.status(500).send error: "Could not update grouping #{err}"
    )
  else
    # previously the request was left unanswered when no id was given
    res.status(400).send error: "Could not update grouping: no id given"
# DELETE: removes the grouping identified by req.query.id and echoes
# the removed document.
deleteGroupings: (req, res) ->
  if req.query.id?
    Grouping.findByIdAndRemove(req.query.id).exec()
    .then(
      (removed) ->
        res.status(200).send removed
    )
    .catch (
      (err) ->
        res.status(500).send error: "Could not delete grouping #{err}"
    )
  else
    # previously this branch referenced an undefined `err` (ReferenceError);
    # a missing id is a client error, not a server error
    res.status(400).send error: "Could not delete grouping: no id given"
# GET: total number of groupings as {count: n}.
countGroupings: (req, res) ->
  Grouping.count().exec()
  .then(
    (counted) ->
      res.status(200).send({count :counted})
  )
  .catch (
    (err) ->
      res.status(500).send error: "Could not count groupings #{err}"
  )
# GET: members and type of the active grouping(s) with the given name.
getGroupingMembers: (req, res) ->
  query = {}
  query.isActive = true
  query.name = req.query.name
  Grouping.find(query)
  .select('members type -_id')
  .then(
    (members) ->
      res.status(200).send(members)
  )
  .catch (
    (err) ->
      res.status(500).send error: "Could not load grouping's member #{err}"
  )
# GET: sorted list of distinct organisation types as {type: ...}.
organisationTypes: (req, res) ->
  Transfer.aggregate(
    $match: {}
  )
  .group(
    _id:
      organisationType: "$organisationType"
  )
  .project(
    type: "$_id.organisationType", _id: 0
  )
  .sort("type")
  .exec()
  .then(
    (data) ->
      res.send data
    (err) -> res.status(500).send("Could not load organisation types (#{err})!")
  )
'use strict'
qfs = require 'q-io/fs'
fs = require 'fs'
config = require('meanio').loadConfig()
mongoose = require 'mongoose'
iconv = require 'iconv-lite'
_ = require 'lodash'
#mongooseWhen = require 'mongoose-when'
Q = require 'q'
#Promise = mongoose.Promise
sorty = require 'sorty'
#iconv.extendNodeEncodings()
Transfer = mongoose.model 'Transfer'
Event = mongoose.model 'Event'
Organisation = mongoose.model 'Organisation'
ZipCode = mongoose.model 'Zipcode'
Grouping = mongoose.model 'Grouping'
regex = /"?(.+?)"?;(\d{4})(\d);(\d{1,2});\d;"?(.+?)"?;(\d+(?:,\d{1,2})?).*/
# Sums the `total` field over all entries (used for the "others" value;
# replaces an earlier promise-based implementation). Returns 0 for [].
getTotalAmountOfTransfers = (entries) ->
  sum = 0
  sum += entry.total for entry in entries
  sum
# Collapses entries sharing the same `organisation` into a single entry
# whose `total` is their sum (workaround for missing grouping support).
# Keeps first-seen order and mutates the first entry per organisation.
mediaToFederalState = (mediaResult) ->
  byName = new Map()
  unique = []
  for entry in mediaResult
    if byName.has entry.organisation
      byName.get(entry.organisation).total += entry.total
    else
      byName.set entry.organisation, entry
      unique.push entry
  unique
# Populates `path` (selecting everything except _id) on the given query
# result via the Organisation model and returns the resulting promise.
getPopulateInformation = (sourceForPopulate, path) ->
  Organisation.populate sourceForPopulate, {path: path, select: '-_id'}
#Transfer of line to ZipCode
# Parses one "PLZ,Bundesland" CSV line, stores it as a ZipCode document
# (the "PLZ" header line is skipped) and returns the updated counter.
lineToZipCode = (line, numberOfZipCodes) ->
  splittedLine = line.split(",")
  if splittedLine.length != 2
    throw new Error('Upload expects another file format')
  #Skip first line
  if splittedLine[0] != 'PLZ'
    entry = new ZipCode()
    entry.zipCode = splittedLine[0]
    entry.federalState = splittedLine[1]
    # NOTE(review): save() is fire-and-forget; failures go unnoticed
    entry.save()
    numberOfZipCodes++
  numberOfZipCodes
# determines org type by name
# Classifies an organisation by matching its name against a fixed set of
# regex heuristics; the first matching rule wins (order matters).
# Returns one of: 'Company', 'Association', 'Chamber', 'Education',
# 'Foundation', 'Municipality', 'Fund', 'Policy-relevant', 'Ministry',
# 'City', 'Federal state', 'Agency', 'Museum' or 'Undetermined'.
determineOrganisationType = (organisationName) ->
  #public: state (Land), city (Stadt), municipality (Gemeinde)
  returnValue = 'Undetermined'
  regexCompany = /(.+G(?:es|esellschaft)?\.?m\.?b\.?H\.?.?$)|.*Gesellschaft?.*|.*AG$|.*OG$|.*KG$|(.* d.o.o?.).*|.*s.r.o?.$|.*Sp.? z?.*|.*spol.r.s.o.|.*Sp.z.o.o..*|.* S\.R\.L\.$|.* in Liq.*|.*unternehmung|.*Limited.*|.*AD$|.*S.P.A.*|.*S.P.R.L.|.*Iberica SL|.*likvidaci.*|.*p\.l\.c\./i
  regexIncorporatedCompany = /.* AG.*/
  regexAssociation = /.*(Verband).*|.*(Verein).*/i
  regexFoundation = /.*(Stiftung).*|.*(Holding)/i
  regexCity = /^Stadt .+|.*Stadtwerke.*/i
  regexMunicipality = /^(?:Markt)?gemeinde?.*|Stadtgemeinde .*|.*Sanitäts.*/i
  regexState = /^Land .+/ #Stadt Wien -- provincial
  regexMinistry = /^(?:Bundesministerium|Bundeskanzleramt)/
  regexAgency = /.*(Bundesamt|Patentamt|Parlamentsdirektion|Präsidentschaftskanzlei|Verfassungsgerichtshof|Volksanwaltschaft|.*Agency.*|Arbeitsmarktservice|Agentur.*)/i #national - public agency
  regexFund = /.*Fonds?.*/i
  regexChamber = /.*?Kammer?.*/i
  regexPolicyRelevant = /^(Alternativregion).*|.*BIFIE|.*FMA|.*Sprengel?.*|^Kleinregion .*|Arbeitsmarktservice|Verwaltungsgerichtshof/i
  regexEducation = /.*(Alumni).*|.*(Universit).*|.*(Hochsch).*|.*Mittelschul.*|.*Schul.*|.*Päda.*/i
  regexMuseum = /Albertina|.*Museum.*|.*Belvedere.*/i
  if organisationName is '<NAME>'
    returnValue = 'Federal state'
  else if organisationName.match regexCompany
    returnValue = 'Company'
  else if organisationName.match regexIncorporatedCompany
    returnValue = 'Company'
  else if organisationName.match regexAssociation
    returnValue = 'Association'
  else if organisationName.match regexChamber
    returnValue = 'Chamber'
  else if organisationName.match regexEducation
    returnValue = 'Education'
  else if organisationName.match regexFoundation
    returnValue = 'Foundation'
  else if organisationName.match regexMunicipality
    returnValue = 'Municipality'
  else if organisationName.match regexFund
    returnValue = 'Fund'
  else if organisationName.match regexPolicyRelevant
    returnValue = 'Policy-relevant'
  else if organisationName.match regexMinistry
    returnValue = 'Ministry'
  else if organisationName.match regexCity
    returnValue = 'City'
  else if organisationName.match regexState
    returnValue = 'Federal state'
  else if organisationName.match regexAgency
    returnValue = 'Agency'
  else if organisationName.match regexMuseum
    returnValue = 'Museum'
  # BUG FIX: the check compared against lowercase 'undetermined' and never fired
  console.log "Undetermined organisation type for: " + organisationName if returnValue is 'Undetermined'
  returnValue
#Transfer of line to Organisation
# Parses one semicolon-separated organisation line, determines its type
# and federal state (via zip-code lookup), saves it and updates the
# feedback counters. Returns the feedback object (or a promise for it).
lineToOrganisation = (line, feedback) ->
  if not feedback
    console.log "THIS SHOULD NOT HAPPEN: Supposed to parse line #{line} but got no feedback object!"
  splittedLine = line.split(";")
  #Skip first and last lines
  if splittedLine[0] != 'Bezeichnung des Rechtsträgers' and splittedLine[0] != ''
    organisation = new Organisation()
    organisation.name = splittedLine[0]
    organisation.street = splittedLine[1]
    organisation.zipCode = splittedLine[2]
    organisation.city_de = splittedLine[3]
    organisation.country_de = splittedLine[4]
    # Setting the org type
    organisation.type = determineOrganisationType splittedLine[0]
    # federal state can only be resolved for Austrian addresses
    ZipCode.findOne({'zipCode': splittedLine[2]})
    .then (results) ->
      if results and organisation.country_de is 'Österreich'
        organisation.federalState = results.federalState
      else
        organisation.federalState = 'Unknown'
      organisation.save()
    .then (ok) ->
      feedback.entries++
      feedback.notAustria++ if organisation.country_de != 'Österreich'
      if organisation.federalState is 'Unknown' and organisation.country_de is 'Österreich'
        feedback.unknownFederalState++
        feedback.unknownFederalStateEntries.push organisation
      # Feedback for org type
      switch organisation.type
        when 'Company' then feedback.organisationTypeCompany++
        when 'Association' then feedback.organisationTypeAssociation++
        when 'Chamber' then feedback.organisationTypeChamber++
        when 'Education' then feedback.organisationTypeEducation++
        when 'Foundation' then feedback.organisationTypeFoundation++
        when 'Municipality' then feedback.organisationTypeMunicipality++
        when 'Fund' then feedback.organisationTypeFund++
        when 'Undetermined' then feedback.undeterminedOrganisationType++
        when 'Policy-relevant' then feedback.organisationTypePolicyRelevant++
        when 'Ministry' then feedback.organisationTypeMinistry++
        when 'City' then feedback.organisationTypeCity++
        when 'Federal state' then feedback.organisationTypeState++
        when 'Agency' then feedback.organisationTypeAgency++
        when 'Museum' then feedback.organisationTypeMuseum++
      feedback
    .catch (err) ->
      feedback.errors+=1
      feedback.errorEntries.push {organisation: organisation, errorMessage: err.errmsg, errorCode: err.code}
      console.log "ERROR: Could not store organisation #{organisation.name}"
      feedback
  else
    feedback.ignoredEntries++;
    feedback
# Parses one transfer CSV line (via the module-level `regex`), resolves
# the organisation reference (falling back to the 'Unknown' placeholder
# document) and updates the feedback statistics. Returns the feedback
# object (or a promise for it); non-matching lines pass through as-is.
lineToTransfer = (line, feedback) ->
  if not feedback
    console.log "THIS SHOULD NOT HAPPEN: Supposed to parse line #{line} but got no feedback object!"
  m = line.match regex
  if m
    transfer = new Transfer()
    # unescape doubled quotes from the CSV
    transfer.organisation = m[1].replace /""/g,'"'
    transfer.year = parseInt m[2]
    transfer.quarter = parseInt m[3]
    transfer.transferType = parseInt m[4]
    # normalize media names: strip protocol/www, lowercase .at/.com domains
    transfer.media = m[5].replace('""','"').replace(/http:\/\//i,'').replace('www.','').replace(/([\w\.-]+(?:\.at|\.com))/,(m)->m.toLowerCase())
    # period = YYYYQ, e.g. 20131
    transfer.period = parseInt(m[2] + m[3])
    # amounts use a decimal comma
    transfer.amount = parseFloat m[6].replace ',', '.'
    transfer.organisationType = determineOrganisationType transfer.organisation
    #Save reference
    Organisation.findOne({ 'name': transfer.organisation }, 'name federalState')
    .then (results) ->
      if results
        transfer.organisationReference = results._id
        transfer.federalState = results.federalState
        transfer.save()
      else
        console.log "WARNING: Could not find reference for #{transfer.organisation}!"
        Organisation.findOne name: "Unknown"
        .then (unknown) ->
          if unknown
            console.log "Setting org-reference for #{transfer.organisation} to 'Unknown' (#{unknown._id})"
            transfer.federalState = 'Unknown'
            transfer.organisationReference = unknown._id
            # record each unresolved organisation name only once
            unknownOrganisationNames = (org.organisation for org in feedback.unknownOrganisations)
            feedback.unknownOrganisations.push {organisation: transfer.organisation} if transfer.organisation not in unknownOrganisationNames
            transfer.save()
          else
            feedback.errors+=1
            throw new Error("'Unknown' as placeholder was not found in organisation collection")
    .then (ok) ->
      feedback.quarter = transfer.quarter
      feedback.year = transfer.year
      feedback.entries++
      feedback.paragraph2++ if transfer.transferType is 2
      feedback.paragraph4++ if transfer.transferType is 4
      feedback.paragraph31++ if transfer.transferType is 31
      feedback.sumParagraph2 += transfer.amount if transfer.transferType is 2
      feedback.sumParagraph4 += transfer.amount if transfer.transferType is 4
      feedback.sumParagraph31 += transfer.amount if transfer.transferType is 31
      feedback.sumTotal += transfer.amount
      feedback
    .catch (err) ->
      feedback.errors+=1
      feedback.errorEntries.push {errorMessage: err.errmsg, errorCode: err.code}
      console.log "Error while importing data: #{JSON.stringify err}"
      feedback
  else feedback
# Copies the event attributes from the request body onto `event` and
# returns it; `numericEndDate` is only set when present and truthy.
mapEvent = (event, req) ->
  body = req.body
  for field in ['name', 'startDate', 'numericStartDate', 'endDate', 'predictable', 'tags', 'region']
    event[field] = body[field]
  event.numericEndDate = body.numericEndDate if body.numericEndDate
  event
# Merges grouping definitions into the top-transfer list: all member
# entries of a grouping are replaced by one "(G) <name>" entry holding
# their summed total; the merged list is then re-sorted by total (desc)
# and cut down to `limit` entries.
handleGroupings = (groupings, transfers, limit) ->
  console.log ("found " + groupings.length + " gropings");
  console.log ("found " + transfers.length + " transfers");
  transfersWithGrouping = transfers
  for grouping in groupings
    groupingTransfersAmount = (transfer.total for transfer in transfersWithGrouping when transfer.organisation in grouping.members)
    groupingTransfersNames = (transfer.organisation for transfer in transfersWithGrouping when transfer.organisation in grouping.members)
    groupingTotalAmount = groupingTransfersAmount.reduce ((total, sum) -> total + sum),0
    #remove ALL transfers (filter) from results
    transfersWithGrouping = transfersWithGrouping.filter((transfer) ->
      transfer.organisation not in groupingTransfersNames
    )
    transfersWithGrouping.push({total: groupingTotalAmount, organisation: "(G) " + grouping.name, isGrouping: true})
  #Sort array of transfers by total amount
  sorty([{name: 'total', dir: 'desc', type: 'number'}], transfersWithGrouping)
  # splice(0, limit) RETURNS the removed head, i.e. the top `limit` entries
  transfersWithGrouping.splice(0,limit)
module.exports = (Transparency) ->
# GET: per-year overview — for every year the quarters with entry count
# and sum per transfer type, plus the year total.
overview: (req, res) ->
  queryPromise = Transfer.aggregate({$match: {}})
    .group(
      _id:
        quarter: "$quarter"
        year: "$year"
        transferType: "$transferType"
      entries: {$sum: 1}
      total:
        $sum: "$amount")
    .project(quarter: "$_id.quarter", year: "$_id.year", transferType: "$_id.transferType", _id: 0, entries: 1, total: 1)
    # second grouping collects the per-quarter documents under their year
    .group(
      _id:
        year: "$year"
      quarters:
        $addToSet: {quarter: "$quarter", transferType: "$transferType", entries: "$entries", total: "$total"}
      total:
        $sum: "$total")
    .project(year: "$_id.year", _id: 0, quarters: 1, total: 1)
    .sort("year")
    .exec()
  queryPromise.then(
    (result) ->
      res.send result
    (err) ->
      res.status(500).send "Could not load overview from Database: #{err}"
  )
# GET: distinct years present in the data, newest first, as {years: [...]}.
years: (req, res) ->
  queryPromise = Transfer.aggregate($match: {})
    .group(_id:
      year: "$year")
    .project(year: "$_id.year", _id: 0)
    .sort("-year")
    .exec()
  queryPromise.then(
    (result) ->
      res.send years: result.map (e)->
        e.year
    (err) ->
      res.status(500).send "Could not load overview from Database: #{err}"
  )
# POST: imports a latin1-encoded transfer CSV upload line by line
# (sequentially, via a promise chain) and reports import statistics.
upload: (req, res) ->
  file = req.files.file;
  feedback =
    quarter: 0
    year: 0
    entries: 0
    paragraph2: 0
    sumParagraph2: 0
    paragraph4: 0
    sumParagraph4: 0
    paragraph31: 0
    sumParagraph31: 0
    sumTotal: 0.0
    unknownOrganisations: []
    errors: 0
    errorEntries: []
  fs.readFile file.path, (err,data) ->
    if err
      res.send 500, "Error #{err.message}"
    else
      input = iconv.decode data,'latin1'
      # chain one lineToTransfer call per line, threading `feedback` through
      input.split("\n").reduce ((p,line) -> p.then((f) -> lineToTransfer line, f)), Q.fcall(->feedback)
      .then (ok) ->
        Transfer.count()
        .then(
          (transfersInDatabase) ->
            feedback.savedInDatabase = transfersInDatabase
            feedback.integrityCheck = true
            res.status(200).send(feedback)
        )
      .catch (err) ->
        res.send 500, "Error #{err.message}"
#Function for the upload of organisation-address-data
# POST: imports a utf8 organisation CSV upload line by line
# (sequentially, via a promise chain) and reports import statistics.
uploadOrganisation: (req, res) ->
  file = req.files.file;
  feedback =
    entries: 0
    ignoredEntries: 0
    unknownFederalState: 0,
    unknownFederalStateEntries: [],
    undeterminedOrganisationType: 0,
    organisationTypeCompany: 0,
    organisationTypeAssociation: 0,
    organisationTypeFoundation: 0,
    organisationTypeMunicipality: 0,
    organisationTypeState: 0,
    organisationTypeCity: 0,
    organisationTypeMinistry: 0,
    organisationTypeAgency: 0,
    organisationTypeFund: 0,
    organisationTypeChamber: 0,
    organisationTypePolicyRelevant: 0,
    organisationTypeEducation: 0,
    organisationTypeMuseum: 0,
    notAustria: 0,
    errors:0
    errorEntries: []
  fs.readFile file.path, (err,data) ->
    if err
      res.status(500).send("Error #{err.message}")
    else
      input = iconv.decode data, 'utf8'
      # chain one lineToOrganisation call per line, threading `feedback`
      input.split("\n").reduce ((p,line) -> p.then((f) -> lineToOrganisation line, f)), Q.fcall(->feedback)
      .then (ok) ->
        Organisation.count()
        .then(
          (organisationsInDatabase) ->
            feedback.savedInDatabase = organisationsInDatabase
            feedback.integrityCheck = true
            res.status(200).send(feedback)
        )
      .catch (err) ->
        res.send 500, "Error #{err.message}"
#Function for the upload of zip-code data
# POST: imports a utf8 "PLZ,Bundesland" CSV mapping zip codes to federal
# states (synchronously per line via lineToZipCode).
uploadZipCode: (req, res) ->
  file = req.files.file;
  response =
    newZipCodes: 0
    integrityCheck: false
    savedInDatabase: 0
  fs.readFile file.path, (err,data) ->
    if err
      res.status(500).send("Error #{err.message}")
    else
      input = iconv.decode data, 'utf8'
      response.newZipCodes = lineToZipCode(line,response.newZipCodes) for line in input.split('\n')
      ZipCode.count()
      .then(
        (codesInDatabase) ->
          response.savedInDatabase = codesInDatabase
          response.integrityCheck = true
          res.status(200).send(response)
        (error) ->
          res.send 500, "Error #{error}"
      )
# GET: distinct (year, quarter, period) triples, newest first.
periods: (req, res) ->
  Transfer.aggregate(
    $match: {}
  )
  .group(
    _id:
      year: "$year", quarter: "$quarter", period: "$period"
  )
  .project(
    year: "$_id.year", quarter: "$_id.quarter", period: "$_id.period", _id: 0
  )
  .sort("-year -quarter")
  .exec()
  .then(
    (data) ->
      res.send data
    (err) -> res.status(500).send("Could not load periods (#{err})!")
  )
# GET: money flows between explicitly selected organisations and media;
# flows to/from everything outside the selection are collapsed into
# "Other media" / "Other organisations" pseudo entries.
filteredflows: (req, res) ->
  # helper: flows from the selected organisations into all NON-selected
  # media, collapsed into one "Other media" target per organisation/type.
  # `federalState` is currently unused by the callers (always "").
  getOtherMedia = (organisations, media, period, paymentTypes, federalState) ->
    result = []
    if (organisations and organisations.length > 0) and (media and media.length > 0)
      qry = {}
      (qry.transferType = $in: paymentTypes.map (e)->
        parseInt(e)) if paymentTypes.length > 0
      (qry.organisation = $in: organisations) if organisations.length > 0
      (qry.media = $nin: media) if media.length > 0
      if period.$gte? or period.$lte?
        qry.period = period
      grp =
        _id:
          organisation: "$organisation"
          organisationReference: "$organisationReference"
          transferType: "$transferType"
        amount:
          $sum: "$amount"
      Transfer.aggregate($match: qry)
      .group grp
      .exec()
      .then (rslt) ->
        for data in rslt
          result.push {
            amount: data.amount,
            organisation: data._id.organisation,
            transferType: data._id.transferType,
            media: "Other media"
          }
        result
    else
      new Promise (resolve, reject) ->
        resolve result
  # helper: mirror image — flows from all NON-selected organisations into
  # the selected media, collapsed into one "Other organisations" source.
  getOtherOrganisations = (organisations, media, period, paymentTypes, federalState) ->
    result = []
    if (media and media.length > 0) and (organisations and organisations.length > 0)
      qry = {}
      (qry.transferType = $in: paymentTypes.map (e)->
        parseInt(e)) if paymentTypes.length > 0
      (qry.organisation = $nin: organisations) if organisations.length > 0
      (qry.media = $in: media) if media.length > 0
      if period.$gte? or period.$lte?
        qry.period = period
      grp =
        _id:
          media: "$media"
          transferType: "$transferType"
        amount:
          $sum: "$amount"
      Transfer.aggregate($match: qry)
      .group grp
      .exec()
      .then (rslt) ->
        for data in rslt
          result.push {
            amount: data.amount,
            media: data._id.media,
            transferType: data._id.transferType,
            organisation: "Other organisations"
          }
        result
    else
      new Promise (resolve, reject) ->
        resolve result
  try
    maxLength = parseInt req.query.maxLength or "750"
    federalState = req.query.federalState or ''
    period = {}
    period['$gte'] = parseInt(req.query.from) if req.query.from
    period['$lte'] = parseInt(req.query.to) if req.query.to
    paymentTypes = req.query.pType or []
    paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
    query = {}
    (query.transferType =
      $in: paymentTypes.map (e)->
        parseInt(e)) if paymentTypes.length > 0
    organisations = req.query.organisations or []
    organisations = [organisations] if organisations not instanceof Array
    media = req.query.media or []
    media = [media] if media not instanceof Array
    (query.organisation = $in: organisations) if organisations.length > 0
    (query.media = $in: media) if media.length > 0
    if period.$gte? or period.$lte?
      query.period = period
    group =
      _id:
        organisation: "$organisation"
        organisationReference: "$organisationReference"
        transferType: "$transferType"
        media: "$media"
      amount:
        $sum: "$amount"
    Transfer.aggregate($match: query)
      .group(group)
      .project(
        organisation: "$_id.organisation",
        organisationReference: "$_id.organisationReference",
        transferType: "$_id.transferType",
        media: "$_id.media"
        _id: 0
        amount: 1
      )
      .exec()
      .then (result) ->
        populatedPromise = getPopulateInformation(result, 'organisationReference')
        .then(
          (isPopulated) ->
            # filter by state after populating the organisation reference
            # NOTE(review): relies on a `federalState_en` field on the
            # populated organisation — verify against the schema
            if federalState
              result = (transfer for transfer in result when transfer.organisationReference.federalState_en is federalState)
            getOtherMedia(organisations, media, period, paymentTypes, "").then (otherMedia) ->
              result = result.concat otherMedia
              getOtherOrganisations(organisations, media, period, paymentTypes, "").then (otherOrganisations) ->
                result = result.concat otherOrganisations
                if result.length > maxLength
                  res.status(413).send {
                    error: "You query returns more then the specified maximum of #{maxLength}"
                    length: result.length
                  }
                else
                  res.json result
        )
      .catch (err) ->
        res.status(500).send error: "Could not load money flow: #{err}"
  catch error
    res.status(500).send error: "Could not load money flow: #{error}"
# GET: quarterly payment time series (nvd3 format) for the given source
# organisations (`source`) and target media (`target`).
flowdetail: (req, res) ->
  try
    # transfer paragraph types; defaults to §2 payments
    paymentTypes = req.query.pType or ['2']
    paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
    source = req.query.source or []
    target = req.query.target or []
    source = [source] if source not instanceof Array
    target = [target] if target not instanceof Array
    query = {}
    if source.length > 0 then query.organisation = $in: source;
    if target.length > 0 then query.media = $in: target;
    (query.transferType =
      $in: paymentTypes.map (e)->
        parseInt(e)) if paymentTypes.length > 0
    Transfer.find query, {}, {sort: {year: 1, quarter: 1}}, (err, transfers) ->
      result = {
        data:
          {
            key: "<KEY>"
            values: []
          }
        tickvalues: []
      }
      i = 0
      tmpObj = {
      }
      #find all years
      Transfer.distinct 'year', (error, data) ->
        if !error
          years = data
          years.sort()
          tmpResult = {}
          tickvalues = []
          # one tick per quarter, encoded as year + quarter/4
          for year in years
            for quarter in [0...4]
              tmpObj[year + (quarter/4)] = 0
              tickvalues.push (year + (quarter/4))
          tickvalues.sort()
          # accumulate the matching transfers into their quarter bucket
          for transfer in transfers
            tmpObj[""+ (transfer.year + (transfer.quarter-1)/4)] += transfer.amount
          result.tickvalues = tickvalues
          for tickvalue in tickvalues
            result.data.values.push [tickvalue, tmpObj[tickvalue]]
          res.json result
        else
          res.status 500
            .send "Could not load years from database! #{error}"
  catch error
    res.status(500).send error: "Could not load money flow: #{error}"
annualcomparison: (req, res) ->
  # Builds one nvd3-style series per year for a year-over-year comparison:
  # four {x, y} points per year (x = (quarter-1)/4, y = summed amount),
  # filtered by optional source organisations, target media and payment
  # types (default paragraph 2). Each series gets a random colour.
  try
    paymentTypes = req.query.pType or ['2']
    paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
    source = req.query.source or []
    target = req.query.target or []
    source = [source] if source not instanceof Array
    target = [target] if target not instanceof Array
    query = {}
    if source.length > 0 then query.organisation = $in: source;
    if target.length > 0 then query.media = $in: target;
    (query.transferType =
      $in: paymentTypes.map (e)->
        parseInt(e)) if paymentTypes.length > 0
    years = []
    #find all years
    Transfer.distinct 'year', (error, data) ->
      if !error
        years = data
        years.sort()
        # initialise a zeroed quarter bucket for every known year
        tmpResult = {}
        for year in years
          tmpResult[""+year] = {
            quarters: {
              '1': 0
              '2': 0
              '3': 0
              '4': 0
            }
          }
      else
        res.status 500
        .send "Could not load years from database! #{error}"
      # NOTE(review): tmpResult is populated in the distinct callback and
      # read in the find callback below; this relies on the find callback
      # running after tmpResult exists — confirm the intended nesting, an
      # unlucky ordering would make tmpResult undefined here.
      Transfer.find query, {}, {sort: {year: 1, quarter: 1}, transferType: 1}, (err, transfers) ->
        # accumulate each transfer's amount into its (year, quarter) bucket
        for transfer in transfers
          tmpResult[""+transfer.year].quarters[""+transfer.quarter] += transfer.amount
        result = []
        for year, quarters of tmpResult
          quarterArr = []
          for quarter, amount of quarters.quarters
            quarterArr.push {
              x: (Number(quarter)-1)/4
              y: amount
            }
          result.push {
            key: year
            # NOTE(review): toString(16) of a small random value can yield
            # fewer than six hex digits — some series colours may be short.
            color: '#'+(Math.random()*0xFFFFFF<<0).toString(16)
            values: quarterArr
          }
        res.json result
  catch error
    res.status(500).send error: "Could not load money flow: #{error}"
flows: (req, res) ->
  # Aggregates transfers into (organisation, transferType, media) flow
  # triples with summed amounts for the flow view.
  # Optional filters: period range (from/to), payment types, a name bound
  # to either organisation or media (depending on orgType), a free-text
  # filter (case-insensitive regex on organisation OR media) and a
  # federal state. Responds 413 when the result exceeds maxLength
  # (default 750), otherwise the JSON result array.
  try
    maxLength = parseInt req.query.maxLength or "750"
    federalState = req.query.federalState if req.query.federalState
    period = {}
    period['$gte'] = parseInt(req.query.from) if req.query.from
    period['$lte'] = parseInt(req.query.to) if req.query.to
    paymentTypes = req.query.pType or []
    paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
    orgType = req.query.orgType or 'org'
    name = req.query.name
    query = {}
    (query.transferType =
      $in: paymentTypes.map (e)->
        parseInt(e)) if paymentTypes.length > 0
    # 'name' narrows either the source (organisation) or the target (media)
    query[if orgType is 'org' then 'organisation' else 'media'] = name if name
    if period.$gte? or period.$lte?
      query.period = period
    if req.query.filter
      filter = req.query.filter
      query.$or = [
        {organisation: { $regex: ".*#{filter}.*", $options: "i"}}
        {media: { $regex: ".*#{filter}.*", $options: "i"}}
      ]
    if federalState?
      query.federalState = federalState
    group =
      _id:
        organisation: "$organisation"
        transferType: "$transferType"
        media: "$media"
      amount:
        $sum: "$amount"
    Transfer.aggregate($match: query)
    .group(group)
    .project(
      organisation: "$_id.organisation",
      transferType: "$_id.transferType",
      media: "$_id.media"
      _id: 0
      amount: 1
    )
    .exec()
    .then (result) ->
      if result.length > maxLength
        res.status(413).send {
          # fixed message (was: "You query returns more then ...")
          error: "Your query returns more than the specified maximum of #{maxLength}"
          length: result.length
        }
      else
        res.json result
    .catch (err) ->
      res.status(500).send error: "Could not load money flow: #{err}"
  catch error
    res.status(500).send error: "Could not load money flow: #{error}"
topEntries: (req, res) ->
promiseToFullfill = []
federalState = req.query.federalState if req.query.federalState
includeGroupings = req.query.groupings if req.query.groupings
period = {}
period['$gte'] = parseInt(req.query.from) if req.query.from
period['$lte'] = parseInt(req.query.to) if req.query.to
orgType = req.query.orgType or 'org'
paymentTypes = req.query.pType or ['2']
paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
limitOfResults = parseInt(req.query.x or '10')
orgCategories = req.query.orgCategories if req.query.orgCategories
orgCategories = [orgCategories] if orgCategories not instanceof Array and req.query.orgCategories
query = {}
project =
organisation: '$_id.organisation'
_id: 0
total: 1
if period.$gte? or period.$lte?
query.period = period
query.transferType =
$in: paymentTypes.map (e)->
parseInt(e)
if federalState?
query.federalState = federalState
if orgCategories?
query.organisationType =
$in: orgCategories
group =
_id:
organisation: if orgType is 'org' then '$organisation' else '$media',
total:
$sum: '$amount'
options = {}
options.map = () ->
emit this.year, this.amount
options.reduce = (key, vals) ->
Array.sum vals
options.query = query
#console.log "Query: "
#console.log query
#console.log "Group: "
#console.log group
#console.log "Project: "
#console.log project
if not includeGroupings
topPromise = Transfer.aggregate($match: query)
.group(group)
.sort('-total')
.limit(limitOfResults)
.project(project)
.exec()
else
topPromise = Transfer.aggregate($match: query)
.group(group)
.sort('-total')
.project(project)
.exec()
promiseToFullfill.push(topPromise)
allPromise = Transfer.mapReduce options
promiseToFullfill.push allPromise
if includeGroupings
groupingQuery = {}
groupingQuery.isActive = true
groupingQuery.type = orgType
groupingQuery.region = if federalState then federalState else 'AT'
groupingsPromise = Grouping.find(groupingQuery)
.select('name owner members -_id')
.exec()
promiseToFullfill.push(groupingsPromise)
allPromise.then (r) ->
Q.all(promiseToFullfill)
.then (results) ->
try
result =
top: results[0]
all: results[1].reduce(
(sum, v)->
sum + v.value
0)
groupings: results[2] if results[2]
if result.groupings?
result.top = handleGroupings(result.groupings, result.top, limitOfResults)
res.send result
catch error
console.log error
res.status(500).send("No Data was found!")
.catch (err) ->
console.log "Error in Promise.when"
console.log err
res.status(500).send("Error #{err.message}")
search: (req,res) ->
name = req.query.name
federalState = req.query.federalState if req.query.federalState
if not name
res.status(400).send error: "'name' is required!"
return
types = if req.query.orgType then [req.query.orgType] else ['org','media']
buildRegex = (name,value) ->
q={}
q[name]= { $regex: ".*#{value}.*", $options: "i"}
q
performQuery = (orgType) ->
nameField = if orgType is 'org' then 'organisation' else 'media'
group =
_id:
name: "$#{nameField}"
type: orgType
years:
$addToSet: "$year"
total: $sum: "$amount"
transferTypes: $addToSet: "$transferType"
project =
name: '$_id.name'
_id: 0
years: 1
total: 1
transferTypes: 1
if orgType is 'org'
group._id.organisationType = '$organisationType'
group._id.federalState = '$federalState'
project.organisationType = '$_id.organisationType'
project.federalState = '$_id.federalState'
$or = name.split(' ').reduce ((a,n)-> q={};a.push buildRegex(nameField,n);a) ,[]
if not federalState
query = $or: $or
else
query = $and: $or
query.$and.push {"federalState": federalState}
Transfer.aggregate($match: query)
.group(group)
.project(project)
.sort('name')
.exec()
all = Q.all types.map (t) ->
performQuery t
all.then (results) ->
result = types.reduce ((r,t,index) -> r[t] = results[index];r),{}
res.json result
.catch (err) ->
res.status(500).send error: "Could not perform search"
list: (req,res) ->
types = if req.query.orgType then [req.query.orgType] else ['org','media']
page = parseInt req.query.page or "0"
size = parseInt req.query.size or "50"
federalState = req.query.federalState
performQuery = (orgType) ->
nameField = if orgType is 'org' then 'organisation' else 'media'
query = {}
if federalState?
query.federalState = federalState
project ={}
project =
name: '$_id.name'
_id: 0
years: 1
total: 1
transferTypes: 1
group =
_id:
name: "$#{nameField}"
type: orgType
years:
$addToSet: "$year"
total: $sum: "$amount"
transferTypes: $addToSet: "$transferType"
if orgType is 'org'
group._id.organisationType= '$organisationType'
project.organisationType = '$_id.organisationType'
Transfer.aggregate($match: query)
.group(group)
.project(project)
.sort('name').skip(page*size).limit(size)
.exec()
all = Q.all types.map (t) ->
performQuery t
all.then (results) ->
result = types.reduce ((r,t,index) -> r[t] = results[index];r),{}
res.json result
.catch (err) ->
res.status(500).send error: "Could not perform search #{err}"
count: (req,res) ->
  # Returns the number of distinct organisation (orgType 'org', default)
  # or media names in the Transfer collection, optionally restricted to
  # one federal state. The response body is a bare integer.
  type = req.query.orgType or 'org'
  federalState = req.query.federalState if req.query.federalState
  performQuery = (orgType) ->
    nameField = if orgType is 'org' then 'organisation' else 'media'
    query = {}
    group =
      _id:
        name: "$#{nameField}"
    if federalState
      query.federalState = federalState
      # NOTE(review): this groups on the literal federalState value rather
      # than the "$federalState" field path; since the $match already
      # restricts to that state the grouping key is effectively constant —
      # confirm this is intended.
      group._id.federalState = federalState
    Transfer.aggregate($match: query)
    .group(group)
    .exec()
  performQuery(type)
  .then (result) ->
    # the length of the grouped result is the distinct-name count
    res.json result.length
  .catch (err) ->
    res.status(500).send error: "Could not determine number of items #{err}"
getEvents: (req,res) ->
  # Reads events: by region, by id, or all of them. Responds 404 when
  # nothing matches, 500 on a database error, otherwise the JSON data.
  handleEventResponse = (err, data) ->
    if err
      res.status(500).send error: "Could not get events #{err}"
    else if !data or data.length is 0
      res.status(404).send()
    else
      res.json data
  #todo: insert parameter checking
  if req.query.region
    Event.find {region: req.query.region}, handleEventResponse
  else if req.query.id
    Event.findById req.query.id, handleEventResponse
  else
    Event.find {}, handleEventResponse
createEvent: (req,res) ->
  # Creates a new Event from the request body (see mapEvent for the
  # copied fields) and echoes the saved document.
  #todo: insert parameter checking
  event = new Event()
  event = mapEvent event, req
  event.save (err) ->
    if err
      res.status(500).send error: "Could not create event #{err}"
    else
      res.json event
updateEvent: (req, res) ->
  # Updates the Event identified by req.body._id with the fields mapped
  # by mapEvent and echoes the saved document.
  #todo: insert parameter checking
  Event.findById req.body._id, (err, data) ->
    if err
      # return so we do not fall through and send a second response
      return res.status(500).send error: "Could not update event #{err}"
    if !data or data.length is 0
      res.status(500).send error: "Could not find event #{req.body._id}"
    else
      event = mapEvent data, req
      event.save (saveErr) ->
        if saveErr
          # fixed message (was "Could not create event", a copy/paste slip)
          res.status(500).send error: "Could not update event #{saveErr}"
        else
          res.json event
deleteEvent: (req, res) ->
  # Deletes the Event with id req.query.id and echoes the removed
  # document. 404 when the id matches nothing.
  #todo: insert parameter checking
  Event.findById {_id: req.query.id}, (err, data) ->
    if err
      return res.status(500).send error: "Could not find event #{err}"
    # guard: findById yields null for an unknown id — previously this
    # crashed on data.remove
    if !data
      return res.status(404).send error: "Could not find event #{req.query.id}"
    data.remove (removeErr) ->
      if removeErr
        # return so the success payload is not sent after the error
        return res.status(500).send error: "Could not delete event #{removeErr}"
      res.json data
getEventTags: (req, res) ->
  # Collects the deduplicated union of all tags across all events.
  Event.find {}, (err, events) ->
    if err
      # fixed: was `send error "..."` (missing colon) which tried to call
      # an undefined function `error(...)` instead of sending a payload,
      # and execution then fell through to the success path
      return res.status(500).send error: "Could not load events #{err}"
    result = []
    for event in events
      if event.tags
        Array.prototype.push.apply result, event.tags
    res.json Array.from(new Set(result))
federalstates: (req, res) ->
  # Sums transfer amounts per federal state (AT-1 .. AT-9) for the map
  # view, filtered by optional period, payment types and organisation
  # types. States absent from the aggregation are padded in with 0.
  try
    period = {}
    period['$gte'] = parseInt(req.query.from) if req.query.from
    period['$lte'] = parseInt(req.query.to) if req.query.to
    paymentTypes = req.query.pType or []
    paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
    organisationTypes = req.query.orgTypes or []
    organisationTypes = [organisationTypes] if organisationTypes not instanceof Array
    query = {}
    (query.transferType =
      $in: paymentTypes.map (e)->
        parseInt(e)) if paymentTypes.length > 0
    (query.organisationType =
      $in: organisationTypes.map (e)->
        (e)) if organisationTypes.length > 0
    if period.$gte? or period.$lte?
      query.period = period
    group =
      _id:
        federalState: "$federalState"
      amount:
        $sum: "$amount"
    Transfer.aggregate($match: query)
    .group(group)
    .project(
      federalState: "$_id.federalState",
      _id: 0
      amount: 1
    )
    .sort('federalState')
    .exec()
    .then (result) ->
      # pad the nine Austrian states missing from the aggregation
      for i in [1...10]
        found = false
        for r in result
          # (this inner reset is redundant — the loop breaks on a match)
          found = false
          if r.federalState is "AT-" + i
            found = true
            break
        if !found
          result.push {
            amount: 0,
            federalState: "AT-" + i
          }
      res.status(200).send result
    .catch (error) ->
      console.log "Error query data for map: #{error}"
      res.status(500).send error: "Could not get data for map #{error}"
  catch error
    console.log error
    res.status(500).send("Error with query for map")
#Grouping
getPossibleGroupMembers: (req, res) ->
  # Lists all distinct organisation (orgType 'org', default) or media
  # names as candidates for grouping membership, sorted by name; for
  # organisations the federal state is included.
  type = req.query.orgType or 'org'
  nameField = if type is 'org' then 'organisation' else 'media'
  query = {}
  project =
    name: '$_id.name'
    _id: 0
  group =
    _id:
      name: "$#{nameField}"
  if type is 'org'
    group._id.federalState = '$federalState'
    project.federalState = '$_id.federalState'
  #console.log 'Query:'
  #console.log query
  #console.log 'Group'
  #console.log group
  #console.log 'Project'
  #console.log project
  Transfer.aggregate($match: query)
  .group(group)
  .project(project)
  .sort('name')
  .exec()
  .then (result) ->
    res.status(200).send result
  .catch (error) ->
    console.log "Error query possible group members: #{error}"
    res.status(500).send error: "Could not get group members #{error}"
createGrouping: (req, res) ->
  # Creates a Grouping from req.body.params (name, type, region, members,
  # isActive, optional owner) and echoes the saved document.
  grouping = new Grouping()
  grouping.name = req.body.params.name
  grouping.type = req.body.params.type
  grouping.region = req.body.params.region
  grouping.members = req.body.params.members
  grouping.isActive = req.body.params.isActive
  if req.body.params.owner?
    grouping.owner = req.body.params.owner
  grouping.save (err) ->
    if err
      res.status(500).send error: "Could not create grouping #{err}"
    else
      res.status(200).send grouping
getGroupings: (req, res) ->
  # Reads groupings: a single one by req.query.id (page/size forced to
  # 0/1), or a page of them (page/size query params, defaults 0/50),
  # optionally filtered by type; always sorted by name.
  query = {}
  if req.query.id?
    query._id = req.query.id
    page = parseInt "0"
    size = parseInt "1"
  else
    page = parseInt req.query.page or "0"
    size = parseInt req.query.size or "50"
  if req.query.type?
    query.type = req.query.type
  Grouping
  .find(query)
  .sort('name')
  .skip(page*size)
  .limit(parseInt(size))
  .exec()
  .then(
    (result) ->
      res.status(200).send result
  )
  .catch (
    (err) ->
      res.status(500).send error: "Could not read grouping(s) #{err}"
  )
updateGrouping: (req, res) ->
  # Loads the grouping by req.body.params._id, overwrites its fields from
  # the request (owner is cleared when absent) and returns the saved doc.
  # NOTE(review): when _id is missing no response is sent at all, so the
  # request hangs — confirm callers always provide _id.
  if req.body.params._id?
    Grouping.findById(_id: req.body.params._id).exec()
    .then(
      (result) ->
        grouping = result
        grouping.name = req.body.params.name
        grouping.type = req.body.params.type
        grouping.region = req.body.params.region
        grouping.isActive = req.body.params.isActive
        grouping.members = req.body.params.members
        if req.body.params.owner?
          grouping.owner = req.body.params.owner
        else
          grouping.owner = ''
        grouping.save()
        .then (
          (updated) ->
            res.status(200).send updated
        )
    )
    .catch (
      (err) ->
        res.status(500).send error: "Could not update grouping #{err}"
    )
deleteGroupings: (req, res) ->
  # Deletes the grouping with id req.query.id and echoes the removed
  # document; 400 when no id is supplied.
  if req.query.id?
    Grouping.findByIdAndRemove(req.query.id).exec()
    .then(
      (removed) ->
        res.status(200).send removed
    )
    .catch (
      (err) ->
        res.status(500).send error: "Could not delete grouping #{err}"
    )
  else
    # fixed: this branch interpolated an undefined `err` (ReferenceError
    # at runtime) and reported a server error for a client mistake
    res.status(400).send error: "Could not delete grouping: missing 'id' parameter"
countGroupings: (req, res) ->
  # Returns the total number of groupings as {count: n}.
  Grouping.count().exec()
  .then(
    (counted) ->
      res.status(200).send({count :counted})
  )
  .catch (
    (err) ->
      res.status(500).send error: "Could not count groupings #{err}"
  )
getGroupingMembers: (req, res) ->
  # Returns members and type of the active grouping(s) whose name matches
  # req.query.name (the internal _id is excluded from the projection).
  query = {}
  query.isActive = true
  query.name = req.query.name
  Grouping.find(query)
  .select('members type -_id')
  .then(
    (members) ->
      res.status(200).send(members)
  )
  .catch (
    (err) ->
      res.status(500).send error: "Could not load grouping's member #{err}"
  )
organisationTypes: (req, res) ->
  # Lists the distinct organisationType values present on transfers,
  # sorted alphabetically, as [{type: ...}].
  Transfer.aggregate(
    $match: {}
  )
  .group(
    _id:
      organisationType: "$organisationType"
  )
  .project(
    type: "$_id.organisationType", _id: 0
  )
  .sort("type")
  .exec()
  .then(
    (data) ->
      res.send data
    (err) -> res.status(500).send("Could not load organisation types (#{err})!")
  )
| true | 'use strict'
qfs = require 'q-io/fs'
fs = require 'fs'
config = require('meanio').loadConfig()
mongoose = require 'mongoose'
iconv = require 'iconv-lite'
_ = require 'lodash'
#mongooseWhen = require 'mongoose-when'
Q = require 'q'
#Promise = mongoose.Promise
sorty = require 'sorty'
#iconv.extendNodeEncodings()
Transfer = mongoose.model 'Transfer'
Event = mongoose.model 'Event'
Organisation = mongoose.model 'Organisation'
ZipCode = mongoose.model 'Zipcode'
Grouping = mongoose.model 'Grouping'
regex = /"?(.+?)"?;(\d{4})(\d);(\d{1,2});\d;"?(.+?)"?;(\d+(?:,\d{1,2})?).*/
#returns value for "others" / replaces promise
getTotalAmountOfTransfers = (entries) ->
  # Sums the `total` field over all entries; 0 for an empty list.
  entries.reduce ((acc, entry) -> acc + entry.total), 0
#matches media to federalState (due to lack of grouping)
mediaToFederalState = (mediaResult) ->
  # Collapses duplicate `organisation` entries by adding each later
  # duplicate's `total` onto the first occurrence; preserves first-seen
  # order. NOTE: mutates the first-occurrence objects in place, exactly
  # like the list-scan version it replaces.
  firstSeen = {}
  merged = []
  for entry in mediaResult
    key = entry.organisation
    if firstSeen[key]?
      # already collected: fold this duplicate's total into the original
      firstSeen[key].total += entry.total
    else
      firstSeen[key] = entry
      merged.push entry
  merged
#function for populate
getPopulateInformation = (sourceForPopulate, path) ->
  # Resolves the Organisation documents referenced via `path` on the given
  # result set and returns the populate promise; '-_id' strips the internal
  # id from the populated documents.
  Organisation.populate sourceForPopulate, {path: path, select: '-_id'}
#Transfer of line to ZipCode
lineToZipCode = (line, numberOfZipCodes) ->
  # Parses one "PLZ,federalState" CSV line into a ZipCode document and
  # returns the (possibly incremented) running count of stored zip codes.
  # Blank lines — e.g. the trailing newline of the uploaded file — are
  # ignored; previously they raised "Upload expects another file format".
  return numberOfZipCodes if not line or line.trim() is ''
  splittedLine = line.split(",")
  if splittedLine.length != 2
    throw new Error('Upload expects another file format')
  # Skip the header line
  if splittedLine[0] != 'PLZ'
    entry = new ZipCode()
    entry.zipCode = splittedLine[0]
    entry.federalState = splittedLine[1]
    # fire-and-forget save, as before; the caller only tracks the count
    entry.save()
    numberOfZipCodes++
  numberOfZipCodes
# determines org type by name
determineOrganisationType = (organisationName) ->
  # Heuristically classifies an organisation by matching its name against
  # a cascade of regular expressions; the first matching rule wins, so the
  # order of the else-if chain is significant. Returns one of: 'Company',
  # 'Association', 'Chamber', 'Education', 'Foundation', 'Municipality',
  # 'Fund', 'Policy-relevant', 'Ministry', 'City', 'Federal state',
  # 'Agency', 'Museum' — or 'Undetermined' when nothing matches.
  #public: state (Land), city (Stadt), municipality (Gemeinde)
  returnValue = 'Undetermined'
  regexCompany = /(.+G(?:es|esellschaft)?\.?m\.?b\.?H\.?.?$)|.*Gesellschaft?.*|.*AG$|.*OG$|.*KG$|(.* d.o.o?.).*|.*s.r.o?.$|.*Sp.? z?.*|.*spol.r.s.o.|.*Sp.z.o.o..*|.* S\.R\.L\.$|.* in Liq.*|.*unternehmung|.*Limited.*|.*AD$|.*S.P.A.*|.*S.P.R.L.|.*Iberica SL|.*likvidaci.*|.*p\.l\.c\./i
  regexIncorporatedCompany = /.* AG.*/
  regexAssociation = /.*(Verband).*|.*(Verein).*/i
  regexFoundation = /.*(Stiftung).*|.*(Holding)/i
  regexCity = /^Stadt .+|.*Stadtwerke.*/i
  regexMunicipality = /^(?:Markt)?gemeinde?.*|Stadtgemeinde .*|.*Sanitäts.*/i
  regexState = /^Land .+/ #Stadt Wien -- provincial
  regexMinistry = /^(?:Bundesministerium|Bundeskanzleramt)/
  regexAgency = /.*(Bundesamt|Patentamt|Parlamentsdirektion|Präsidentschaftskanzlei|Verfassungsgerichtshof|Volksanwaltschaft|.*Agency.*|Arbeitsmarktservice|Agentur.*)/i #national - public agency
  regexFund = /.*Fonds?.*/i
  regexChamber = /.*?Kammer?.*/i
  regexPolicyRelevant = /^(Alternativregion).*|.*BIFIE|.*FMA|.*Sprengel?.*|^Kleinregion .*|Arbeitsmarktservice|Verwaltungsgerichtshof/i
  regexEducation = /.*(Alumni).*|.*(Universit).*|.*(Hochsch).*|.*Mittelschul.*|.*Schul.*|.*Päda.*/i
  regexMuseum = /Albertina|.*Museum.*|.*Belvedere.*/i
  # NOTE(review): the literal below looks like redaction residue from the
  # original source — confirm the intended organisation name.
  if organisationName is 'PI:NAME:<NAME>END_PI'
    returnValue = 'Federal state'
  else if organisationName.match regexCompany
    returnValue = 'Company'
  else if organisationName.match regexIncorporatedCompany
    returnValue = 'Company'
  else if organisationName.match regexAssociation
    returnValue = 'Association'
  else if organisationName.match regexChamber
    returnValue = 'Chamber'
  else if organisationName.match regexEducation
    returnValue = 'Education'
  else if organisationName.match regexFoundation
    returnValue = 'Foundation'
  else if organisationName.match regexMunicipality
    returnValue = 'Municipality'
  else if organisationName.match regexFund
    returnValue = 'Fund'
  else if organisationName.match regexPolicyRelevant
    returnValue = 'Policy-relevant'
  else if organisationName.match regexMinistry
    returnValue = 'Ministry'
  else if organisationName.match regexCity
    returnValue = 'City'
  else if organisationName.match regexState
    returnValue = 'Federal state'
  else if organisationName.match regexAgency
    returnValue = 'Agency'
  else if organisationName.match regexMuseum
    returnValue = 'Museum'
  # fixed: the comparison used lowercase 'undetermined' while the value is
  # 'Undetermined', so this diagnostic could never fire
  console.log "Undetermined organisation type for: " + organisationName if returnValue is 'Undetermined'
  returnValue
#Transfer of line to Organisation
lineToOrganisation = (line, feedback) ->
if not feedback
console.log "THIS SHOULD NOT HAPPEN: Supposed to parse line #{line} but got no feedback object!"
splittedLine = line.split(";")
#Skip first and last lines
if splittedLine[0] != 'Bezeichnung des Rechtsträgers' and splittedLine[0] != ''
organisation = new Organisation()
organisation.name = splittedLine[0]
organisation.street = splittedLine[1]
organisation.zipCode = splittedLine[2]
organisation.city_de = splittedLine[3]
organisation.country_de = splittedLine[4]
# Setting the org type
organisation.type = determineOrganisationType splittedLine[0]
ZipCode.findOne({'zipCode': splittedLine[2]})
.then (results) ->
if results and organisation.country_de is 'Österreich'
organisation.federalState = results.federalState
else
organisation.federalState = 'Unknown'
organisation.save()
.then (ok) ->
feedback.entries++
feedback.notAustria++ if organisation.country_de != 'Österreich'
if organisation.federalState is 'Unknown' and organisation.country_de is 'Österreich'
feedback.unknownFederalState++
feedback.unknownFederalStateEntries.push organisation
# Feedback for org type
switch organisation.type
when 'Company' then feedback.organisationTypeCompany++
when 'Association' then feedback.organisationTypeAssociation++
when 'Chamber' then feedback.organisationTypeChamber++
when 'Education' then feedback.organisationTypeEducation++
when 'Foundation' then feedback.organisationTypeFoundation++
when 'Municipality' then feedback.organisationTypeMunicipality++
when 'Fund' then feedback.organisationTypeFund++
when 'Undetermined' then feedback.undeterminedOrganisationType++
when 'Policy-relevant' then feedback.organisationTypePolicyRelevant++
when 'Ministry' then feedback.organisationTypeMinistry++
when 'City' then feedback.organisationTypeCity++
when 'Federal state' then feedback.organisationTypeState++
when 'Agency' then feedback.organisationTypeAgency++
when 'Museum' then feedback.organisationTypeMuseum++
feedback
.catch (err) ->
feedback.errors+=1
feedback.errorEntries.push {organisation: organisation, errorMessage: err.errmsg, errorCode: err.code}
console.log "ERROR: Could not store organisation #{organisation.name}"
feedback
else
feedback.ignoredEntries++;
feedback
lineToTransfer = (line, feedback) ->
if not feedback
console.log "THIS SHOULD NOT HAPPEN: Supposed to parse line #{line} but got no feedback object!"
m = line.match regex
#console.log "Result: #{m} for line #{line}"
if m
transfer = new Transfer()
transfer.organisation = m[1].replace /""/g,'"'
transfer.year = parseInt m[2]
transfer.quarter = parseInt m[3]
transfer.transferType = parseInt m[4]
transfer.media = m[5].replace('""','"').replace(/http:\/\//i,'').replace('www.','').replace(/([\w\.-]+(?:\.at|\.com))/,(m)->m.toLowerCase())
transfer.period = parseInt(m[2] + m[3])
transfer.amount = parseFloat m[6].replace ',', '.'
transfer.organisationType = determineOrganisationType transfer.organisation
#Save reference
Organisation.findOne({ 'name': transfer.organisation }, 'name federalState')
.then (results) ->
if results
transfer.organisationReference = results._id
transfer.federalState = results.federalState
transfer.save()
else
console.log "WARNING: Could not find reference for #{transfer.organisation}!"
Organisation.findOne name: "Unknown"
.then (unknown) ->
if unknown
console.log "Setting org-reference for #{transfer.organisation} to 'Unknown' (#{unknown._id})"
transfer.federalState = 'Unknown'
transfer.organisationReference = unknown._id
unknownOrganisationNames = (org.organisation for org in feedback.unknownOrganisations)
feedback.unknownOrganisations.push {organisation: transfer.organisation} if transfer.organisation not in unknownOrganisationNames
transfer.save()
else
feedback.errors+=1
throw new Error("'Unknown' as placeholder was not found in organisation collection")
.then (ok) ->
feedback.quarter = transfer.quarter
feedback.year = transfer.year
feedback.entries++
feedback.paragraph2++ if transfer.transferType is 2
feedback.paragraph4++ if transfer.transferType is 4
feedback.paragraph31++ if transfer.transferType is 31
feedback.sumParagraph2 += transfer.amount if transfer.transferType is 2
feedback.sumParagraph4 += transfer.amount if transfer.transferType is 4
feedback.sumParagraph31 += transfer.amount if transfer.transferType is 31
feedback.sumTotal += transfer.amount
feedback
.catch (err) ->
feedback.errors+=1
feedback.errorEntries.push {errorMessage: err.errmsg, errorCode: err.code}
console.log "Error while importing data: #{JSON.stringify err}"
feedback
else feedback
mapEvent = (event, req) ->
  # Copies the event payload fields from the request body onto `event`
  # and returns it. numericEndDate is only overwritten when the request
  # supplies a truthy value.
  {body} = req
  event.name = body.name
  event.startDate = body.startDate
  event.numericStartDate = body.numericStartDate
  event.endDate = body.endDate
  event.predictable = body.predictable
  event.numericEndDate = body.numericEndDate if body.numericEndDate
  event.tags = body.tags
  event.region = body.region
  event
handleGroupings = (groupings, transfers, limit) ->
  # Replaces the individual top-list entries whose organisation belongs to
  # a grouping with one synthetic "(G) <name>" entry carrying the summed
  # total, then re-sorts by total (descending) and returns the first
  # `limit` entries — splice(0, limit) returns the removed head and is the
  # function's implicit return value.
  console.log ("found " + groupings.length + " gropings");
  console.log ("found " + transfers.length + " transfers");
  transfersWithGrouping = transfers
  for grouping in groupings
    groupingTransfersAmount = (transfer.total for transfer in transfersWithGrouping when transfer.organisation in grouping.members)
    groupingTransfersNames = (transfer.organisation for transfer in transfersWithGrouping when transfer.organisation in grouping.members)
    groupingTotalAmount = groupingTransfersAmount.reduce ((total, sum) -> total + sum),0
    #console.log("Grouping " + grouping.name + " with the member(s):"
    #JSON.stringify(grouping.members)+ " has the sum of " + groupingTotalAmount+ "("+ groupingTransfersAmount.length+" transfer(s))")
    #remove ALL transfers (filter) from results
    transfersWithGrouping = transfersWithGrouping.filter((transfer) ->
      transfer.organisation not in groupingTransfersNames
    )
    transfersWithGrouping.push({total: groupingTotalAmount, organisation: "(G) " + grouping.name, isGrouping: true})
    #console.log( "Group entry added: " + JSON.stringify(transfersWithGrouping[transfersWithGrouping.length-1]))
  #Sort array of transfers by total amount
  sorty([{name: 'total', dir: 'desc', type: 'number'}], transfersWithGrouping)
  transfersWithGrouping.splice(0,limit)
module.exports = (Transparency) ->
overview: (req, res) ->
  # Two-stage aggregation for the overview page: first per
  # (year, quarter, transferType) with entry counts and amount sums, then
  # regrouped per year with a quarters array and a yearly total, sorted
  # ascending by year.
  queryPromise = Transfer.aggregate({$match: {}})
  .group(
    _id:
      quarter: "$quarter"
      year: "$year"
      transferType: "$transferType"
    entries: {$sum: 1}
    total:
      $sum: "$amount")
  .project(quarter: "$_id.quarter", year: "$_id.year", transferType: "$_id.transferType", _id: 0, entries: 1, total: 1)
  #.sort('-year -quarter transferType')
  .group(
    _id:
      year: "$year"
    quarters:
      $addToSet: {quarter: "$quarter", transferType: "$transferType", entries: "$entries", total: "$total"}
    total:
      $sum: "$total")
  .project(year: "$_id.year", _id: 0, quarters: 1, total: 1)
  .sort("year")
  .exec()
  queryPromise.then(
    (result) ->
      res.send result
    (err) ->
      res.status(500).send "Could not load overview from Database: #{err}"
  )
years: (req, res) ->
  # Lists all distinct years present in the Transfer collection,
  # newest first, as {years: [..]}.
  queryPromise = Transfer.aggregate($match: {})
  .group(_id:
    year: "$year")
  .project(year: "$_id.year", _id: 0)
  .sort("-year")
  .exec()
  queryPromise.then(
    (result) ->
      res.send years: result.map (e)->
        e.year
    (err) ->
      res.status(500).send "Could not load overview from Database: #{err}"
  )
upload: (req, res) ->
file = req.files.file;
feedback =
quarter: 0
year: 0
entries: 0
paragraph2: 0
sumParagraph2: 0
paragraph4: 0
sumParagraph4: 0
paragraph31: 0
sumParagraph31: 0
sumTotal: 0.0
unknownOrganisations: []
errors: 0
errorEntries: []
#qfs.read(file.path).then(
fs.readFile file.path, (err,data) ->
if err
res.send 500, "Error #{err.message}"
else
input = iconv.decode data,'latin1'
input.split("\n").reduce ((p,line) -> p.then((f) -> lineToTransfer line, f)), Q.fcall(->feedback)
.then (ok) ->
Transfer.count()
.then(
(transfersInDatabase) ->
feedback.savedInDatabase = transfersInDatabase
feedback.integrityCheck = true
res.status(200).send(feedback)
)
.catch (err) ->
res.send 500, "Error #{err.message}"
#Function for the upload of organisation-address-data
uploadOrganisation: (req, res) ->
file = req.files.file;
feedback =
entries: 0
ignoredEntries: 0
unknownFederalState: 0,
unknownFederalStateEntries: [],
undeterminedOrganisationType: 0,
organisationTypeCompany: 0,
organisationTypeAssociation: 0,
organisationTypeFoundation: 0,
organisationTypeMunicipality: 0,
organisationTypeState: 0,
organisationTypeCity: 0,
organisationTypeMinistry: 0,
organisationTypeAgency: 0,
organisationTypeFund: 0,
organisationTypeChamber: 0,
organisationTypePolicyRelevant: 0,
organisationTypeEducation: 0,
organisationTypeMuseum: 0,
notAustria: 0,
errors:0
errorEntries: []
fs.readFile file.path, (err,data) ->
if err
res.status(500).send("Error #{err.message}")
else
input = iconv.decode data, 'utf8'
input.split("\n").reduce ((p,line) -> p.then((f) -> lineToOrganisation line, f)), Q.fcall(->feedback)
.then (ok) ->
Organisation.count()
.then(
(organisationsInDatabase) ->
feedback.savedInDatabase = organisationsInDatabase
feedback.integrityCheck = true
res.status(200).send(feedback)
)
.catch (err) ->
res.send 500, "Error #{err.message}"
#Function for the upload of organisation-address-data
uploadZipCode: (req, res) ->
file = req.files.file;
response =
newZipCodes: 0
integrityCheck: false
savedInDatabase: 0
fs.readFile file.path, (err,data) ->
if err
res.status(500).send("Error #{err.message}")
else
input = iconv.decode data, 'utf8'
response.newZipCodes = lineToZipCode(line,response.newZipCodes) for line in input.split('\n')
ZipCode.count()
.then(
(codesInDatabase) ->
response.savedInDatabase = codesInDatabase
response.integrityCheck = true
res.status(200).send(response)
(error) ->
res.send 500, "Error #{error}"
)
periods: (req, res) ->
  # Lists all distinct (year, quarter, period) combinations present in
  # the Transfer collection, newest first.
  Transfer.aggregate(
    $match: {}
  )
  .group(
    _id:
      year: "$year", quarter: "$quarter", period: "$period"
  )
  .project(
    year: "$_id.year", quarter: "$_id.quarter", period: "$_id.period", _id: 0
  )
  .sort("-year -quarter")
  .exec()
  .then(
    (data) ->
      res.send data
    (err) -> res.status(500).send("Could not load periods (#{err})!")
  )
filteredflows: (req, res) ->
getOtherMedia = (organisations, media, period, paymentTypes, federalState) ->
result = []
if (organisations and organisations.length > 0) and (media and media.length > 0)
qry = {}
(qry.transferType = $in: paymentTypes.map (e)->
parseInt(e)) if paymentTypes.length > 0
(qry.organisation = $in: organisations) if organisations.length > 0
(qry.media = $nin: media) if media.length > 0
if period.$gte? or period.$lte?
qry.period = period
grp =
_id:
organisation: "$organisation"
organisationReference: "$organisationReference"
transferType: "$transferType"
amount:
$sum: "$amount"
Transfer.aggregate($match: qry)
.group grp
.exec()
.then (rslt) ->
for data in rslt
result.push {
amount: data.amount,
organisation: data._id.organisation,
transferType: data._id.transferType,
media: "Other media"
}
result
else
new Promise (resolve, reject) ->
resolve result
getOtherOrganisations = (organisations, media, period, paymentTypes, federalState) ->
result = []
if (media and media.length > 0) and (organisations and organisations.length > 0)
qry = {}
(qry.transferType = $in: paymentTypes.map (e)->
parseInt(e)) if paymentTypes.length > 0
(qry.organisation = $nin: organisations) if organisations.length > 0
(qry.media = $in: media) if media.length > 0
if period.$gte? or period.$lte?
qry.period = period
grp =
_id:
media: "$media"
transferType: "$transferType"
amount:
$sum: "$amount"
Transfer.aggregate($match: qry)
.group grp
.exec()
.then (rslt) ->
for data in rslt
result.push {
amount: data.amount,
media: data._id.media,
transferType: data._id.transferType,
organisation: "Other organisations"
}
result
else
new Promise (resolve, reject) ->
resolve result
try
maxLength = parseInt req.query.maxLength or "750"
federalState = req.query.federalState or ''
period = {}
period['$gte'] = parseInt(req.query.from) if req.query.from
period['$lte'] = parseInt(req.query.to) if req.query.to
paymentTypes = req.query.pType or []
paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
query = {}
(query.transferType =
$in: paymentTypes.map (e)->
parseInt(e)) if paymentTypes.length > 0
organisations = req.query.organisations or []
organisations = [organisations] if organisations not instanceof Array
media = req.query.media or []
media = [media] if media not instanceof Array
(query.organisation = $in: organisations) if organisations.length > 0
(query.media = $in: media) if media.length > 0
if period.$gte? or period.$lte?
query.period = period
group =
_id:
organisation: "$organisation"
organisationReference: "$organisationReference"
transferType: "$transferType"
media: "$media"
amount:
$sum: "$amount"
Transfer.aggregate($match: query)
.group(group)
.project(
organisation: "$_id.organisation",
organisationReference: "$_id.organisationReference",
transferType: "$_id.transferType",
media: "$_id.media"
_id: 0
amount: 1
)
.exec()
.then (result) ->
populatedPromise = getPopulateInformation(result, 'organisationReference')
.then(
(isPopulated) ->
if federalState
result = (transfer for transfer in result when transfer.organisationReference.federalState_en is federalState)
getOtherMedia(organisations, media, period, paymentTypes, "").then (otherMedia) ->
result = result.concat otherMedia
getOtherOrganisations(organisations, media, period, paymentTypes, "").then (otherOrganisations) ->
result = result.concat otherOrganisations
if result.length > maxLength
res.status(413).send {
error: "You query returns more then the specified maximum of #{maxLength}"
length: result.length
}
else
res.json result
)
.catch (err) ->
res.status(500).send error: "Could not load money flow: #{err}"
catch error
res.status(500).send error: "Could not load money flow: #{error}"
  # Builds the time-series data for the money-flow detail chart: one value per
  # quarter (encoded as year + quarter/4) summing all matching transfer
  # amounts. Query params: pType (payment type(s), default ['2']), source
  # (organisation names), target (media names); scalar params are normalised
  # to arrays.
  flowdetail: (req, res) ->
    try
      paymentTypes = req.query.pType or ['2']
      paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
      source = req.query.source or []
      target = req.query.target or []
      source = [source] if source not instanceof Array
      target = [target] if target not instanceof Array
      query = {}
      if source.length > 0 then query.organisation = $in: source;
      if target.length > 0 then query.media = $in: target;
      # Restrict to the requested payment types (stored as integers).
      (query.transferType =
        $in: paymentTypes.map (e)->
          parseInt(e)) if paymentTypes.length > 0
      # NOTE(review): `err` from this find is never checked — a query failure
      # would surface as an odd/empty chart instead of a 500. Confirm intent.
      Transfer.find query, {}, {sort: {year: 1, quarter: 1}}, (err, transfers) ->
        result = {
          data:
            {
              key: "PI:KEY:<KEY>END_PI"
              values: []
            }
          tickvalues: []
        }
        i = 0
        tmpObj = {
        }
        #find all years
        Transfer.distinct 'year', (error, data) ->
          if !error
            years = data
            years.sort()
            tmpResult = {}
            tickvalues = []
            # Pre-seed every quarter of every known year with 0 so the chart
            # gets a point even for quarters without transfers.
            for year in years
              for quarter in [0...4]
                tmpObj[year + (quarter/4)] = 0
                tickvalues.push (year + (quarter/4))
            tickvalues.sort()
            # Accumulate amounts into quarter buckets; quarters are 1-based
            # in the data, hence (quarter-1)/4.
            for transfer in transfers
              tmpObj[""+ (transfer.year + (transfer.quarter-1)/4)] += transfer.amount
            result.tickvalues = tickvalues
            for tickvalue in tickvalues
              result.data.values.push [tickvalue, tmpObj[tickvalue]]
            res.json result
          else
            res.status 500
              .send "Could not load years from database! #{error}"
    catch error
      res.status(500).send error: "Could not load money flow: #{error}"
annualcomparison: (req, res) ->
try
paymentTypes = req.query.pType or ['2']
paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
source = req.query.source or []
target = req.query.target or []
source = [source] if source not instanceof Array
target = [target] if target not instanceof Array
query = {}
if source.length > 0 then query.organisation = $in: source;
if target.length > 0 then query.media = $in: target;
(query.transferType =
$in: paymentTypes.map (e)->
parseInt(e)) if paymentTypes.length > 0
years = []
#find all years
Transfer.distinct 'year', (error, data) ->
if !error
years = data
years.sort()
tmpResult = {}
for year in years
tmpResult[""+year] = {
quarters: {
'1': 0
'2': 0
'3': 0
'4': 0
}
}
else
res.status 500
.send "Could not load years from database! #{error}"
Transfer.find query, {}, {sort: {year: 1, quarter: 1}, transferType: 1}, (err, transfers) ->
for transfer in transfers
tmpResult[""+transfer.year].quarters[""+transfer.quarter] += transfer.amount
result = []
for year, quarters of tmpResult
quarterArr = []
for quarter, amount of quarters.quarters
quarterArr.push {
x: (Number(quarter)-1)/4
y: amount
}
result.push {
key: year
color: '#'+(Math.random()*0xFFFFFF<<0).toString(16)
values: quarterArr
}
res.json result
catch error
res.status(500).send error: "Could not load money flow: #{error}"
flows: (req, res) ->
try
maxLength = parseInt req.query.maxLength or "750"
federalState = req.query.federalState if req.query.federalState
period = {}
period['$gte'] = parseInt(req.query.from) if req.query.from
period['$lte'] = parseInt(req.query.to) if req.query.to
paymentTypes = req.query.pType or []
paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
orgType = req.query.orgType or 'org'
name = req.query.name
query = {}
(query.transferType =
$in: paymentTypes.map (e)->
parseInt(e)) if paymentTypes.length > 0
query[if orgType is 'org' then 'organisation' else 'media'] = name if name
if period.$gte? or period.$lte?
query.period = period
if req.query.filter
filter = req.query.filter
query.$or = [
{organisation: { $regex: ".*#{filter}.*", $options: "i"}}
{media: { $regex: ".*#{filter}.*", $options: "i"}}
]
if federalState?
query.federalState = federalState
group =
_id:
organisation: "$organisation"
transferType: "$transferType"
media: "$media"
amount:
$sum: "$amount"
Transfer.aggregate($match: query)
.group(group)
.project(
organisation: "$_id.organisation",
transferType: "$_id.transferType",
media: "$_id.media"
_id: 0
amount: 1
)
.exec()
.then (result) ->
if result.length > maxLength
res.status(413).send {
error: "You query returns more then the specified maximum of #{maxLength}"
length: result.length
}
else
res.json result
.catch (err) ->
res.status(500).send error: "Could not load money flow: #{err}"
catch error
res.status(500).send error: "Could not load money flow: #{error}"
  # Returns the top-X organisations or media by summed transfer amount,
  # together with the overall total (via mapReduce) and, when requested,
  # user-defined groupings folded into the ranking via handleGroupings.
  # Query params: from/to (period), orgType ('org'|'media'), pType,
  # x (result limit, default 10), federalState, orgCategories, groupings.
  topEntries: (req, res) ->
    promiseToFullfill = []
    federalState = req.query.federalState if req.query.federalState
    includeGroupings = req.query.groupings if req.query.groupings
    period = {}
    period['$gte'] = parseInt(req.query.from) if req.query.from
    period['$lte'] = parseInt(req.query.to) if req.query.to
    orgType = req.query.orgType or 'org'
    paymentTypes = req.query.pType or ['2']
    paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
    limitOfResults = parseInt(req.query.x or '10')
    orgCategories = req.query.orgCategories if req.query.orgCategories
    orgCategories = [orgCategories] if orgCategories not instanceof Array and req.query.orgCategories
    query = {}
    project =
      organisation: '$_id.organisation'
      _id: 0
      total: 1
    if period.$gte? or period.$lte?
      query.period = period
    query.transferType =
      $in: paymentTypes.map (e)->
        parseInt(e)
    if federalState?
      query.federalState = federalState
    if orgCategories?
      query.organisationType =
        $in: orgCategories
    group =
      _id:
        organisation: if orgType is 'org' then '$organisation' else '$media',
      total:
        $sum: '$amount'
    # mapReduce over the same query to get the grand total (summed per year
    # here, reduced again to a single number below).
    # NOTE(review): map/reduce run inside MongoDB — `emit` and `Array.sum`
    # are provided by the mongod mapReduce scope, not by this process.
    options = {}
    options.map = () ->
      emit this.year, this.amount
    options.reduce = (key, vals) ->
      Array.sum vals
    options.query = query
    #console.log "Query: "
    #console.log query
    #console.log "Group: "
    #console.log group
    #console.log "Project: "
    #console.log project
    # Without groupings a plain top-N aggregation suffices; with groupings
    # all entries are needed so members can be merged before limiting.
    if not includeGroupings
      topPromise = Transfer.aggregate($match: query)
      .group(group)
      .sort('-total')
      .limit(limitOfResults)
      .project(project)
      .exec()
    else
      topPromise = Transfer.aggregate($match: query)
      .group(group)
      .sort('-total')
      .project(project)
      .exec()
    promiseToFullfill.push(topPromise)
    allPromise = Transfer.mapReduce options
    promiseToFullfill.push allPromise
    if includeGroupings
      groupingQuery = {}
      groupingQuery.isActive = true
      groupingQuery.type = orgType
      groupingQuery.region = if federalState then federalState else 'AT'
      groupingsPromise = Grouping.find(groupingQuery)
      .select('name owner members -_id')
      .exec()
      promiseToFullfill.push(groupingsPromise)
    # Wait for the mapReduce, then for everything, then assemble the reply.
    # NOTE(review): a rejection of allPromise itself is not handled here.
    allPromise.then (r) ->
      Q.all(promiseToFullfill)
      .then (results) ->
        try
          result =
            top: results[0]
            # Fold the per-year mapReduce output into one grand total.
            all: results[1].reduce(
              (sum, v)->
                sum + v.value
              0)
            groupings: results[2] if results[2]
          if result.groupings?
            result.top = handleGroupings(result.groupings, result.top, limitOfResults)
          res.send result
        catch error
          console.log error
          res.status(500).send("No Data was found!")
      .catch (err) ->
        console.log "Error in Promise.when"
        console.log err
        res.status(500).send("Error #{err.message}")
search: (req,res) ->
name = req.query.name
federalState = req.query.federalState if req.query.federalState
if not name
res.status(400).send error: "'name' is required!"
return
types = if req.query.orgType then [req.query.orgType] else ['org','media']
buildRegex = (name,value) ->
q={}
q[name]= { $regex: ".*#{value}.*", $options: "i"}
q
performQuery = (orgType) ->
nameField = if orgType is 'org' then 'organisation' else 'media'
group =
_id:
name: "$#{nameField}"
type: orgType
years:
$addToSet: "$year"
total: $sum: "$amount"
transferTypes: $addToSet: "$transferType"
project =
name: '$_id.name'
_id: 0
years: 1
total: 1
transferTypes: 1
if orgType is 'org'
group._id.organisationType = '$organisationType'
group._id.federalState = '$federalState'
project.organisationType = '$_id.organisationType'
project.federalState = '$_id.federalState'
$or = name.split(' ').reduce ((a,n)-> q={};a.push buildRegex(nameField,n);a) ,[]
if not federalState
query = $or: $or
else
query = $and: $or
query.$and.push {"federalState": federalState}
Transfer.aggregate($match: query)
.group(group)
.project(project)
.sort('name')
.exec()
all = Q.all types.map (t) ->
performQuery t
all.then (results) ->
result = types.reduce ((r,t,index) -> r[t] = results[index];r),{}
res.json result
.catch (err) ->
res.status(500).send error: "Could not perform search"
list: (req,res) ->
types = if req.query.orgType then [req.query.orgType] else ['org','media']
page = parseInt req.query.page or "0"
size = parseInt req.query.size or "50"
federalState = req.query.federalState
performQuery = (orgType) ->
nameField = if orgType is 'org' then 'organisation' else 'media'
query = {}
if federalState?
query.federalState = federalState
project ={}
project =
name: '$_id.name'
_id: 0
years: 1
total: 1
transferTypes: 1
group =
_id:
name: "$#{nameField}"
type: orgType
years:
$addToSet: "$year"
total: $sum: "$amount"
transferTypes: $addToSet: "$transferType"
if orgType is 'org'
group._id.organisationType= '$organisationType'
project.organisationType = '$_id.organisationType'
Transfer.aggregate($match: query)
.group(group)
.project(project)
.sort('name').skip(page*size).limit(size)
.exec()
all = Q.all types.map (t) ->
performQuery t
all.then (results) ->
result = types.reduce ((r,t,index) -> r[t] = results[index];r),{}
res.json result
.catch (err) ->
res.status(500).send error: "Could not perform search #{err}"
  # Counts distinct organisations or media by grouping on the name field and
  # returning the number of groups. Optional federalState restriction.
  count: (req,res) ->
    type = req.query.orgType or 'org'
    federalState = req.query.federalState if req.query.federalState
    performQuery = (orgType) ->
      nameField = if orgType is 'org' then 'organisation' else 'media'
      query = {}
      group =
        _id:
          name: "$#{nameField}"
      if federalState
        query.federalState = federalState
        # NOTE(review): this groups on the literal state value rather than
        # the "$federalState" field path — harmless since the query already
        # filters to one state, but confirm that was the intent.
        group._id.federalState = federalState
      Transfer.aggregate($match: query)
      .group(group)
      .exec()
    performQuery(type)
    .then (result) ->
      res.json result.length
    .catch (err) ->
      res.status(500).send error: "Could not determine number of items #{err}"
getEvents: (req,res) ->
handleEventResponse = (err, data) ->
if err
res.status(500).send error: "Could not get events #{err}"
else if !data or data.length is 0
res.status(404).send()
else
res.json data
#todo: insert parameter checking
if req.query.region
Event.find {region: req.query.region}, handleEventResponse
else if req.query.id
Event.findById req.query.id, handleEventResponse
else
Event.find {}, handleEventResponse
createEvent: (req,res) ->
#todo: insert parameter checking
event = new Event()
event = mapEvent event, req
event.save (err) ->
if err
res.status(500).send error: "Could not create event #{err}"
else
res.json event
updateEvent: (req, res) ->
#todo: insert parameter checking
Event.findById req.body._id, (err, data) ->
if err
res.status(500).send error: "Could not update event #{err}"
if !data or data.length is 0
res.status(500).send error: "Could not find event #{req.body._id}"
else
event = mapEvent data, req
event.save (err) ->
if err
res.status(500).send error: "Could not create event #{err}"
else
res.json event
deleteEvent: (req, res) ->
#todo: insert parameter checking
Event.findById {_id: req.query.id}, (err, data) ->
if err
res.status(500).send error: "Could not find event #{err}"
data.remove (removeErr) ->
if removeErr
res.status(500).send error: "Could not delete event #{removeErr}"
res.json data
getEventTags: (req, res) ->
Event.find {}, (err, events) ->
if err
res.status(500).send error "Could not load events #{err}"
result = []
for event in events
if event.tags
Array.prototype.push.apply result, event.tags
res.json Array.from(new Set(result))
  # Sums transfer amounts per federal state (for the Austria map view),
  # padding the result so every state AT-1..AT-9 appears even with a zero
  # amount. Filters: from/to period, pType, orgTypes.
  federalstates: (req, res) ->
    try
      period = {}
      period['$gte'] = parseInt(req.query.from) if req.query.from
      period['$lte'] = parseInt(req.query.to) if req.query.to
      paymentTypes = req.query.pType or []
      paymentTypes = [paymentTypes] if paymentTypes not instanceof Array
      organisationTypes = req.query.orgTypes or []
      organisationTypes = [organisationTypes] if organisationTypes not instanceof Array
      query = {}
      (query.transferType =
        $in: paymentTypes.map (e)->
          parseInt(e)) if paymentTypes.length > 0
      (query.organisationType =
        $in: organisationTypes.map (e)->
          (e)) if organisationTypes.length > 0
      if period.$gte? or period.$lte?
        query.period = period
      group =
        _id:
          federalState: "$federalState"
        amount:
          $sum: "$amount"
      Transfer.aggregate($match: query)
      .group(group)
      .project(
        federalState: "$_id.federalState",
        _id: 0
        amount: 1
      )
      .sort('federalState')
      .exec()
      .then (result) ->
        # Ensure all nine states are present; [1...10] is exclusive of 10,
        # i.e. AT-1 through AT-9.
        for i in [1...10]
          found = false
          for r in result
            found = false
            if r.federalState is "AT-" + i
              found = true
              break
          if !found
            result.push {
              amount: 0,
              federalState: "AT-" + i
            }
        res.status(200).send result
      .catch (error) ->
        console.log "Error query data for map: #{error}"
        res.status(500).send error: "Could not get data for map #{error}"
    catch error
      console.log error
      res.status(500).send("Error with query for map")
#Grouping
  # Lists the distinct organisation or media names (plus federal state for
  # organisations) that can be offered as candidate members of a grouping.
  getPossibleGroupMembers: (req, res) ->
    type = req.query.orgType or 'org'
    nameField = if type is 'org' then 'organisation' else 'media'
    query = {}
    project =
      name: '$_id.name'
      _id: 0
    group =
      _id:
        name: "$#{nameField}"
    # Organisations additionally carry their federal state.
    if type is 'org'
      group._id.federalState = '$federalState'
      project.federalState = '$_id.federalState'
    #console.log 'Query:'
    #console.log query
    #console.log 'Group'
    #console.log group
    #console.log 'Project'
    #console.log project
    Transfer.aggregate($match: query)
    .group(group)
    .project(project)
    .sort('name')
    .exec()
    .then (result) ->
      res.status(200).send result
    .catch (error) ->
      console.log "Error query possible group members: #{error}"
      res.status(500).send error: "Could not get group members #{error}"
createGrouping: (req, res) ->
grouping = new Grouping()
grouping.name = req.body.params.name
grouping.type = req.body.params.type
grouping.region = req.body.params.region
grouping.members = req.body.params.members
grouping.isActive = req.body.params.isActive
if req.body.params.owner?
grouping.owner = req.body.params.owner
grouping.save (err) ->
if err
res.status(500).send error: "Could not create grouping #{err}"
else
res.status(200).send grouping
  # Reads groupings: a single one by ?id, or a page (?page/?size, defaults
  # 0/50), optionally filtered by ?type, sorted by name.
  getGroupings: (req, res) ->
    query = {}
    if req.query.id?
      query._id = req.query.id
      # Single-document lookup: force page 0 / size 1.
      page = parseInt "0"
      size = parseInt "1"
    else
      page = parseInt req.query.page or "0"
      size = parseInt req.query.size or "50"
    if req.query.type?
      query.type = req.query.type
    Grouping
      .find(query)
      .sort('name')
      .skip(page*size)
      .limit(parseInt(size))
      .exec()
      .then(
        (result) ->
          res.status(200).send result
      )
      .catch (
        (err) ->
          res.status(500).send error: "Could not read grouping(s) #{err}"
      )
  # Updates an existing grouping (looked up by body.params._id), overwriting
  # all editable fields; owner is cleared when not supplied.
  # NOTE(review): when _id is missing no response is sent at all, and a
  # failed save inside .then is only surfaced via the outer .catch.
  updateGrouping: (req, res) ->
    if req.body.params._id?
      Grouping.findById(_id: req.body.params._id).exec()
      .then(
        (result) ->
          grouping = result
          grouping.name = req.body.params.name
          grouping.type = req.body.params.type
          grouping.region = req.body.params.region
          grouping.isActive = req.body.params.isActive
          grouping.members = req.body.params.members
          if req.body.params.owner?
            grouping.owner = req.body.params.owner
          else
            grouping.owner = ''
          grouping.save()
          .then (
            (updated) ->
              res.status(200).send updated
          )
      )
      .catch (
        (err) ->
          res.status(500).send error: "Could not update grouping #{err}"
      )
deleteGroupings: (req, res) ->
if req.query.id?
Grouping.findByIdAndRemove(req.query.id).exec()
.then(
(removed) ->
res.status(200).send removed
)
.catch (
(err) ->
res.status(500).send error: "Could not delete grouping #{err}"
)
else
res.status(500).send error: "Could not delete grouping #{err}"
countGroupings: (req, res) ->
Grouping.count().exec()
.then(
(counted) ->
res.status(200).send({count :counted})
)
.catch (
(err) ->
res.status(500).send error: "Could not count groupings #{err}"
)
getGroupingMembers: (req, res) ->
query = {}
query.isActive = true
query.name = req.query.name
Grouping.find(query)
.select('members type -_id')
.then(
(members) ->
res.status(200).send(members)
)
.catch (
(err) ->
res.status(500).send error: "Could not load grouping's member #{err}"
)
  # Returns the distinct organisation types, sorted, as [{type: ...}, ...].
  organisationTypes: (req, res) ->
    Transfer.aggregate(
      $match: {}
    )
    .group(
      _id:
        organisationType: "$organisationType"
    )
    .project(
      type: "$_id.organisationType", _id: 0
    )
    .sort("type")
    .exec()
    .then(
      (data) ->
        res.send data
      # Second argument to .then: the rejection handler.
      (err) -> res.status(500).send("Could not load organisation types (#{err})!")
    )
|
[
{
"context": "get.min.js\n###\n\n###* @preserve https://github.com/jawj/github-widget\nCopyright (c) 2011 - 2012 George Ma",
"end": 224,
"score": 0.9987042546272278,
"start": 220,
"tag": "USERNAME",
"value": "jawj"
},
{
"context": "b.com/jawj/github-widget\nCopyright (c) 2011 - 2012 ... | github-widget/github-widget.coffee | dougmolineux/dougmolineux.github.io | 1 | ###
# to minify:
java -jar /usr/local/closure-compiler/compiler.jar \
--compilation_level SIMPLE_OPTIMIZATIONS \
--js github-widget.js \
--js_output_file github-widget.min.js
###
###* @preserve https://github.com/jawj/github-widget
Copyright (c) 2011 - 2012 George MacKerron
Released under the MIT licence: http://opensource.org/licenses/mit-license ###
makeWidget = (payload, div) ->
make cls: 'gw-clearer', prevSib: div
user = div.getAttribute 'data-user'
opts = div.getAttribute 'data-options'
opts = if typeof opts is 'string' then JSON.parse(opts) else {}
siteRepoNames = ["#{user}.github.com".toLowerCase(), "#{user}.github.io".toLowerCase()]
sortBy = opts.sortBy or 'watchers'
limit = parseInt(opts.limit) or Infinity
made = 0
for repo in payload.data.sort((a, b) -> b[sortBy] - a[sortBy])
continue if (not opts.forks and repo.fork) or repo.name.toLowerCase() in siteRepoNames or not repo.description
break if made++ is limit
make parent: div, cls: 'gw-repo-outer', kids: [
make cls: 'gw-repo', kids: [
make cls: 'gw-title', kids: [
make tag: 'ul', cls: 'gw-stats', kids: [
make tag: 'li', text: repo.watchers, cls: 'gw-watchers'
make tag: 'li', text: repo.forks, cls: 'gw-forks']
make tag: 'a', href: repo.html_url, text: repo.name, cls: 'gw-name']
make cls: 'gw-lang', text: repo.language if repo.language?
make cls: 'gw-repo-desc', text: repo.description]]
init = ->
for div in (get tag: 'div', cls: 'github-widget')
do (div) -> # close over correct div
url = "https://api.github.com/users/#{div.getAttribute 'data-user'}/repos?callback=<cb>"
jsonp url: url, success: (payload) -> makeWidget payload, div
# support functions
cls = (el, opts = {}) -> # cut-down version: no manipulation support
classHash = {}
classes = el.className.match(cls.re)
if classes?
(classHash[c] = yes) for c in classes
hasClasses = opts.has?.match(cls.re)
if hasClasses?
(return no unless classHash[c]) for c in hasClasses
return yes
null
cls.re = /\S+/g
get = (opts = {}) ->
inside = opts.inside ? document
tag = opts.tag ? '*'
if opts.id?
return inside.getElementById opts.id
hasCls = opts.cls?
if hasCls and tag is '*' and inside.getElementsByClassName?
return inside.getElementsByClassName opts.cls
els = inside.getElementsByTagName tag
if hasCls then els = (el for el in els when cls el, has: opts.cls)
if not opts.multi? and tag.toLowerCase() in get.uniqueTags then els[0] ? null else els
get.uniqueTags = 'html body frameset head title base'.split(' ')
text = (t) -> document.createTextNode '' + t
make = (opts = {}) -> # opts: tag, parent, prevSib, text, cls, [attrib]
t = document.createElement opts.tag ? 'div'
for own k, v of opts
switch k
when 'tag' then continue
when 'parent' then v.appendChild t
when 'kids' then t.appendChild c for c in v when c?
when 'prevSib' then v.parentNode.insertBefore t, v.nextSibling
when 'text' then t.appendChild text v
when 'cls' then t.className = v
else t[k] = v
t
jsonp = (opts) ->
callbackName = opts.callback ? '_JSONPCallback_' + jsonp.callbackNum++
url = opts.url.replace '<cb>', callbackName
window[callbackName] = opts.success ? jsonp.noop
make tag: 'script', src: url, parent: (get tag: 'head')
jsonp.callbackNum = 0
jsonp.noop = ->
# do it!
init()
| 14977 | ###
# to minify:
java -jar /usr/local/closure-compiler/compiler.jar \
--compilation_level SIMPLE_OPTIMIZATIONS \
--js github-widget.js \
--js_output_file github-widget.min.js
###
###* @preserve https://github.com/jawj/github-widget
Copyright (c) 2011 - 2012 <NAME>
Released under the MIT licence: http://opensource.org/licenses/mit-license ###
makeWidget = (payload, div) ->
make cls: 'gw-clearer', prevSib: div
user = div.getAttribute 'data-user'
opts = div.getAttribute 'data-options'
opts = if typeof opts is 'string' then JSON.parse(opts) else {}
siteRepoNames = ["#{user}.github.com".toLowerCase(), "#{user}.github.io".toLowerCase()]
sortBy = opts.sortBy or 'watchers'
limit = parseInt(opts.limit) or Infinity
made = 0
for repo in payload.data.sort((a, b) -> b[sortBy] - a[sortBy])
continue if (not opts.forks and repo.fork) or repo.name.toLowerCase() in siteRepoNames or not repo.description
break if made++ is limit
make parent: div, cls: 'gw-repo-outer', kids: [
make cls: 'gw-repo', kids: [
make cls: 'gw-title', kids: [
make tag: 'ul', cls: 'gw-stats', kids: [
make tag: 'li', text: repo.watchers, cls: 'gw-watchers'
make tag: 'li', text: repo.forks, cls: 'gw-forks']
make tag: 'a', href: repo.html_url, text: repo.name, cls: 'gw-name']
make cls: 'gw-lang', text: repo.language if repo.language?
make cls: 'gw-repo-desc', text: repo.description]]
init = ->
for div in (get tag: 'div', cls: 'github-widget')
do (div) -> # close over correct div
url = "https://api.github.com/users/#{div.getAttribute 'data-user'}/repos?callback=<cb>"
jsonp url: url, success: (payload) -> makeWidget payload, div
# support functions
cls = (el, opts = {}) -> # cut-down version: no manipulation support
classHash = {}
classes = el.className.match(cls.re)
if classes?
(classHash[c] = yes) for c in classes
hasClasses = opts.has?.match(cls.re)
if hasClasses?
(return no unless classHash[c]) for c in hasClasses
return yes
null
cls.re = /\S+/g
get = (opts = {}) ->
inside = opts.inside ? document
tag = opts.tag ? '*'
if opts.id?
return inside.getElementById opts.id
hasCls = opts.cls?
if hasCls and tag is '*' and inside.getElementsByClassName?
return inside.getElementsByClassName opts.cls
els = inside.getElementsByTagName tag
if hasCls then els = (el for el in els when cls el, has: opts.cls)
if not opts.multi? and tag.toLowerCase() in get.uniqueTags then els[0] ? null else els
get.uniqueTags = 'html body frameset head title base'.split(' ')
text = (t) -> document.createTextNode '' + t
make = (opts = {}) -> # opts: tag, parent, prevSib, text, cls, [attrib]
t = document.createElement opts.tag ? 'div'
for own k, v of opts
switch k
when 'tag' then continue
when 'parent' then v.appendChild t
when 'kids' then t.appendChild c for c in v when c?
when 'prevSib' then v.parentNode.insertBefore t, v.nextSibling
when 'text' then t.appendChild text v
when 'cls' then t.className = v
else t[k] = v
t
jsonp = (opts) ->
callbackName = opts.callback ? '_JSONPCallback_' + jsonp.callbackNum++
url = opts.url.replace '<cb>', callbackName
window[callbackName] = opts.success ? jsonp.noop
make tag: 'script', src: url, parent: (get tag: 'head')
jsonp.callbackNum = 0
jsonp.noop = ->
# do it!
init()
| true | ###
# to minify:
java -jar /usr/local/closure-compiler/compiler.jar \
--compilation_level SIMPLE_OPTIMIZATIONS \
--js github-widget.js \
--js_output_file github-widget.min.js
###
###* @preserve https://github.com/jawj/github-widget
Copyright (c) 2011 - 2012 PI:NAME:<NAME>END_PI
Released under the MIT licence: http://opensource.org/licenses/mit-license ###
makeWidget = (payload, div) ->
make cls: 'gw-clearer', prevSib: div
user = div.getAttribute 'data-user'
opts = div.getAttribute 'data-options'
opts = if typeof opts is 'string' then JSON.parse(opts) else {}
siteRepoNames = ["#{user}.github.com".toLowerCase(), "#{user}.github.io".toLowerCase()]
sortBy = opts.sortBy or 'watchers'
limit = parseInt(opts.limit) or Infinity
made = 0
for repo in payload.data.sort((a, b) -> b[sortBy] - a[sortBy])
continue if (not opts.forks and repo.fork) or repo.name.toLowerCase() in siteRepoNames or not repo.description
break if made++ is limit
make parent: div, cls: 'gw-repo-outer', kids: [
make cls: 'gw-repo', kids: [
make cls: 'gw-title', kids: [
make tag: 'ul', cls: 'gw-stats', kids: [
make tag: 'li', text: repo.watchers, cls: 'gw-watchers'
make tag: 'li', text: repo.forks, cls: 'gw-forks']
make tag: 'a', href: repo.html_url, text: repo.name, cls: 'gw-name']
make cls: 'gw-lang', text: repo.language if repo.language?
make cls: 'gw-repo-desc', text: repo.description]]
init = ->
for div in (get tag: 'div', cls: 'github-widget')
do (div) -> # close over correct div
url = "https://api.github.com/users/#{div.getAttribute 'data-user'}/repos?callback=<cb>"
jsonp url: url, success: (payload) -> makeWidget payload, div
# support functions
cls = (el, opts = {}) -> # cut-down version: no manipulation support
classHash = {}
classes = el.className.match(cls.re)
if classes?
(classHash[c] = yes) for c in classes
hasClasses = opts.has?.match(cls.re)
if hasClasses?
(return no unless classHash[c]) for c in hasClasses
return yes
null
cls.re = /\S+/g
get = (opts = {}) ->
inside = opts.inside ? document
tag = opts.tag ? '*'
if opts.id?
return inside.getElementById opts.id
hasCls = opts.cls?
if hasCls and tag is '*' and inside.getElementsByClassName?
return inside.getElementsByClassName opts.cls
els = inside.getElementsByTagName tag
if hasCls then els = (el for el in els when cls el, has: opts.cls)
if not opts.multi? and tag.toLowerCase() in get.uniqueTags then els[0] ? null else els
get.uniqueTags = 'html body frameset head title base'.split(' ')
text = (t) -> document.createTextNode '' + t
make = (opts = {}) -> # opts: tag, parent, prevSib, text, cls, [attrib]
t = document.createElement opts.tag ? 'div'
for own k, v of opts
switch k
when 'tag' then continue
when 'parent' then v.appendChild t
when 'kids' then t.appendChild c for c in v when c?
when 'prevSib' then v.parentNode.insertBefore t, v.nextSibling
when 'text' then t.appendChild text v
when 'cls' then t.className = v
else t[k] = v
t
jsonp = (opts) ->
callbackName = opts.callback ? '_JSONPCallback_' + jsonp.callbackNum++
url = opts.url.replace '<cb>', callbackName
window[callbackName] = opts.success ? jsonp.noop
make tag: 'script', src: url, parent: (get tag: 'head')
jsonp.callbackNum = 0
jsonp.noop = ->
# do it!
init()
|
[
{
"context": "i or module.exports.db or \"mongodb://user:password@example.com:port/database\"\n<% } %>\n\nexports.main()\n",
"end": 1615,
"score": 0.8088208436965942,
"start": 1603,
"tag": "EMAIL",
"value": "@example.com"
}
] | app/templates/src/name.coffee | 1egoman/generator-cmj | 0 | ###
* <%= props.name %>
* <%= props.homepage %>
*
* Copyright (c) <%= currentYear %> <%= props.authorName %>
* Licensed under the <%= props.license %> license.
###
'use strict';
app = require("express")()
chalk = require "chalk"
path = require "path"
bodyParser = require "body-parser"
exports.main = ->
# connect to database
<% if (props.models) { %>
exports.connectToDB()
<% } %>
# set ejs as view engine
app.set "view engine", "ejs"
# include all the required middleware
exports.middleware app
# some sample routes
<% if (props.views) { %>
app.get "/", (req, res) ->
res.render "index"
<% } else { %>
app.get "/", (req, res) ->
res.send "'Allo, World!"
<% } %>
# listen for requests
PORT = process.argv.port or 8000
app.listen PORT, ->
console.log chalk.blue "-> :#{PORT}"
exports.middleware = (app) ->
<% if (props.bodyparser === "form") { %>
# form body parser
app.use bodyParser.urlencoded
extended: true
<% } else if (props.bodyparser === "json") { %>
# json body parser
app.use bodyParser.json()
<% } %>
<% if (props.frontend) { %>
# include sass middleware to auto-compile sass stylesheets
node_sass = require "node-sass-middleware"
app.use node_sass
src: path.join(__dirname, "../public"),
dest: path.join(__dirname, "../public"),
debug: true
<% } %>
# serve static assets
app.use require("express-static") path.join(__dirname, '../public')
<% if (props.models) { %>
exports.connectToDB = ->
require("./db") module.exports.mongouri or module.exports.db or "mongodb://user:password@example.com:port/database"
<% } %>
exports.main()
| 96981 | ###
* <%= props.name %>
* <%= props.homepage %>
*
* Copyright (c) <%= currentYear %> <%= props.authorName %>
* Licensed under the <%= props.license %> license.
###
'use strict';
app = require("express")()
chalk = require "chalk"
path = require "path"
bodyParser = require "body-parser"
exports.main = ->
# connect to database
<% if (props.models) { %>
exports.connectToDB()
<% } %>
# set ejs as view engine
app.set "view engine", "ejs"
# include all the required middleware
exports.middleware app
# some sample routes
<% if (props.views) { %>
app.get "/", (req, res) ->
res.render "index"
<% } else { %>
app.get "/", (req, res) ->
res.send "'Allo, World!"
<% } %>
# listen for requests
PORT = process.argv.port or 8000
app.listen PORT, ->
console.log chalk.blue "-> :#{PORT}"
exports.middleware = (app) ->
<% if (props.bodyparser === "form") { %>
# form body parser
app.use bodyParser.urlencoded
extended: true
<% } else if (props.bodyparser === "json") { %>
# json body parser
app.use bodyParser.json()
<% } %>
<% if (props.frontend) { %>
# include sass middleware to auto-compile sass stylesheets
node_sass = require "node-sass-middleware"
app.use node_sass
src: path.join(__dirname, "../public"),
dest: path.join(__dirname, "../public"),
debug: true
<% } %>
# serve static assets
app.use require("express-static") path.join(__dirname, '../public')
<% if (props.models) { %>
exports.connectToDB = ->
require("./db") module.exports.mongouri or module.exports.db or "mongodb://user:password<EMAIL>:port/database"
<% } %>
exports.main()
| true | ###
* <%= props.name %>
* <%= props.homepage %>
*
* Copyright (c) <%= currentYear %> <%= props.authorName %>
* Licensed under the <%= props.license %> license.
###
'use strict';
app = require("express")()
chalk = require "chalk"
path = require "path"
bodyParser = require "body-parser"
exports.main = ->
# connect to database
<% if (props.models) { %>
exports.connectToDB()
<% } %>
# set ejs as view engine
app.set "view engine", "ejs"
# include all the required middleware
exports.middleware app
# some sample routes
<% if (props.views) { %>
app.get "/", (req, res) ->
res.render "index"
<% } else { %>
app.get "/", (req, res) ->
res.send "'Allo, World!"
<% } %>
# listen for requests
PORT = process.argv.port or 8000
app.listen PORT, ->
console.log chalk.blue "-> :#{PORT}"
exports.middleware = (app) ->
<% if (props.bodyparser === "form") { %>
# form body parser
app.use bodyParser.urlencoded
extended: true
<% } else if (props.bodyparser === "json") { %>
# json body parser
app.use bodyParser.json()
<% } %>
<% if (props.frontend) { %>
# include sass middleware to auto-compile sass stylesheets
node_sass = require "node-sass-middleware"
app.use node_sass
src: path.join(__dirname, "../public"),
dest: path.join(__dirname, "../public"),
debug: true
<% } %>
# serve static assets
app.use require("express-static") path.join(__dirname, '../public')
<% if (props.models) { %>
exports.connectToDB = ->
require("./db") module.exports.mongouri or module.exports.db or "mongodb://user:passwordPI:EMAIL:<EMAIL>END_PI:port/database"
<% } %>
exports.main()
|
[
{
"context": ", esc defer sig\n packet = new Signature { key : pair.ekid(), payload, sig, detached }\n cb null, pac",
"end": 1822,
"score": 0.6464026570320129,
"start": 1818,
"tag": "KEY",
"value": "pair"
},
{
"context": "defer sig\n packet = new Signature { key : pair.ekid(... | src/keybase/packet/signature.iced | johnnyRose/kbpgp | 151 |
konst = require '../../const'
K = konst.kb
C = konst.openpgp
{Packet} = require './base'
{make_esc} = require 'iced-error'
{eddsa} = require '../../nacl/main'
#=================================================================================
class Signature extends Packet
@SIG_TYPE : K.public_key_algorithms.NACL_EDDSA
@HASH_TYPE : C.hash_algorithms.SHA512
#------------------
@tag : () -> K.packet_tags.signature
tag : () -> Signature.tag()
#------------------
constructor : ({@key, @payload, @sig, @detached}) ->
super()
#------------------
get_packet_body : () ->
sig_type = Signature.SIG_TYPE
hash_type = Signature.HASH_TYPE
{ @key, @payload, @sig, @detached, sig_type, hash_type }
#------------------
@alloc : ({tag,body}) ->
ret = null
err = if tag isnt Signature.tag() then new Error "wrong tag found: #{tag}"
else if (a = body.hash_type) isnt (b = Signature.HASH_TYPE)
new Error "Expected SHA512 (type #{b}); got #{a}"
else if (a = body.sig_type) isnt (b = Signature.SIG_TYPE)
err = new Error "Expected EDDSA (type #{b}); got #{a}"
else
ret = new Signature body
null
throw err if err?
ret
#------------------
is_signature : () -> true
#------------------
verify : (cb) ->
esc = make_esc cb, "verify"
err = km = null
[err, pair] = eddsa.Pair.parse_kb @key
if not err?
await pair.verify_kb @, esc defer()
cb err, { keypair : pair, @payload }
#------------------
unbox : (params, cb) ->
await @verify defer err, res
cb err, res
#------------------
@box : ({km, payload}, cb) ->
esc = make_esc cb, "@sign"
pair = km.get_keypair()
detached = true
await pair.sign_kb { payload, detached }, esc defer sig
packet = new Signature { key : pair.ekid(), payload, sig, detached }
cb null, packet
#=================================================================================
exports.Signature = Signature
exports.sign = Signature.sign
| 9610 |
konst = require '../../const'
K = konst.kb
C = konst.openpgp
{Packet} = require './base'
{make_esc} = require 'iced-error'
{eddsa} = require '../../nacl/main'
#=================================================================================
class Signature extends Packet
@SIG_TYPE : K.public_key_algorithms.NACL_EDDSA
@HASH_TYPE : C.hash_algorithms.SHA512
#------------------
@tag : () -> K.packet_tags.signature
tag : () -> Signature.tag()
#------------------
constructor : ({@key, @payload, @sig, @detached}) ->
super()
#------------------
get_packet_body : () ->
sig_type = Signature.SIG_TYPE
hash_type = Signature.HASH_TYPE
{ @key, @payload, @sig, @detached, sig_type, hash_type }
#------------------
@alloc : ({tag,body}) ->
ret = null
err = if tag isnt Signature.tag() then new Error "wrong tag found: #{tag}"
else if (a = body.hash_type) isnt (b = Signature.HASH_TYPE)
new Error "Expected SHA512 (type #{b}); got #{a}"
else if (a = body.sig_type) isnt (b = Signature.SIG_TYPE)
err = new Error "Expected EDDSA (type #{b}); got #{a}"
else
ret = new Signature body
null
throw err if err?
ret
#------------------
is_signature : () -> true
#------------------
verify : (cb) ->
esc = make_esc cb, "verify"
err = km = null
[err, pair] = eddsa.Pair.parse_kb @key
if not err?
await pair.verify_kb @, esc defer()
cb err, { keypair : pair, @payload }
#------------------
unbox : (params, cb) ->
await @verify defer err, res
cb err, res
#------------------
@box : ({km, payload}, cb) ->
esc = make_esc cb, "@sign"
pair = km.get_keypair()
detached = true
await pair.sign_kb { payload, detached }, esc defer sig
packet = new Signature { key : <KEY>.<KEY>(), payload, sig, detached }
cb null, packet
#=================================================================================
exports.Signature = Signature
exports.sign = Signature.sign
| true |
konst = require '../../const'
K = konst.kb
C = konst.openpgp
{Packet} = require './base'
{make_esc} = require 'iced-error'
{eddsa} = require '../../nacl/main'
#=================================================================================
class Signature extends Packet
@SIG_TYPE : K.public_key_algorithms.NACL_EDDSA
@HASH_TYPE : C.hash_algorithms.SHA512
#------------------
@tag : () -> K.packet_tags.signature
tag : () -> Signature.tag()
#------------------
constructor : ({@key, @payload, @sig, @detached}) ->
super()
#------------------
get_packet_body : () ->
sig_type = Signature.SIG_TYPE
hash_type = Signature.HASH_TYPE
{ @key, @payload, @sig, @detached, sig_type, hash_type }
#------------------
@alloc : ({tag,body}) ->
ret = null
err = if tag isnt Signature.tag() then new Error "wrong tag found: #{tag}"
else if (a = body.hash_type) isnt (b = Signature.HASH_TYPE)
new Error "Expected SHA512 (type #{b}); got #{a}"
else if (a = body.sig_type) isnt (b = Signature.SIG_TYPE)
err = new Error "Expected EDDSA (type #{b}); got #{a}"
else
ret = new Signature body
null
throw err if err?
ret
#------------------
is_signature : () -> true
#------------------
verify : (cb) ->
esc = make_esc cb, "verify"
err = km = null
[err, pair] = eddsa.Pair.parse_kb @key
if not err?
await pair.verify_kb @, esc defer()
cb err, { keypair : pair, @payload }
#------------------
unbox : (params, cb) ->
await @verify defer err, res
cb err, res
#------------------
@box : ({km, payload}, cb) ->
esc = make_esc cb, "@sign"
pair = km.get_keypair()
detached = true
await pair.sign_kb { payload, detached }, esc defer sig
packet = new Signature { key : PI:KEY:<KEY>END_PI.PI:KEY:<KEY>END_PI(), payload, sig, detached }
cb null, packet
#=================================================================================
exports.Signature = Signature
exports.sign = Signature.sign
|
[
{
"context": "e should return outcome', -> #<https://github.com/fhirbase/fhirbase-plv8/issues/95>\n schema.fhir_drop_sto",
"end": 2914,
"score": 0.9994145631790161,
"start": 2906,
"tag": "USERNAME",
"value": "fhirbase"
},
{
"context": " resourceType: 'Patient', queryString: '... | test/fhir/search_spec.coffee | micabe/fhirbase | 0 | search = require('../../src/fhir/search')
schema = require('../../src/core/schema')
crud = require('../../src/fhir/crud')
honey = require('../../src/honey')
plv8 = require('../../plpl/src/plv8')
fs = require('fs')
test = require('../helpers.coffee')
assert = require('assert')
# plv8.debug = true
get_in = (obj, path)->
cur = obj
cur = cur[item] for item in path when cur
cur
match = (x)-> (y)-> y.indexOf(x) > -1
# plv8.debug = true
# console.log plv8.execute("SET search_path='user1';")
# console.log plv8.execute("SHOW search_path;")
FILTER = 'uri'
FILTER = 'incl'
FILTER = 'search'
fs.readdirSync("#{__dirname}/search").filter(match(FILTER)).forEach (yml)->
spec = test.loadYaml("#{__dirname}/search/#{yml}")
describe spec.title, ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
# plv8.debug = true
for res in spec.resources
schema.fhir_create_storage(plv8, resourceType: res)
schema.fhir_truncate_storage(plv8, resourceType: res)
for res in spec.fixtures
crud.fhir_create_resource(plv8, allowId: true, resource: res)
for idx in (spec.indices or [])
search.fhir_unindex_parameter(plv8, idx)
console.log("INDEX", idx);
search.fhir_index_parameter(plv8, idx)
for idx_ord in (spec.index_order or [])
search.fhir_unindex_order(plv8, idx_ord.query)
search.fhir_index_order(plv8, idx_ord.query)
for res in spec.resources
search.fhir_analyze_storage(plv8, resourceType: res)
spec.queries.forEach (q)->
it "#{JSON.stringify(q.query)}", ->
plv8.execute "SET enable_seqscan = OFF;" if (q.indexed or q.indexed_order)
res = search.fhir_search(plv8, q.query)
# console.log(JSON.stringify(res))
explain = JSON.stringify(search.fhir_explain_search(plv8, q.query))
#console.log(JSON.stringify(search.fhir_search_sql(plv8, q.query), null, 2))
plv8.execute "SET enable_seqscan = ON;" if (q.indexed or q.indexed_order)
if q.total || q.total == 0
if q.total == "_undefined"
assert.equal(res.total, undefined)
else
assert.equal(res.total, q.total)
(q.probes || []).forEach (probe)->
if probe.result == "_undefined"
assert.equal(get_in(res, probe.path), undefined)
else
assert.equal(get_in(res, probe.path), probe.result)
# console.log(explain)
if q.indexed
assert(explain.indexOf("Index Cond") > -1, "Should be indexed but #{explain}")
if q.indexed_order
assert((explain.indexOf("Index Scan") > -1) && (explain.indexOf("Scan Direction") > -1), "Should be indexed but #{explain}")
describe 'Search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
it 'by nonexistent resource storge should return outcome', -> #<https://github.com/fhirbase/fhirbase-plv8/issues/95>
schema.fhir_drop_storage(plv8, resourceType: 'Patient')
outcome = search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'name=foobar'
)
assert.equal(outcome.resourceType, 'OperationOutcome')
assert.equal(outcome.issue[0].code, 'not-found')
assert.equal(outcome.issue[0].details.coding[0].code, 'MSG_UNKNOWN_TYPE')
assert.equal(
outcome.issue[0].details.coding[0].display,
'Resource Type "Patient" not recognised'
)
describe 'AuditEvent search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_drop_storage(plv8, resourceType: 'AuditEvent')
schema.fhir_create_storage(plv8, resourceType: 'AuditEvent')
search.fhir_index_parameter(plv8,
resourceType: 'AuditEvent', name: 'action')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'AuditEvent')
crud.fhir_create_resource(plv8, resource: {
resourceType: 'AuditEvent',
entity: {name: 'foo'}
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'AuditEvent',
entity: {name: 'bar'},
action: 'R'
})
it 'action', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'action=R').total,
1)
it 'entity-name', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=foo').total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=bar').total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=muhaha').total,
0)
it 'entity-name and action', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=foo,action=R').total,
1)
describe 'Search normalize', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_create_storage(plv8, resourceType: 'Patient')
schema.fhir_create_storage(plv8, resourceType: 'MedicationAdministration')
search.fhir_index_parameter(plv8, resourceType: 'Patient', name: 'name')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'Patient')
schema.fhir_truncate_storage(plv8, resourceType: 'MedicationAdministration')
crud.fhir_create_resource(plv8, allowId: true, resource: {
id: 'patient-id', resourceType: 'Patient', name: [{given: ['bar']}]
})
crud.fhir_create_resource(plv8, allowId: true, resource: {
id: 'medication-administration-id',
resourceType: 'MedicationAdministration',
patient: {reference: 'Patient/patient-id'}
})
describe 'by id', ->
it 'as id', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'Patient',
queryString: '_id=patient-id'
).total,
1)
it 'as URL', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'Patient',
queryString: '_id=http://fhirbase/Patient/patient-id'
).total,
1)
it 'as URL with history', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'Patient',
queryString: '_id=http://fhirbase/Patient/patient-id/_history/patient-fake-history-id'
).total,
1)
describe 'by reference', ->
it 'as reference', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=Patient/patient-id'
).total,
1)
it 'as reference beginning with slash', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=/Patient/patient-id'
).total,
1)
it 'as reference beginning with slash with history', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=/Patient/patient-id/_history/patient-fake-history-id'
).total,
1)
it 'as URL', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=http://fhirbase/Patient/patient-id'
).total,
1)
it 'as URL with history', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=https://fhirbase/Patient/patient-id/_history/patient-fake-history-id'
).total,
1)
describe 'Date search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_create_storage(plv8, resourceType: 'Patient')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'Patient')
it 'by birthDate', ->
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
birthDate: '1970-01-01'
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
birthDate: '2000-01-01'
})
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'birthdate=lt2010').total,
2)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: 'birthdate=ge2000-01-01&birthdate=le2010-01-01'
).total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'birthdate=gt2010').total,
0)
it 'with format 1970-12-31T01:23+0300', ->
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
birthDate: '1989-02-07T05:26+0300'
})
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'birthdate=lt2000').total,
1)
it 'by lastUpdated', ->
createPatient = (dateString)->
patient = crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
})
patient.meta.lastUpdated = new Date(dateString)
plv8.execute(
'''
UPDATE patient
SET created_at = $1::timestamptz,
updated_at = $1::timestamptz,
resource = $2
WHERE id = $3
''',
[JSON.stringify(dateString), JSON.stringify(patient), patient.id]
)
createPatient('1970-01-01')
createPatient('2010-01-01')
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=eq1970-01-01'
).total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=1970-01-01'
).total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=lt1970-01-01'
).total,
0)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=ge1970-01-01&_lastUpdated=le2010-01-01'
).total,
2)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=gt1960-01-01&_lastUpdated=lt2000-01-01'
).total,
1)
describe 'Encounter search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_create_storage(plv8, resourceType: 'Encounter')
schema.fhir_create_storage(plv8, resourceType: 'Patient')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'Encounter')
crud.fhir_create_resource(plv8, allowId: true, resource: {
id: 'patient-id', resourceType: 'Patient', name: [{given: ['John']}]
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Encounter',
status: 'planned'
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Encounter',
patient: {reference: 'Patient/patient-id'},
status: 'finished'
})
it 'by patient name', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'Encounter',
queryString: 'patient:Patient.name=John'
).total,
1)
it 'by status', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'Encounter',
queryString: 'status=finished'
).total,
1)
it 'by patient name AND status should raise error', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'Encounter',
queryString: 'patient:Patient.name=John&status=finished'
).total,
1)
| 156576 | search = require('../../src/fhir/search')
schema = require('../../src/core/schema')
crud = require('../../src/fhir/crud')
honey = require('../../src/honey')
plv8 = require('../../plpl/src/plv8')
fs = require('fs')
test = require('../helpers.coffee')
assert = require('assert')
# plv8.debug = true
get_in = (obj, path)->
cur = obj
cur = cur[item] for item in path when cur
cur
match = (x)-> (y)-> y.indexOf(x) > -1
# plv8.debug = true
# console.log plv8.execute("SET search_path='user1';")
# console.log plv8.execute("SHOW search_path;")
FILTER = 'uri'
FILTER = 'incl'
FILTER = 'search'
fs.readdirSync("#{__dirname}/search").filter(match(FILTER)).forEach (yml)->
spec = test.loadYaml("#{__dirname}/search/#{yml}")
describe spec.title, ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
# plv8.debug = true
for res in spec.resources
schema.fhir_create_storage(plv8, resourceType: res)
schema.fhir_truncate_storage(plv8, resourceType: res)
for res in spec.fixtures
crud.fhir_create_resource(plv8, allowId: true, resource: res)
for idx in (spec.indices or [])
search.fhir_unindex_parameter(plv8, idx)
console.log("INDEX", idx);
search.fhir_index_parameter(plv8, idx)
for idx_ord in (spec.index_order or [])
search.fhir_unindex_order(plv8, idx_ord.query)
search.fhir_index_order(plv8, idx_ord.query)
for res in spec.resources
search.fhir_analyze_storage(plv8, resourceType: res)
spec.queries.forEach (q)->
it "#{JSON.stringify(q.query)}", ->
plv8.execute "SET enable_seqscan = OFF;" if (q.indexed or q.indexed_order)
res = search.fhir_search(plv8, q.query)
# console.log(JSON.stringify(res))
explain = JSON.stringify(search.fhir_explain_search(plv8, q.query))
#console.log(JSON.stringify(search.fhir_search_sql(plv8, q.query), null, 2))
plv8.execute "SET enable_seqscan = ON;" if (q.indexed or q.indexed_order)
if q.total || q.total == 0
if q.total == "_undefined"
assert.equal(res.total, undefined)
else
assert.equal(res.total, q.total)
(q.probes || []).forEach (probe)->
if probe.result == "_undefined"
assert.equal(get_in(res, probe.path), undefined)
else
assert.equal(get_in(res, probe.path), probe.result)
# console.log(explain)
if q.indexed
assert(explain.indexOf("Index Cond") > -1, "Should be indexed but #{explain}")
if q.indexed_order
assert((explain.indexOf("Index Scan") > -1) && (explain.indexOf("Scan Direction") > -1), "Should be indexed but #{explain}")
describe 'Search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
it 'by nonexistent resource storge should return outcome', -> #<https://github.com/fhirbase/fhirbase-plv8/issues/95>
schema.fhir_drop_storage(plv8, resourceType: 'Patient')
outcome = search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'name=foobar'
)
assert.equal(outcome.resourceType, 'OperationOutcome')
assert.equal(outcome.issue[0].code, 'not-found')
assert.equal(outcome.issue[0].details.coding[0].code, 'MSG_UNKNOWN_TYPE')
assert.equal(
outcome.issue[0].details.coding[0].display,
'Resource Type "Patient" not recognised'
)
describe 'AuditEvent search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_drop_storage(plv8, resourceType: 'AuditEvent')
schema.fhir_create_storage(plv8, resourceType: 'AuditEvent')
search.fhir_index_parameter(plv8,
resourceType: 'AuditEvent', name: 'action')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'AuditEvent')
crud.fhir_create_resource(plv8, resource: {
resourceType: 'AuditEvent',
entity: {name: '<NAME>'}
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'AuditEvent',
entity: {name: '<NAME>'},
action: 'R'
})
it 'action', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'action=R').total,
1)
it 'entity-name', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=foo').total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=bar').total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=muhaha').total,
0)
it 'entity-name and action', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=foo,action=R').total,
1)
describe 'Search normalize', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_create_storage(plv8, resourceType: 'Patient')
schema.fhir_create_storage(plv8, resourceType: 'MedicationAdministration')
search.fhir_index_parameter(plv8, resourceType: 'Patient', name: 'name')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'Patient')
schema.fhir_truncate_storage(plv8, resourceType: 'MedicationAdministration')
crud.fhir_create_resource(plv8, allowId: true, resource: {
id: 'patient-id', resourceType: 'Patient', name: [{given: ['<NAME>']}]
})
crud.fhir_create_resource(plv8, allowId: true, resource: {
id: 'medication-administration-id',
resourceType: 'MedicationAdministration',
patient: {reference: 'Patient/patient-id'}
})
describe 'by id', ->
it 'as id', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'Patient',
queryString: '_id=patient-id'
).total,
1)
it 'as URL', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'Patient',
queryString: '_id=http://fhirbase/Patient/patient-id'
).total,
1)
it 'as URL with history', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'Patient',
queryString: '_id=http://fhirbase/Patient/patient-id/_history/patient-fake-history-id'
).total,
1)
describe 'by reference', ->
it 'as reference', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=Patient/patient-id'
).total,
1)
it 'as reference beginning with slash', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=/Patient/patient-id'
).total,
1)
it 'as reference beginning with slash with history', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=/Patient/patient-id/_history/patient-fake-history-id'
).total,
1)
it 'as URL', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=http://fhirbase/Patient/patient-id'
).total,
1)
it 'as URL with history', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=https://fhirbase/Patient/patient-id/_history/patient-fake-history-id'
).total,
1)
describe 'Date search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_create_storage(plv8, resourceType: 'Patient')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'Patient')
it 'by birthDate', ->
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
birthDate: '1970-01-01'
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
birthDate: '2000-01-01'
})
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'birthdate=lt2010').total,
2)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: 'birthdate=ge2000-01-01&birthdate=le2010-01-01'
).total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'birthdate=gt2010').total,
0)
it 'with format 1970-12-31T01:23+0300', ->
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
birthDate: '1989-02-07T05:26+0300'
})
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'birthdate=lt2000').total,
1)
it 'by lastUpdated', ->
createPatient = (dateString)->
patient = crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
})
patient.meta.lastUpdated = new Date(dateString)
plv8.execute(
'''
UPDATE patient
SET created_at = $1::timestamptz,
updated_at = $1::timestamptz,
resource = $2
WHERE id = $3
''',
[JSON.stringify(dateString), JSON.stringify(patient), patient.id]
)
createPatient('1970-01-01')
createPatient('2010-01-01')
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=eq1970-01-01'
).total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=1970-01-01'
).total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=lt1970-01-01'
).total,
0)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=ge1970-01-01&_lastUpdated=le2010-01-01'
).total,
2)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=gt1960-01-01&_lastUpdated=lt2000-01-01'
).total,
1)
describe 'Encounter search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_create_storage(plv8, resourceType: 'Encounter')
schema.fhir_create_storage(plv8, resourceType: 'Patient')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'Encounter')
crud.fhir_create_resource(plv8, allowId: true, resource: {
id: 'patient-id', resourceType: 'Patient', name: [{given: ['<NAME>']}]
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Encounter',
status: 'planned'
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Encounter',
patient: {reference: 'Patient/patient-id'},
status: 'finished'
})
it 'by patient name', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'Encounter',
queryString: 'patient:Patient.name=<NAME>'
).total,
1)
it 'by status', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'Encounter',
queryString: 'status=finished'
).total,
1)
it 'by patient name AND status should raise error', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'Encounter',
queryString: 'patient:Patient.name=<NAME>&status=finished'
).total,
1)
| true | search = require('../../src/fhir/search')
schema = require('../../src/core/schema')
crud = require('../../src/fhir/crud')
honey = require('../../src/honey')
plv8 = require('../../plpl/src/plv8')
fs = require('fs')
test = require('../helpers.coffee')
assert = require('assert')
# plv8.debug = true
get_in = (obj, path)->
cur = obj
cur = cur[item] for item in path when cur
cur
match = (x)-> (y)-> y.indexOf(x) > -1
# plv8.debug = true
# console.log plv8.execute("SET search_path='user1';")
# console.log plv8.execute("SHOW search_path;")
FILTER = 'uri'
FILTER = 'incl'
FILTER = 'search'
fs.readdirSync("#{__dirname}/search").filter(match(FILTER)).forEach (yml)->
spec = test.loadYaml("#{__dirname}/search/#{yml}")
describe spec.title, ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
# plv8.debug = true
for res in spec.resources
schema.fhir_create_storage(plv8, resourceType: res)
schema.fhir_truncate_storage(plv8, resourceType: res)
for res in spec.fixtures
crud.fhir_create_resource(plv8, allowId: true, resource: res)
for idx in (spec.indices or [])
search.fhir_unindex_parameter(plv8, idx)
console.log("INDEX", idx);
search.fhir_index_parameter(plv8, idx)
for idx_ord in (spec.index_order or [])
search.fhir_unindex_order(plv8, idx_ord.query)
search.fhir_index_order(plv8, idx_ord.query)
for res in spec.resources
search.fhir_analyze_storage(plv8, resourceType: res)
spec.queries.forEach (q)->
it "#{JSON.stringify(q.query)}", ->
plv8.execute "SET enable_seqscan = OFF;" if (q.indexed or q.indexed_order)
res = search.fhir_search(plv8, q.query)
# console.log(JSON.stringify(res))
explain = JSON.stringify(search.fhir_explain_search(plv8, q.query))
#console.log(JSON.stringify(search.fhir_search_sql(plv8, q.query), null, 2))
plv8.execute "SET enable_seqscan = ON;" if (q.indexed or q.indexed_order)
if q.total || q.total == 0
if q.total == "_undefined"
assert.equal(res.total, undefined)
else
assert.equal(res.total, q.total)
(q.probes || []).forEach (probe)->
if probe.result == "_undefined"
assert.equal(get_in(res, probe.path), undefined)
else
assert.equal(get_in(res, probe.path), probe.result)
# console.log(explain)
if q.indexed
assert(explain.indexOf("Index Cond") > -1, "Should be indexed but #{explain}")
if q.indexed_order
assert((explain.indexOf("Index Scan") > -1) && (explain.indexOf("Scan Direction") > -1), "Should be indexed but #{explain}")
describe 'Search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
it 'by nonexistent resource storge should return outcome', -> #<https://github.com/fhirbase/fhirbase-plv8/issues/95>
schema.fhir_drop_storage(plv8, resourceType: 'Patient')
outcome = search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'name=foobar'
)
assert.equal(outcome.resourceType, 'OperationOutcome')
assert.equal(outcome.issue[0].code, 'not-found')
assert.equal(outcome.issue[0].details.coding[0].code, 'MSG_UNKNOWN_TYPE')
assert.equal(
outcome.issue[0].details.coding[0].display,
'Resource Type "Patient" not recognised'
)
describe 'AuditEvent search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_drop_storage(plv8, resourceType: 'AuditEvent')
schema.fhir_create_storage(plv8, resourceType: 'AuditEvent')
search.fhir_index_parameter(plv8,
resourceType: 'AuditEvent', name: 'action')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'AuditEvent')
crud.fhir_create_resource(plv8, resource: {
resourceType: 'AuditEvent',
entity: {name: 'PI:NAME:<NAME>END_PI'}
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'AuditEvent',
entity: {name: 'PI:NAME:<NAME>END_PI'},
action: 'R'
})
it 'action', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'action=R').total,
1)
it 'entity-name', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=foo').total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=bar').total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=muhaha').total,
0)
it 'entity-name and action', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'AuditEvent', queryString: 'entity-name=foo,action=R').total,
1)
describe 'Search normalize', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_create_storage(plv8, resourceType: 'Patient')
schema.fhir_create_storage(plv8, resourceType: 'MedicationAdministration')
search.fhir_index_parameter(plv8, resourceType: 'Patient', name: 'name')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'Patient')
schema.fhir_truncate_storage(plv8, resourceType: 'MedicationAdministration')
crud.fhir_create_resource(plv8, allowId: true, resource: {
id: 'patient-id', resourceType: 'Patient', name: [{given: ['PI:NAME:<NAME>END_PI']}]
})
crud.fhir_create_resource(plv8, allowId: true, resource: {
id: 'medication-administration-id',
resourceType: 'MedicationAdministration',
patient: {reference: 'Patient/patient-id'}
})
describe 'by id', ->
it 'as id', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'Patient',
queryString: '_id=patient-id'
).total,
1)
it 'as URL', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'Patient',
queryString: '_id=http://fhirbase/Patient/patient-id'
).total,
1)
it 'as URL with history', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'Patient',
queryString: '_id=http://fhirbase/Patient/patient-id/_history/patient-fake-history-id'
).total,
1)
describe 'by reference', ->
it 'as reference', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=Patient/patient-id'
).total,
1)
it 'as reference beginning with slash', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=/Patient/patient-id'
).total,
1)
it 'as reference beginning with slash with history', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=/Patient/patient-id/_history/patient-fake-history-id'
).total,
1)
it 'as URL', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=http://fhirbase/Patient/patient-id'
).total,
1)
it 'as URL with history', ->
assert.equal(
search.fhir_search(
plv8,
resourceType: 'MedicationAdministration',
queryString: 'patient=https://fhirbase/Patient/patient-id/_history/patient-fake-history-id'
).total,
1)
describe 'Date search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_create_storage(plv8, resourceType: 'Patient')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'Patient')
it 'by birthDate', ->
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
birthDate: '1970-01-01'
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
birthDate: '2000-01-01'
})
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'birthdate=lt2010').total,
2)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: 'birthdate=ge2000-01-01&birthdate=le2010-01-01'
).total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'birthdate=gt2010').total,
0)
it 'with format 1970-12-31T01:23+0300', ->
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
birthDate: '1989-02-07T05:26+0300'
})
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient', queryString: 'birthdate=lt2000').total,
1)
it 'by lastUpdated', ->
createPatient = (dateString)->
patient = crud.fhir_create_resource(plv8, resource: {
resourceType: 'Patient'
})
patient.meta.lastUpdated = new Date(dateString)
plv8.execute(
'''
UPDATE patient
SET created_at = $1::timestamptz,
updated_at = $1::timestamptz,
resource = $2
WHERE id = $3
''',
[JSON.stringify(dateString), JSON.stringify(patient), patient.id]
)
createPatient('1970-01-01')
createPatient('2010-01-01')
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=eq1970-01-01'
).total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=1970-01-01'
).total,
1)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=lt1970-01-01'
).total,
0)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=ge1970-01-01&_lastUpdated=le2010-01-01'
).total,
2)
assert.equal(
search.fhir_search(plv8,
resourceType: 'Patient',
queryString: '_lastUpdated=gt1960-01-01&_lastUpdated=lt2000-01-01'
).total,
1)
describe 'Encounter search', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
schema.fhir_create_storage(plv8, resourceType: 'Encounter')
schema.fhir_create_storage(plv8, resourceType: 'Patient')
beforeEach ->
schema.fhir_truncate_storage(plv8, resourceType: 'Encounter')
crud.fhir_create_resource(plv8, allowId: true, resource: {
id: 'patient-id', resourceType: 'Patient', name: [{given: ['PI:NAME:<NAME>END_PI']}]
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Encounter',
status: 'planned'
})
crud.fhir_create_resource(plv8, resource: {
resourceType: 'Encounter',
patient: {reference: 'Patient/patient-id'},
status: 'finished'
})
it 'by patient name', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'Encounter',
queryString: 'patient:Patient.name=PI:NAME:<NAME>END_PI'
).total,
1)
it 'by status', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'Encounter',
queryString: 'status=finished'
).total,
1)
it 'by patient name AND status should raise error', ->
assert.equal(
search.fhir_search(plv8,
resourceType: 'Encounter',
queryString: 'patient:Patient.name=PI:NAME:<NAME>END_PI&status=finished'
).total,
1)
|
[
{
"context": " \n ```matlab\n % 定义一个struct\n >> patient.name = 'John Doe';\n >> patient.billing = 127.00;\n >> patient.tes",
"end": 1231,
"score": 0.9998037815093994,
"start": 1223,
"tag": "NAME",
"value": "John Doe"
}
] | notes/7bb43dfcc558fc6eb437.cson | Shadowalker1995/Boostnote | 0 | createdAt: "2018-05-17T05:45:07.763Z"
updatedAt: "2018-05-17T05:45:09.632Z"
type: "MARKDOWN_NOTE"
folder: "e48cec40c409e6ad5488"
title: "MATLAB exist判断文件夹是否存在并建立文件夹 isfield判断struct isempty判断矩阵"
content: '''
# MATLAB exist判断文件夹是否存在并建立文件夹 isfield判断struct isempty判断矩阵
### 1. exist判断当前目录是否存在指定文件夹
- **example 1**
```matlab
if ~exist('Directtory', 'dir')
mkdir('Directory') % if not exist, make dir 'Directory' in current directory
end
```
- **example2**
判断并建立多层目录
```matlab
if ~exist('./fig/Figure', 'dir')
mkdir('./fig/Figure') % if not exist, make a series dirs in current directory
end
```
### 2. exist 还可以用于判断目录、内置函数(buildin)、文件、class和变量(var)是否存在
**Syntax**
`exist name`
`exist name kind`
`A = exist('name','kind')`
kind包括:
1. **builtin** Checks only for built-in functions.
. **class** Checks only for classes.
. **dir** Checks only for directories.
. **file** Checks only for files or directories.
. **var** Checks only for variables.
注意这里的var不能用于struct内的子field判定,field可参考下一段
### 3. isfield判断struct是否有指定子filed
```matlab
% 定义一个struct
>> patient.name = 'John Doe';
>> patient.billing = 127.00;
>> patient.test = [79 75 73; 180 178 177.5; 220 210 205];
% 检测该struct是否存在指定filed
>> isfield(patient,'billing')
ans = 1
```
### 4. isempty用于判断矩阵是否为空
```matlab
>> B = rand(2,2,2);
>> B(:,:,:) = []; % B此时为零矩阵
>> isempty(B)
ans = 1
```
'''
tags: []
isStarred: false
isTrashed: false
| 162032 | createdAt: "2018-05-17T05:45:07.763Z"
updatedAt: "2018-05-17T05:45:09.632Z"
type: "MARKDOWN_NOTE"
folder: "e48cec40c409e6ad5488"
title: "MATLAB exist判断文件夹是否存在并建立文件夹 isfield判断struct isempty判断矩阵"
content: '''
# MATLAB exist判断文件夹是否存在并建立文件夹 isfield判断struct isempty判断矩阵
### 1. exist判断当前目录是否存在指定文件夹
- **example 1**
```matlab
if ~exist('Directtory', 'dir')
mkdir('Directory') % if not exist, make dir 'Directory' in current directory
end
```
- **example2**
判断并建立多层目录
```matlab
if ~exist('./fig/Figure', 'dir')
mkdir('./fig/Figure') % if not exist, make a series dirs in current directory
end
```
### 2. exist 还可以用于判断目录、内置函数(buildin)、文件、class和变量(var)是否存在
**Syntax**
`exist name`
`exist name kind`
`A = exist('name','kind')`
kind包括:
1. **builtin** Checks only for built-in functions.
. **class** Checks only for classes.
. **dir** Checks only for directories.
. **file** Checks only for files or directories.
. **var** Checks only for variables.
注意这里的var不能用于struct内的子field判定,field可参考下一段
### 3. isfield判断struct是否有指定子filed
```matlab
% 定义一个struct
>> patient.name = '<NAME>';
>> patient.billing = 127.00;
>> patient.test = [79 75 73; 180 178 177.5; 220 210 205];
% 检测该struct是否存在指定filed
>> isfield(patient,'billing')
ans = 1
```
### 4. isempty用于判断矩阵是否为空
```matlab
>> B = rand(2,2,2);
>> B(:,:,:) = []; % B此时为零矩阵
>> isempty(B)
ans = 1
```
'''
tags: []
isStarred: false
isTrashed: false
| true | createdAt: "2018-05-17T05:45:07.763Z"
updatedAt: "2018-05-17T05:45:09.632Z"
type: "MARKDOWN_NOTE"
folder: "e48cec40c409e6ad5488"
title: "MATLAB exist判断文件夹是否存在并建立文件夹 isfield判断struct isempty判断矩阵"
content: '''
# MATLAB exist判断文件夹是否存在并建立文件夹 isfield判断struct isempty判断矩阵
### 1. exist判断当前目录是否存在指定文件夹
- **example 1**
```matlab
if ~exist('Directtory', 'dir')
mkdir('Directory') % if not exist, make dir 'Directory' in current directory
end
```
- **example2**
判断并建立多层目录
```matlab
if ~exist('./fig/Figure', 'dir')
mkdir('./fig/Figure') % if not exist, make a series dirs in current directory
end
```
### 2. exist 还可以用于判断目录、内置函数(buildin)、文件、class和变量(var)是否存在
**Syntax**
`exist name`
`exist name kind`
`A = exist('name','kind')`
kind包括:
1. **builtin** Checks only for built-in functions.
. **class** Checks only for classes.
. **dir** Checks only for directories.
. **file** Checks only for files or directories.
. **var** Checks only for variables.
注意这里的var不能用于struct内的子field判定,field可参考下一段
### 3. isfield判断struct是否有指定子filed
```matlab
% 定义一个struct
>> patient.name = 'PI:NAME:<NAME>END_PI';
>> patient.billing = 127.00;
>> patient.test = [79 75 73; 180 178 177.5; 220 210 205];
% 检测该struct是否存在指定filed
>> isfield(patient,'billing')
ans = 1
```
### 4. isempty用于判断矩阵是否为空
```matlab
>> B = rand(2,2,2);
>> B(:,:,:) = []; % B此时为零矩阵
>> isempty(B)
ans = 1
```
'''
tags: []
isStarred: false
isTrashed: false
|
[
{
"context": "ort}\"\n session = new flowdock.Session('test', 'password')\n session.on 'error', -> #noop\n\n afterEach -",
"end": 402,
"score": 0.9894372224807739,
"start": 394,
"tag": "PASSWORD",
"value": "password"
}
] | test/flowdock.test.coffee | cos-forks/node-flowdock | 17 | assert = require 'assert'
flowdock = require __dirname + '/../src/flowdock'
Mockdock = require('./helper').Mockdock
describe 'Flowdock', ->
mockdock = Mockdock.start()
session = null
beforeEach ->
process.env.FLOWDOCK_STREAM_URL = "http://localhost:#{mockdock.port}"
process.env.FLOWDOCK_API_URL = "http://localhost:#{mockdock.port}"
session = new flowdock.Session('test', 'password')
session.on 'error', -> #noop
afterEach ->
mockdock.removeAllListeners()
describe 'stream', ->
it 'can handle array parameter', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/?filter=example%3Amain%2Cexample%3Atest'
res.setHeader('Content-Type', 'application/json')
res.end('{}')
stream = session.stream ['example:main', 'example:test']
stream.on 'connected', ->
stream.removeAllListeners()
done()
assert.deepEqual stream.flows, ['example:main', 'example:test']
it 'can handle single flow', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/?filter=example%3Amain'
res.setHeader('Content-Type', 'application/json')
res.end('{}')
stream = session.stream 'example:main'
stream.on 'connected', ->
stream.removeAllListeners()
done()
assert.deepEqual stream.flows, ['example:main']
describe 'invitations', ->
it 'can send an invitation', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/flows/org1/flow1/invitations'
res.setHeader('Content-Type', 'application/json')
res.end('{}')
session.invite 'flow1', 'org1', 'test@localhost', 'test message', (err, data, result) ->
assert.equal err, null
done()
describe '_request', ->
it 'makes a sensible request', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/flows/find?id=acdcabbacd1234567890'
res.setHeader('Content-Type', 'application/json')
res.end('{"flow":"foo"}')
session._request 'get', '/flows/find', {id: 'acdcabbacd1234567890'}, (err, data, res) ->
assert.equal err, null
assert.deepEqual data, {flow: "foo"}
done()
describe 'Session', ->
it 'should optionally take a URL', (done) ->
alt_mockdock = Mockdock.start()
alt_session = new flowdock.Session('test', 'password', "http://localhost:#{alt_mockdock.port}")
alt_mockdock.on 'request', (req, res) ->
assert.equal req.url, '/flows/find?id=acdcabbacd1234567890'
res.setHeader('Content-Type', 'application/json')
res.end('{"flow":"foo"}')
alt_session._request 'get', '/flows/find', {id: 'acdcabbacd1234567890'}, (err, data, res) ->
assert.equal err, null
assert.deepEqual data, {flow: "foo"}
alt_mockdock.removeAllListeners()
done()
| 44911 | assert = require 'assert'
flowdock = require __dirname + '/../src/flowdock'
Mockdock = require('./helper').Mockdock
describe 'Flowdock', ->
mockdock = Mockdock.start()
session = null
beforeEach ->
process.env.FLOWDOCK_STREAM_URL = "http://localhost:#{mockdock.port}"
process.env.FLOWDOCK_API_URL = "http://localhost:#{mockdock.port}"
session = new flowdock.Session('test', '<PASSWORD>')
session.on 'error', -> #noop
afterEach ->
mockdock.removeAllListeners()
describe 'stream', ->
it 'can handle array parameter', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/?filter=example%3Amain%2Cexample%3Atest'
res.setHeader('Content-Type', 'application/json')
res.end('{}')
stream = session.stream ['example:main', 'example:test']
stream.on 'connected', ->
stream.removeAllListeners()
done()
assert.deepEqual stream.flows, ['example:main', 'example:test']
it 'can handle single flow', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/?filter=example%3Amain'
res.setHeader('Content-Type', 'application/json')
res.end('{}')
stream = session.stream 'example:main'
stream.on 'connected', ->
stream.removeAllListeners()
done()
assert.deepEqual stream.flows, ['example:main']
describe 'invitations', ->
it 'can send an invitation', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/flows/org1/flow1/invitations'
res.setHeader('Content-Type', 'application/json')
res.end('{}')
session.invite 'flow1', 'org1', 'test@localhost', 'test message', (err, data, result) ->
assert.equal err, null
done()
describe '_request', ->
it 'makes a sensible request', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/flows/find?id=acdcabbacd1234567890'
res.setHeader('Content-Type', 'application/json')
res.end('{"flow":"foo"}')
session._request 'get', '/flows/find', {id: 'acdcabbacd1234567890'}, (err, data, res) ->
assert.equal err, null
assert.deepEqual data, {flow: "foo"}
done()
describe 'Session', ->
it 'should optionally take a URL', (done) ->
alt_mockdock = Mockdock.start()
alt_session = new flowdock.Session('test', 'password', "http://localhost:#{alt_mockdock.port}")
alt_mockdock.on 'request', (req, res) ->
assert.equal req.url, '/flows/find?id=acdcabbacd1234567890'
res.setHeader('Content-Type', 'application/json')
res.end('{"flow":"foo"}')
alt_session._request 'get', '/flows/find', {id: 'acdcabbacd1234567890'}, (err, data, res) ->
assert.equal err, null
assert.deepEqual data, {flow: "foo"}
alt_mockdock.removeAllListeners()
done()
| true | assert = require 'assert'
flowdock = require __dirname + '/../src/flowdock'
Mockdock = require('./helper').Mockdock
describe 'Flowdock', ->
mockdock = Mockdock.start()
session = null
beforeEach ->
process.env.FLOWDOCK_STREAM_URL = "http://localhost:#{mockdock.port}"
process.env.FLOWDOCK_API_URL = "http://localhost:#{mockdock.port}"
session = new flowdock.Session('test', 'PI:PASSWORD:<PASSWORD>END_PI')
session.on 'error', -> #noop
afterEach ->
mockdock.removeAllListeners()
describe 'stream', ->
it 'can handle array parameter', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/?filter=example%3Amain%2Cexample%3Atest'
res.setHeader('Content-Type', 'application/json')
res.end('{}')
stream = session.stream ['example:main', 'example:test']
stream.on 'connected', ->
stream.removeAllListeners()
done()
assert.deepEqual stream.flows, ['example:main', 'example:test']
it 'can handle single flow', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/?filter=example%3Amain'
res.setHeader('Content-Type', 'application/json')
res.end('{}')
stream = session.stream 'example:main'
stream.on 'connected', ->
stream.removeAllListeners()
done()
assert.deepEqual stream.flows, ['example:main']
describe 'invitations', ->
it 'can send an invitation', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/flows/org1/flow1/invitations'
res.setHeader('Content-Type', 'application/json')
res.end('{}')
session.invite 'flow1', 'org1', 'test@localhost', 'test message', (err, data, result) ->
assert.equal err, null
done()
describe '_request', ->
it 'makes a sensible request', (done) ->
mockdock.on 'request', (req, res) ->
assert.equal req.url, '/flows/find?id=acdcabbacd1234567890'
res.setHeader('Content-Type', 'application/json')
res.end('{"flow":"foo"}')
session._request 'get', '/flows/find', {id: 'acdcabbacd1234567890'}, (err, data, res) ->
assert.equal err, null
assert.deepEqual data, {flow: "foo"}
done()
describe 'Session', ->
it 'should optionally take a URL', (done) ->
alt_mockdock = Mockdock.start()
alt_session = new flowdock.Session('test', 'password', "http://localhost:#{alt_mockdock.port}")
alt_mockdock.on 'request', (req, res) ->
assert.equal req.url, '/flows/find?id=acdcabbacd1234567890'
res.setHeader('Content-Type', 'application/json')
res.end('{"flow":"foo"}')
alt_session._request 'get', '/flows/find', {id: 'acdcabbacd1234567890'}, (err, data, res) ->
assert.equal err, null
assert.deepEqual data, {flow: "foo"}
alt_mockdock.removeAllListeners()
done()
|
[
{
"context": " it 'works for the Oauth example', ->\n key = 'kd94hf93k423kf44&pfkkdhi9sl3r4s00'\n string = 'GET&http%3A%2F%2Fphotos.example.ne",
"end": 584,
"score": 0.9996312856674194,
"start": 551,
"tag": "KEY",
"value": "kd94hf93k423kf44&pfkkdhi9sl3r4s00"
},
{
"context": "p... | test/src/fast/util/hmac_test.coffee | expo/dropbox-js | 64 | describe 'Dropbox.Util.hmac', ->
it 'works for an empty message with an empty key', ->
# Source:
# http://en.wikipedia.org/wiki/Hash-based_message_authentication_code#Examples_of_HMAC_.28MD5.2C_SHA1.2C_SHA256.29
expect(Dropbox.Util.hmac('', '')).to.equal '+9sdGxiqbAgyS31ktx+3Y3BpDh0='
it 'works for the non-empty Wikipedia example', ->
expect(Dropbox.Util.hmac(
'The quick brown fox jumps over the lazy dog', 'key')).to.
equal '3nybhbi3iqa8ino29wqQcBydtNk='
it 'works for the Oauth example', ->
key = 'kd94hf93k423kf44&pfkkdhi9sl3r4s00'
string = 'GET&http%3A%2F%2Fphotos.example.net%2Fphotos&file%3Dvacation.jpg%26oauth_consumer_key%3Ddpf43f3p2l4k3l03%26oauth_nonce%3Dkllo9940pd9333jh%26oauth_signature_method%3DHMAC-SHA1%26oauth_timestamp%3D1191242096%26oauth_token%3Dnnch734d00sl2jdk%26oauth_version%3D1.0%26size%3Doriginal'
expect(Dropbox.Util.hmac(string, key)).to.
equal 'tR3+Ty81lMeYAr/Fid0kMTYa/WM='
describe 'Dropbox.Util.sha1', ->
it 'works for an empty message', ->
expect(Dropbox.Util.sha1('')).to.equal '2jmj7l5rSw0yVb/vlWAYkK/YBwk='
it 'works for the FIPS-180 Appendix A sample 1', ->
expect(Dropbox.Util.sha1('abc')).to.equal 'qZk+NkcGgWq6PiVxeFDCbJzQ2J0='
it 'works for the FIPS-180 Appendix A sample 2', ->
string = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
expect(Dropbox.Util.sha1(string)).to.equal 'hJg+RBw70m66rkqh+VEp5eVGcPE='
describe 'Dropbox.Util.sha256', ->
it 'works for an empty message', ->
expect(Dropbox.Util.sha256('')).to.equal(
'47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=')
it 'works for the FIPS-180 Appendix A sample 1', ->
expect(Dropbox.Util.sha256('abc')).to.equal(
'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=')
it 'works for the FIPS-180 Appendix A sample 2', ->
string = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
expect(Dropbox.Util.sha256(string)).to.equal(
'JI1qYdIGOLjlwCaTDD5gOaM85Flk/yFn9uzt1BnbBsE=')
it 'works for the FIPS-180 Appendix A additional sample 8', ->
string = (new Array(1001)).join 'A'
expect(Dropbox.Util.sha256(string)).to.equal(
'wuaGgjSJztIBf2BZuLI5MYtjZPbc2DXQpRkQWh6t1uQ=')
it 'works for the FIPS-180 Appendix A additional sample 9', ->
string = (new Array(1006)).join 'U'
expect(Dropbox.Util.sha256(string)).to.equal(
'9NYt3sDz3ZDqE4D6FqX/jcTFSyF0BlDySvxBIJA1UrA=')
| 15461 | describe 'Dropbox.Util.hmac', ->
it 'works for an empty message with an empty key', ->
# Source:
# http://en.wikipedia.org/wiki/Hash-based_message_authentication_code#Examples_of_HMAC_.28MD5.2C_SHA1.2C_SHA256.29
expect(Dropbox.Util.hmac('', '')).to.equal '+9sdGxiqbAgyS31ktx+3Y3BpDh0='
it 'works for the non-empty Wikipedia example', ->
expect(Dropbox.Util.hmac(
'The quick brown fox jumps over the lazy dog', 'key')).to.
equal '3nybhbi3iqa8ino29wqQcBydtNk='
it 'works for the Oauth example', ->
key = '<KEY>'
string = 'GET&http%3A%2F%2Fphotos.example.net%2Fphotos&file%3Dvacation.jpg%26oauth_consumer_key%3D<KEY>%26oauth_nonce%3Dkllo9940pd9333jh%26oauth_signature_method%3DHMAC-SHA1%26oauth_timestamp%3D1191242096%26oauth_token%3D<PASSWORD>%26oauth_version%3D1.0%26size%3Doriginal'
expect(Dropbox.Util.hmac(string, key)).to.
equal 'tR3+Ty81lMeYAr/Fid0kMTYa/WM='
describe 'Dropbox.Util.sha1', ->
it 'works for an empty message', ->
expect(Dropbox.Util.sha1('')).to.equal '2jmj7l5rSw0yVb/vlWAYkK/YBwk='
it 'works for the FIPS-180 Appendix A sample 1', ->
expect(Dropbox.Util.sha1('abc')).to.equal 'qZk+NkcGgWq6PiVxeFDCbJzQ2J0='
it 'works for the FIPS-180 Appendix A sample 2', ->
string = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
expect(Dropbox.Util.sha1(string)).to.equal 'hJg+RBw70m66rkqh+VEp5eVGcPE='
describe 'Dropbox.Util.sha256', ->
it 'works for an empty message', ->
expect(Dropbox.Util.sha256('')).to.equal(
'47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=')
it 'works for the FIPS-180 Appendix A sample 1', ->
expect(Dropbox.Util.sha256('abc')).to.equal(
'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=')
it 'works for the FIPS-180 Appendix A sample 2', ->
string = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
expect(Dropbox.Util.sha256(string)).to.equal(
'JI1qYdIGOLjlwCaTDD5gOaM85Flk/yFn9uzt1BnbBsE=')
it 'works for the FIPS-180 Appendix A additional sample 8', ->
string = (new Array(1001)).join 'A'
expect(Dropbox.Util.sha256(string)).to.equal(
'wuaGgjSJztIBf2BZuLI5MYtjZPbc2DXQpRkQWh6t1uQ=')
it 'works for the FIPS-180 Appendix A additional sample 9', ->
string = (new Array(1006)).join 'U'
expect(Dropbox.Util.sha256(string)).to.equal(
'9NYt3sDz3ZDqE4D6FqX/jcTFSy<KEY>=')
| true | describe 'Dropbox.Util.hmac', ->
it 'works for an empty message with an empty key', ->
# Source:
# http://en.wikipedia.org/wiki/Hash-based_message_authentication_code#Examples_of_HMAC_.28MD5.2C_SHA1.2C_SHA256.29
expect(Dropbox.Util.hmac('', '')).to.equal '+9sdGxiqbAgyS31ktx+3Y3BpDh0='
it 'works for the non-empty Wikipedia example', ->
expect(Dropbox.Util.hmac(
'The quick brown fox jumps over the lazy dog', 'key')).to.
equal '3nybhbi3iqa8ino29wqQcBydtNk='
it 'works for the Oauth example', ->
key = 'PI:KEY:<KEY>END_PI'
string = 'GET&http%3A%2F%2Fphotos.example.net%2Fphotos&file%3Dvacation.jpg%26oauth_consumer_key%3DPI:KEY:<KEY>END_PI%26oauth_nonce%3Dkllo9940pd9333jh%26oauth_signature_method%3DHMAC-SHA1%26oauth_timestamp%3D1191242096%26oauth_token%3DPI:PASSWORD:<PASSWORD>END_PI%26oauth_version%3D1.0%26size%3Doriginal'
expect(Dropbox.Util.hmac(string, key)).to.
equal 'tR3+Ty81lMeYAr/Fid0kMTYa/WM='
describe 'Dropbox.Util.sha1', ->
it 'works for an empty message', ->
expect(Dropbox.Util.sha1('')).to.equal '2jmj7l5rSw0yVb/vlWAYkK/YBwk='
it 'works for the FIPS-180 Appendix A sample 1', ->
expect(Dropbox.Util.sha1('abc')).to.equal 'qZk+NkcGgWq6PiVxeFDCbJzQ2J0='
it 'works for the FIPS-180 Appendix A sample 2', ->
string = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
expect(Dropbox.Util.sha1(string)).to.equal 'hJg+RBw70m66rkqh+VEp5eVGcPE='
describe 'Dropbox.Util.sha256', ->
it 'works for an empty message', ->
expect(Dropbox.Util.sha256('')).to.equal(
'47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=')
it 'works for the FIPS-180 Appendix A sample 1', ->
expect(Dropbox.Util.sha256('abc')).to.equal(
'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=')
it 'works for the FIPS-180 Appendix A sample 2', ->
string = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
expect(Dropbox.Util.sha256(string)).to.equal(
'JI1qYdIGOLjlwCaTDD5gOaM85Flk/yFn9uzt1BnbBsE=')
it 'works for the FIPS-180 Appendix A additional sample 8', ->
string = (new Array(1001)).join 'A'
expect(Dropbox.Util.sha256(string)).to.equal(
'wuaGgjSJztIBf2BZuLI5MYtjZPbc2DXQpRkQWh6t1uQ=')
it 'works for the FIPS-180 Appendix A additional sample 9', ->
string = (new Array(1006)).join 'U'
expect(Dropbox.Util.sha256(string)).to.equal(
'9NYt3sDz3ZDqE4D6FqX/jcTFSyPI:KEY:<KEY>END_PI=')
|
[
{
"context": "rizontal: false\n\n\n#Change titles here\n(titles = [\"Bobby Drake\", \"Scott Summers\",\"Jean Grey\", \"Warren Worthingto",
"end": 331,
"score": 0.9998940825462341,
"start": 320,
"tag": "NAME",
"value": "Bobby Drake"
},
{
"context": "\n\n\n#Change titles here\n(titles ... | Android-Components/Lists/LeaveBehind.coffee | iamkeeler/UXTOOLTIME-Framer | 4 | plugin.run = (contents, options) ->
"""
#{contents}
# <fold>
#List_LeaveBehind
# Create the draggable layer
scrollpanel = new ScrollComponent
width: Screen.width
height: Screen.height - 80
x: Align.center
y: Align.bottom
backgroundColor: "#F2F2F2"
scrollHorizontal: false
#Change titles here
(titles = ["Bobby Drake", "Scott Summers","Jean Grey", "Warren Worthington III", "Hank McCoy", "Kurt Wagner", "Lorna Dane", "Kevin Sydney", "Armando Muñoz"])
#Change subbelow text here
(subbelow = ["123 Oak Street","345 Elm Street","456 Spruce Street","567 Pine Avenue",
"678 Birch Street","789 Sycamore Road","123 Maple Court","345 Willow Court","678 Cedar Street"])
#Change subright text here
(subright = ["2", "10", "3", "1", "0", "0", "12", "5", "4"])
Items = []
for i in [0...8]
LineItem = new Layer
height: 72
width: Screen.width
backgroundColor: "#fff"
y: i *72
#y: Align.top 80
x: Align.center
z: 1
name: [i]
parent: scrollpanel.content
Title = new TextLayer
text: titles[i]
fontFamily: "Roboto"
fontSize: 16
fontWeight: 400
color: "#2F2F2F"
x: 16
y: 16
parent: LineItem
SubBelow = new TextLayer
text: subbelow[i]
fontFamily: "Roboto"
fontSize: 14
fontWeight: 400
color: "#000000"
opacity: 0.54
x: 16
y: 38
lineHeight: 1.0
parent: LineItem
#Uncomment to show SubRight
# SubRight = new TextLayer
# text: subright[i]
# fontFamily: "Roboto"
# fontSize: 14
# fontWeight: 500
# color: "#626161"
# y: Align.center
# x: Align.right
# padding: right: 20
# parent: LineItem
Divider = new Layer
width: Screen.width
height: 1
y: Align.bottom
x: Align.right
parent: LineItem
backgroundColor: "rgba(0,0,0,0.12)"
hotspot = new Layer
x: Align.right
y: Align.center
height: LineItem.height
width: LineItem.width-70
backgroundColor: ""
name: [i]
parent: LineItem
# Make the layer draggable
LineItem.draggable.enabled = true
LineItem.draggable.vertical = false
# Set the constraints frame
LineItem.draggable.constraints = {
x: 0
y: i *72
width: 160
height: 80
}
for i in [0...8]
#Left BG
LeaveBehind_left = new Layer
y: i *72
height: 72
width: 200
x: Align.left
parent: scrollpanel.content
backgroundColor: "rgba(237,237,237,1)"
#Left Icon
Icon_Left = new Layer
size: 24
x: Align.left 24
y: Align.center
parent: LeaveBehind_left
#Right BG
LeaveBehind_right = new Layer
y: i *72
height: 72
width: 200
x: Align.right
parent: scrollpanel.content
backgroundColor: "rgba(237,237,237,1)"
#Right Icon
Icon_Right = new Layer
size: 24
x: Align.right -24
y: Align.center
parent: LeaveBehind_right
# </fold>
"""
| 60819 | plugin.run = (contents, options) ->
"""
#{contents}
# <fold>
#List_LeaveBehind
# Create the draggable layer
scrollpanel = new ScrollComponent
width: Screen.width
height: Screen.height - 80
x: Align.center
y: Align.bottom
backgroundColor: "#F2F2F2"
scrollHorizontal: false
#Change titles here
(titles = ["<NAME>", "<NAME>","<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>"])
#Change subbelow text here
(subbelow = ["123 Oak Street","345 Elm Street","456 Spruce Street","567 Pine Avenue",
"678 Birch Street","789 Sycamore Road","123 Maple Court","345 Willow Court","678 Cedar Street"])
#Change subright text here
(subright = ["2", "10", "3", "1", "0", "0", "12", "5", "4"])
Items = []
for i in [0...8]
LineItem = new Layer
height: 72
width: Screen.width
backgroundColor: "#fff"
y: i *72
#y: Align.top 80
x: Align.center
z: 1
name: [i]
parent: scrollpanel.content
Title = new TextLayer
text: titles[i]
fontFamily: "Roboto"
fontSize: 16
fontWeight: 400
color: "#2F2F2F"
x: 16
y: 16
parent: LineItem
SubBelow = new TextLayer
text: subbelow[i]
fontFamily: "Roboto"
fontSize: 14
fontWeight: 400
color: "#000000"
opacity: 0.54
x: 16
y: 38
lineHeight: 1.0
parent: LineItem
#Uncomment to show SubRight
# SubRight = new TextLayer
# text: subright[i]
# fontFamily: "Roboto"
# fontSize: 14
# fontWeight: 500
# color: "#626161"
# y: Align.center
# x: Align.right
# padding: right: 20
# parent: LineItem
Divider = new Layer
width: Screen.width
height: 1
y: Align.bottom
x: Align.right
parent: LineItem
backgroundColor: "rgba(0,0,0,0.12)"
hotspot = new Layer
x: Align.right
y: Align.center
height: LineItem.height
width: LineItem.width-70
backgroundColor: ""
name: [i]
parent: LineItem
# Make the layer draggable
LineItem.draggable.enabled = true
LineItem.draggable.vertical = false
# Set the constraints frame
LineItem.draggable.constraints = {
x: 0
y: i *72
width: 160
height: 80
}
for i in [0...8]
#Left BG
LeaveBehind_left = new Layer
y: i *72
height: 72
width: 200
x: Align.left
parent: scrollpanel.content
backgroundColor: "rgba(237,237,237,1)"
#Left Icon
Icon_Left = new Layer
size: 24
x: Align.left 24
y: Align.center
parent: LeaveBehind_left
#Right BG
LeaveBehind_right = new Layer
y: i *72
height: 72
width: 200
x: Align.right
parent: scrollpanel.content
backgroundColor: "rgba(237,237,237,1)"
#Right Icon
Icon_Right = new Layer
size: 24
x: Align.right -24
y: Align.center
parent: LeaveBehind_right
# </fold>
"""
| true | plugin.run = (contents, options) ->
"""
#{contents}
# <fold>
#List_LeaveBehind
# Create the draggable layer
scrollpanel = new ScrollComponent
width: Screen.width
height: Screen.height - 80
x: Align.center
y: Align.bottom
backgroundColor: "#F2F2F2"
scrollHorizontal: false
#Change titles here
(titles = ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"])
#Change subbelow text here
(subbelow = ["123 Oak Street","345 Elm Street","456 Spruce Street","567 Pine Avenue",
"678 Birch Street","789 Sycamore Road","123 Maple Court","345 Willow Court","678 Cedar Street"])
#Change subright text here
(subright = ["2", "10", "3", "1", "0", "0", "12", "5", "4"])
Items = []
for i in [0...8]
LineItem = new Layer
height: 72
width: Screen.width
backgroundColor: "#fff"
y: i *72
#y: Align.top 80
x: Align.center
z: 1
name: [i]
parent: scrollpanel.content
Title = new TextLayer
text: titles[i]
fontFamily: "Roboto"
fontSize: 16
fontWeight: 400
color: "#2F2F2F"
x: 16
y: 16
parent: LineItem
SubBelow = new TextLayer
text: subbelow[i]
fontFamily: "Roboto"
fontSize: 14
fontWeight: 400
color: "#000000"
opacity: 0.54
x: 16
y: 38
lineHeight: 1.0
parent: LineItem
#Uncomment to show SubRight
# SubRight = new TextLayer
# text: subright[i]
# fontFamily: "Roboto"
# fontSize: 14
# fontWeight: 500
# color: "#626161"
# y: Align.center
# x: Align.right
# padding: right: 20
# parent: LineItem
Divider = new Layer
width: Screen.width
height: 1
y: Align.bottom
x: Align.right
parent: LineItem
backgroundColor: "rgba(0,0,0,0.12)"
hotspot = new Layer
x: Align.right
y: Align.center
height: LineItem.height
width: LineItem.width-70
backgroundColor: ""
name: [i]
parent: LineItem
# Make the layer draggable
LineItem.draggable.enabled = true
LineItem.draggable.vertical = false
# Set the constraints frame
LineItem.draggable.constraints = {
x: 0
y: i *72
width: 160
height: 80
}
for i in [0...8]
#Left BG
LeaveBehind_left = new Layer
y: i *72
height: 72
width: 200
x: Align.left
parent: scrollpanel.content
backgroundColor: "rgba(237,237,237,1)"
#Left Icon
Icon_Left = new Layer
size: 24
x: Align.left 24
y: Align.center
parent: LeaveBehind_left
#Right BG
LeaveBehind_right = new Layer
y: i *72
height: 72
width: 200
x: Align.right
parent: scrollpanel.content
backgroundColor: "rgba(237,237,237,1)"
#Right Icon
Icon_Right = new Layer
size: 24
x: Align.right -24
y: Align.center
parent: LeaveBehind_right
# </fold>
"""
|
[
{
"context": "loyee: {companyId: 5, position: 'Manager', name: 'Elin'}\n# trigger numEmployees〳Employee -> = 10\n# trigg",
"end": 1834,
"score": 0.9714105129241943,
"start": 1830,
"tag": "NAME",
"value": "Elin"
}
] | syntax-test.coffee | Cottin/popsiql | 0 | [ːamount, ːsex, ːID_Int_Seq, ːtext, ːid〳COUNT, ːnumConnections, ːh1, ːh3, ːnumCompanies, ːage, ːcount, ːname, ːdate, ːid, ːStr, ːtype] = ['amount', 'sex', 'ID_Int_Seq', 'text', 'id〳COUNT', 'numConnections', 'h1', 'h3', 'numCompanies', 'age', 'count', 'name', 'date', 'id', 'Str', 'type'] #auto_sugar
aggregations:
Company:
numEmployees:
1:
empty:
id〳COUNT: 3
2:
empty:
id〳COUNT: 2
managers:
1:
a:
query: {salary: {lt: 10000}}
id〳COUNT: 4
aggregations:
Company:
numEmployees:
1:
{ref: 1, fields: {id: {value: 3, op: 'count'}}}
2:
{ref: 2, fields: {id: {value: 2, op: 'count'}}}
managers:
1:
{ref: 3, fields: {id: {value: 2, op: 'count'}}}
numRichManagers:
1:
{ref: 4, fields: {id: {value: 2, op: 'count'}}}
refs:
1: 'Company/numEmployees/1'
2: 'Company/numEmployees/2'
Employee: [
{query: {companyId: 1, salary: {lt: 10000}}, refs: [1]}
]
Project: [
[
{price: {gt: 50000}}
{roles〳name: 'Lead developer'}}
{Person: }
]
]
Person: _ {name: {ilike: 'el%'}, ːage}
1,2,3,4
Role: _ {personId: {in: [1,2,3,4]}, name: 'Lead developer'},
1,4,6 = 3,5,7
Project: _ {id: {in: [3,5,7]}, price: {gt: 50000}, ːname},
3,5 = 5,7
Company: _ {id: {in: [5,7]}},
5,7
numEmployees〳Employee: _ {companyId: {in: [5,7]}, ːid〳COUNT}
= 9
numManagers〳Employee: _ {companyId: {in: [5,7]}, position: 'Manager'}
= 3
femaleEmployees〳Employee: _ {person〳sex: 'F'}
U Project: {id: 3, price: 40000}
# trigger Project -> 5 = 7 -> trigger Company -> trigger aggr
U Project: {id: 2, price: 60000}
# trigger inget
U Project: {id: 7, price: 60000}
# trigger Project -> 3,5,7 = 5,7,8 -> trigger Company -> trigger aggr
C Employee: {companyId: 5, position: 'Manager', name: 'Elin'}
# trigger numEmployees〳Employee -> = 10
# trigger numManagers〳Employee -> = 4
query =
Person: _ {id: {gte: 3, lte: 4}, ːname, ːsex, ːage},
entries: _ {ːid, ːamount, ːdate, ːtext},
task: _ {ːname}
project: _ {ːname}
roles: _ {name: 'Guest'},
project: _ {ːid, ːname, ːtype}
query =
Person: _ {id: {gte: 3, lte: 4}, ːname, ːsex, ːage},
entries: _ {ːid, ːamount, ːdate, ːtext},
task: ːname
project: _ {ːname}
roles: _ {name: 'Guest'},
project: _ {ːid, ːname, ːtype}
# Möjlig lösning men mindre kräver import-plugin
Person {id: {gte: 3, lte: 4}, ːname, ːsex, ːage},
entries {ːid, ːamount, ːdate, ːtext},
task {ːname}
project {ːname}
roles {name: 'Guest'},
project {id, name}
ConnectionType:
id: ːID_Int_Seq
name: ːStr
companies: {oneToMany〳: 'ConnectionType.id = Company.connectionTypeId'}
connections: {oneToMany〳: 'ConnectionType.id = Connection.connectionTypeId'}
numCompanies: companies: _ {ːid〳COUNT}
numConnections: connections: _ {ːid〳COUNT}
ConnectionType:
D: ({id}) ->
{numCompanies, numConnections} = R ConnectionType1: _ {id, ːnumCompanies, ːnumConnections}
if numCompanies != 0 then throw new VErr 'Kan inte ta bort för den används av företag'
if numConnections != 0 then throw new VErr 'Kan inte ta bort för den används av anslutningar'
comp = ->
data = Cache.readPE
cts: ConnectionType: _ {ːid, ːname, ːcount}
caps: Capacities: _ {ːid, ːname, ːcount}
sls: ServiceLevel: _ {ːid, ːname, ːcount}
_ Page, {},
_ Section, {},
_ ːh1, {}, 'Anslutningstyp, Kapacitet, Servicenivå'
_ renderEdit
_ {s: 'xrb__1'},
_ renderList, {data: cts, title: 'Anslutningstyp'}
_ renderList, {data: caps, title: 'Capacitet'}
_ renderList, {data: sls, title: 'Servicenivå'}
DetailsPanel = ->
{path2: type, path3, path4: isEdit} = Url() #
_ Edit.PE, {type, id: path3}, (id, isNew) -> # exponerar o genom context (alternative type och id)
if isEdit || isNew
_ {},
_ Edit.Textbox, {f: ːname} # får o genom context
_ Link.Button, {url: isNew && '//' || {path4: undefined}}, 'Avbryt'
_ Button.Flat, {onClick: Cache.commit(type, id)}, 'Spara'
else
_ {},
_ Edit.Label {f: ːname}
_ Link.Button, {url: '//'}, 'Avbryt'
_ Button.Flat, onClick: ->
if await Confirm.show('Är du säker?')
Cache.deletePE(type, id)
, 'Avbryt'
renderList = ({data, title}) ->
_ {s: 'xg1 mr40-last'}, # -last = not last, +last = only last
_ ːh3, title
_ List.Line
fmap data, ({id, name, count}) ->
_ List.Row, {key: id}
_ {}, name
_ {}, "#{count} st"
# o = Cache.edit {type, id}
# _ Confirm, {f: (yesNo) ->
# if !yesNo then return
# await Cache.delete(type, id)
# Url.change '//'
# }, (onClick) ->
# _ Button.Flat, {onClick}, 'Avbryt'
| 174945 | [ːamount, ːsex, ːID_Int_Seq, ːtext, ːid〳COUNT, ːnumConnections, ːh1, ːh3, ːnumCompanies, ːage, ːcount, ːname, ːdate, ːid, ːStr, ːtype] = ['amount', 'sex', 'ID_Int_Seq', 'text', 'id〳COUNT', 'numConnections', 'h1', 'h3', 'numCompanies', 'age', 'count', 'name', 'date', 'id', 'Str', 'type'] #auto_sugar
aggregations:
Company:
numEmployees:
1:
empty:
id〳COUNT: 3
2:
empty:
id〳COUNT: 2
managers:
1:
a:
query: {salary: {lt: 10000}}
id〳COUNT: 4
aggregations:
Company:
numEmployees:
1:
{ref: 1, fields: {id: {value: 3, op: 'count'}}}
2:
{ref: 2, fields: {id: {value: 2, op: 'count'}}}
managers:
1:
{ref: 3, fields: {id: {value: 2, op: 'count'}}}
numRichManagers:
1:
{ref: 4, fields: {id: {value: 2, op: 'count'}}}
refs:
1: 'Company/numEmployees/1'
2: 'Company/numEmployees/2'
Employee: [
{query: {companyId: 1, salary: {lt: 10000}}, refs: [1]}
]
Project: [
[
{price: {gt: 50000}}
{roles〳name: 'Lead developer'}}
{Person: }
]
]
Person: _ {name: {ilike: 'el%'}, ːage}
1,2,3,4
Role: _ {personId: {in: [1,2,3,4]}, name: 'Lead developer'},
1,4,6 = 3,5,7
Project: _ {id: {in: [3,5,7]}, price: {gt: 50000}, ːname},
3,5 = 5,7
Company: _ {id: {in: [5,7]}},
5,7
numEmployees〳Employee: _ {companyId: {in: [5,7]}, ːid〳COUNT}
= 9
numManagers〳Employee: _ {companyId: {in: [5,7]}, position: 'Manager'}
= 3
femaleEmployees〳Employee: _ {person〳sex: 'F'}
U Project: {id: 3, price: 40000}
# trigger Project -> 5 = 7 -> trigger Company -> trigger aggr
U Project: {id: 2, price: 60000}
# trigger inget
U Project: {id: 7, price: 60000}
# trigger Project -> 3,5,7 = 5,7,8 -> trigger Company -> trigger aggr
C Employee: {companyId: 5, position: 'Manager', name: '<NAME>'}
# trigger numEmployees〳Employee -> = 10
# trigger numManagers〳Employee -> = 4
query =
Person: _ {id: {gte: 3, lte: 4}, ːname, ːsex, ːage},
entries: _ {ːid, ːamount, ːdate, ːtext},
task: _ {ːname}
project: _ {ːname}
roles: _ {name: 'Guest'},
project: _ {ːid, ːname, ːtype}
query =
Person: _ {id: {gte: 3, lte: 4}, ːname, ːsex, ːage},
entries: _ {ːid, ːamount, ːdate, ːtext},
task: ːname
project: _ {ːname}
roles: _ {name: 'Guest'},
project: _ {ːid, ːname, ːtype}
# Möjlig lösning men mindre kräver import-plugin
Person {id: {gte: 3, lte: 4}, ːname, ːsex, ːage},
entries {ːid, ːamount, ːdate, ːtext},
task {ːname}
project {ːname}
roles {name: 'Guest'},
project {id, name}
ConnectionType:
id: ːID_Int_Seq
name: ːStr
companies: {oneToMany〳: 'ConnectionType.id = Company.connectionTypeId'}
connections: {oneToMany〳: 'ConnectionType.id = Connection.connectionTypeId'}
numCompanies: companies: _ {ːid〳COUNT}
numConnections: connections: _ {ːid〳COUNT}
ConnectionType:
D: ({id}) ->
{numCompanies, numConnections} = R ConnectionType1: _ {id, ːnumCompanies, ːnumConnections}
if numCompanies != 0 then throw new VErr 'Kan inte ta bort för den används av företag'
if numConnections != 0 then throw new VErr 'Kan inte ta bort för den används av anslutningar'
comp = ->
data = Cache.readPE
cts: ConnectionType: _ {ːid, ːname, ːcount}
caps: Capacities: _ {ːid, ːname, ːcount}
sls: ServiceLevel: _ {ːid, ːname, ːcount}
_ Page, {},
_ Section, {},
_ ːh1, {}, 'Anslutningstyp, Kapacitet, Servicenivå'
_ renderEdit
_ {s: 'xrb__1'},
_ renderList, {data: cts, title: 'Anslutningstyp'}
_ renderList, {data: caps, title: 'Capacitet'}
_ renderList, {data: sls, title: 'Servicenivå'}
DetailsPanel = ->
{path2: type, path3, path4: isEdit} = Url() #
_ Edit.PE, {type, id: path3}, (id, isNew) -> # exponerar o genom context (alternative type och id)
if isEdit || isNew
_ {},
_ Edit.Textbox, {f: ːname} # får o genom context
_ Link.Button, {url: isNew && '//' || {path4: undefined}}, 'Avbryt'
_ Button.Flat, {onClick: Cache.commit(type, id)}, 'Spara'
else
_ {},
_ Edit.Label {f: ːname}
_ Link.Button, {url: '//'}, 'Avbryt'
_ Button.Flat, onClick: ->
if await Confirm.show('Är du säker?')
Cache.deletePE(type, id)
, 'Avbryt'
renderList = ({data, title}) ->
_ {s: 'xg1 mr40-last'}, # -last = not last, +last = only last
_ ːh3, title
_ List.Line
fmap data, ({id, name, count}) ->
_ List.Row, {key: id}
_ {}, name
_ {}, "#{count} st"
# o = Cache.edit {type, id}
# _ Confirm, {f: (yesNo) ->
# if !yesNo then return
# await Cache.delete(type, id)
# Url.change '//'
# }, (onClick) ->
# _ Button.Flat, {onClick}, 'Avbryt'
| true | [ːamount, ːsex, ːID_Int_Seq, ːtext, ːid〳COUNT, ːnumConnections, ːh1, ːh3, ːnumCompanies, ːage, ːcount, ːname, ːdate, ːid, ːStr, ːtype] = ['amount', 'sex', 'ID_Int_Seq', 'text', 'id〳COUNT', 'numConnections', 'h1', 'h3', 'numCompanies', 'age', 'count', 'name', 'date', 'id', 'Str', 'type'] #auto_sugar
aggregations:
Company:
numEmployees:
1:
empty:
id〳COUNT: 3
2:
empty:
id〳COUNT: 2
managers:
1:
a:
query: {salary: {lt: 10000}}
id〳COUNT: 4
aggregations:
Company:
numEmployees:
1:
{ref: 1, fields: {id: {value: 3, op: 'count'}}}
2:
{ref: 2, fields: {id: {value: 2, op: 'count'}}}
managers:
1:
{ref: 3, fields: {id: {value: 2, op: 'count'}}}
numRichManagers:
1:
{ref: 4, fields: {id: {value: 2, op: 'count'}}}
refs:
1: 'Company/numEmployees/1'
2: 'Company/numEmployees/2'
Employee: [
{query: {companyId: 1, salary: {lt: 10000}}, refs: [1]}
]
Project: [
[
{price: {gt: 50000}}
{roles〳name: 'Lead developer'}}
{Person: }
]
]
Person: _ {name: {ilike: 'el%'}, ːage}
1,2,3,4
Role: _ {personId: {in: [1,2,3,4]}, name: 'Lead developer'},
1,4,6 = 3,5,7
Project: _ {id: {in: [3,5,7]}, price: {gt: 50000}, ːname},
3,5 = 5,7
Company: _ {id: {in: [5,7]}},
5,7
numEmployees〳Employee: _ {companyId: {in: [5,7]}, ːid〳COUNT}
= 9
numManagers〳Employee: _ {companyId: {in: [5,7]}, position: 'Manager'}
= 3
femaleEmployees〳Employee: _ {person〳sex: 'F'}
U Project: {id: 3, price: 40000}
# trigger Project -> 5 = 7 -> trigger Company -> trigger aggr
U Project: {id: 2, price: 60000}
# trigger inget
U Project: {id: 7, price: 60000}
# trigger Project -> 3,5,7 = 5,7,8 -> trigger Company -> trigger aggr
C Employee: {companyId: 5, position: 'Manager', name: 'PI:NAME:<NAME>END_PI'}
# trigger numEmployees〳Employee -> = 10
# trigger numManagers〳Employee -> = 4
query =
Person: _ {id: {gte: 3, lte: 4}, ːname, ːsex, ːage},
entries: _ {ːid, ːamount, ːdate, ːtext},
task: _ {ːname}
project: _ {ːname}
roles: _ {name: 'Guest'},
project: _ {ːid, ːname, ːtype}
query =
Person: _ {id: {gte: 3, lte: 4}, ːname, ːsex, ːage},
entries: _ {ːid, ːamount, ːdate, ːtext},
task: ːname
project: _ {ːname}
roles: _ {name: 'Guest'},
project: _ {ːid, ːname, ːtype}
# Möjlig lösning men mindre kräver import-plugin
Person {id: {gte: 3, lte: 4}, ːname, ːsex, ːage},
entries {ːid, ːamount, ːdate, ːtext},
task {ːname}
project {ːname}
roles {name: 'Guest'},
project {id, name}
ConnectionType:
id: ːID_Int_Seq
name: ːStr
companies: {oneToMany〳: 'ConnectionType.id = Company.connectionTypeId'}
connections: {oneToMany〳: 'ConnectionType.id = Connection.connectionTypeId'}
numCompanies: companies: _ {ːid〳COUNT}
numConnections: connections: _ {ːid〳COUNT}
ConnectionType:
D: ({id}) ->
{numCompanies, numConnections} = R ConnectionType1: _ {id, ːnumCompanies, ːnumConnections}
if numCompanies != 0 then throw new VErr 'Kan inte ta bort för den används av företag'
if numConnections != 0 then throw new VErr 'Kan inte ta bort för den används av anslutningar'
comp = ->
data = Cache.readPE
cts: ConnectionType: _ {ːid, ːname, ːcount}
caps: Capacities: _ {ːid, ːname, ːcount}
sls: ServiceLevel: _ {ːid, ːname, ːcount}
_ Page, {},
_ Section, {},
_ ːh1, {}, 'Anslutningstyp, Kapacitet, Servicenivå'
_ renderEdit
_ {s: 'xrb__1'},
_ renderList, {data: cts, title: 'Anslutningstyp'}
_ renderList, {data: caps, title: 'Capacitet'}
_ renderList, {data: sls, title: 'Servicenivå'}
DetailsPanel = ->
{path2: type, path3, path4: isEdit} = Url() #
_ Edit.PE, {type, id: path3}, (id, isNew) -> # exponerar o genom context (alternative type och id)
if isEdit || isNew
_ {},
_ Edit.Textbox, {f: ːname} # får o genom context
_ Link.Button, {url: isNew && '//' || {path4: undefined}}, 'Avbryt'
_ Button.Flat, {onClick: Cache.commit(type, id)}, 'Spara'
else
_ {},
_ Edit.Label {f: ːname}
_ Link.Button, {url: '//'}, 'Avbryt'
_ Button.Flat, onClick: ->
if await Confirm.show('Är du säker?')
Cache.deletePE(type, id)
, 'Avbryt'
renderList = ({data, title}) ->
_ {s: 'xg1 mr40-last'}, # -last = not last, +last = only last
_ ːh3, title
_ List.Line
fmap data, ({id, name, count}) ->
_ List.Row, {key: id}
_ {}, name
_ {}, "#{count} st"
# o = Cache.edit {type, id}
# _ Confirm, {f: (yesNo) ->
# if !yesNo then return
# await Cache.delete(type, id)
# Url.change '//'
# }, (onClick) ->
# _ Button.Flat, {onClick}, 'Avbryt'
|
[
{
"context": "class Rogue\n key: 'rogue'\n name: 'rogue'\n\n genders: ['male', 'female']\n ",
"end": 25,
"score": 0.9451420307159424,
"start": 20,
"tag": "USERNAME",
"value": "rogue"
},
{
"context": "class Rogue\n key: 'rogue'\n name: 'rogue'\n\n genders: ['male', 'female']\n ... | js/classes/rogue.coffee | ktchernov/7drl-lion.github.io | 27 | class Rogue
key: 'rogue'
name: 'rogue'
genders: ['male', 'female']
alignments: ['neutral', 'evil']
races: ['human', 'werewolf', 'demon', 'catfolk', 'gnome', 'vampire', 'dwarf', 'hobbit']
base_hp: -5
base_mp: -10
base_speed: 75
base_attack: 0
base_sight_range: 0
skills: [
'stealth'
'double_strike'
'apply_poison'
'counterattack'
'evade'
'distract'
'fan_of_knives'
'shadowstep'
'steal'
'preparation'
]
register_class Rogue
| 201950 | class Rogue
key: 'rogue'
name: '<NAME>'
genders: ['male', 'female']
alignments: ['neutral', 'evil']
races: ['human', 'werewolf', 'demon', 'catfolk', 'gnome', 'vampire', 'dwarf', 'hobbit']
base_hp: -5
base_mp: -10
base_speed: 75
base_attack: 0
base_sight_range: 0
skills: [
'stealth'
'double_strike'
'apply_poison'
'counterattack'
'evade'
'distract'
'fan_of_knives'
'shadowstep'
'steal'
'preparation'
]
register_class Rogue
| true | class Rogue
key: 'rogue'
name: 'PI:NAME:<NAME>END_PI'
genders: ['male', 'female']
alignments: ['neutral', 'evil']
races: ['human', 'werewolf', 'demon', 'catfolk', 'gnome', 'vampire', 'dwarf', 'hobbit']
base_hp: -5
base_mp: -10
base_speed: 75
base_attack: 0
base_sight_range: 0
skills: [
'stealth'
'double_strike'
'apply_poison'
'counterattack'
'evade'
'distract'
'fan_of_knives'
'shadowstep'
'steal'
'preparation'
]
register_class Rogue
|
[
{
"context": " {\n url: 'http://demo.redmine.org'\n id: 'RedmineTimeTracker'\n pass: 'RedmineTimeTracker'\n }\n\n beforeEach",
"end": 188,
"score": 0.5986804962158203,
"start": 170,
"tag": "USERNAME",
"value": "RedmineTimeTracker"
},
{
"context": "ne.org'\n id: 'Re... | test/redmine_test.coffee | Chanshi712/RedmineTimeTracker | 73 | expect = chai.expect
describe 'redmine.coffee', ->
Redmine = null
TestData = null
$httpBackend = null
_auth = {
url: 'http://demo.redmine.org'
id: 'RedmineTimeTracker'
pass: 'RedmineTimeTracker'
}
beforeEach () ->
angular.mock.module('timeTracker')
# initialize object
inject (_$httpBackend_, _Redmine_, _TestData_) ->
Redmine = _Redmine_
TestData = _TestData_()
$httpBackend = _$httpBackend_
afterEach () ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it 'should have working Redmine service', () ->
expect(Redmine.get).not.to.equal null
###
test for get(auth)
###
describe 'get(auth)', ->
it '1', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
pass: 'test_pass'
}
expect(Redmine.get(auth)).exists
it '2', () ->
auth1 = {
url: 'http://github.com1'
id: 'test_id'
pass: 'test_pass'
}
auth2 = {
url: 'http://github.com2'
id: 'test_id'
pass: 'test_pass'
}
expect(Redmine.get(auth1)).exists
expect(Redmine.get(auth2)).exists
###
test for remove(auth)
###
describe 'remove(auth)', ->
it 'remove account', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
pass: 'test_pass'
}
expect(Redmine.get(auth)).exists
Redmine.remove(auth)
expect(Redmine.get(auth)).not.exists
###
test for findUser(success, error)
###
describe 'findUser(success, error)', ->
it 'should load user', (done) ->
$httpBackend
.expectGET(_auth.url + '/users/current.json?include=memberships')
.respond(TestData.user)
success = (data) ->
expect(data.user).to.exist
done()
error = () ->
expect(false).to.be.true
done()
Redmine.get(_auth).findUser(success, error)
$httpBackend.flush()
###
test for loadQueries(params)
###
describe 'loadQueries(params)', ->
it 'should load queries', (done) ->
$httpBackend
.expectGET(_auth.url + '/queries.json?limit=25&page=0')
.respond(TestData.queries)
Redmine.get(_auth).loadQueries(page: 0, limit: 25).then(
(data) -> expect(data.queries).to.exist; done()
, () -> expect(false).to.be.true; done())
$httpBackend.flush()
###
test for loadTimeEntries(params)
###
describe 'loadTimeEntries(params)', ->
it 'should load time entries', (done) ->
$httpBackend
.expectGET(_auth.url + '/time_entries.json?limit=100')
.respond(TestData.time_entries)
Redmine.get(_auth).loadTimeEntries().then(
(data) -> expect(data.time_entries).to.exist; done()
, () -> expect(false).to.be.true; done())
$httpBackend.flush()
| 49944 | expect = chai.expect
describe 'redmine.coffee', ->
Redmine = null
TestData = null
$httpBackend = null
_auth = {
url: 'http://demo.redmine.org'
id: 'RedmineTimeTracker'
pass: '<PASSWORD>'
}
beforeEach () ->
angular.mock.module('timeTracker')
# initialize object
inject (_$httpBackend_, _Redmine_, _TestData_) ->
Redmine = _Redmine_
TestData = _TestData_()
$httpBackend = _$httpBackend_
afterEach () ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it 'should have working Redmine service', () ->
expect(Redmine.get).not.to.equal null
###
test for get(auth)
###
describe 'get(auth)', ->
it '1', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
pass: '<PASSWORD>'
}
expect(Redmine.get(auth)).exists
it '2', () ->
auth1 = {
url: 'http://github.com1'
id: 'test_id'
pass: '<PASSWORD>'
}
auth2 = {
url: 'http://github.com2'
id: 'test_id'
pass: '<PASSWORD>'
}
expect(Redmine.get(auth1)).exists
expect(Redmine.get(auth2)).exists
###
test for remove(auth)
###
describe 'remove(auth)', ->
it 'remove account', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
pass: '<PASSWORD>'
}
expect(Redmine.get(auth)).exists
Redmine.remove(auth)
expect(Redmine.get(auth)).not.exists
###
test for findUser(success, error)
###
describe 'findUser(success, error)', ->
it 'should load user', (done) ->
$httpBackend
.expectGET(_auth.url + '/users/current.json?include=memberships')
.respond(TestData.user)
success = (data) ->
expect(data.user).to.exist
done()
error = () ->
expect(false).to.be.true
done()
Redmine.get(_auth).findUser(success, error)
$httpBackend.flush()
###
test for loadQueries(params)
###
describe 'loadQueries(params)', ->
it 'should load queries', (done) ->
$httpBackend
.expectGET(_auth.url + '/queries.json?limit=25&page=0')
.respond(TestData.queries)
Redmine.get(_auth).loadQueries(page: 0, limit: 25).then(
(data) -> expect(data.queries).to.exist; done()
, () -> expect(false).to.be.true; done())
$httpBackend.flush()
###
test for loadTimeEntries(params)
###
describe 'loadTimeEntries(params)', ->
it 'should load time entries', (done) ->
$httpBackend
.expectGET(_auth.url + '/time_entries.json?limit=100')
.respond(TestData.time_entries)
Redmine.get(_auth).loadTimeEntries().then(
(data) -> expect(data.time_entries).to.exist; done()
, () -> expect(false).to.be.true; done())
$httpBackend.flush()
| true | expect = chai.expect
describe 'redmine.coffee', ->
Redmine = null
TestData = null
$httpBackend = null
_auth = {
url: 'http://demo.redmine.org'
id: 'RedmineTimeTracker'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
beforeEach () ->
angular.mock.module('timeTracker')
# initialize object
inject (_$httpBackend_, _Redmine_, _TestData_) ->
Redmine = _Redmine_
TestData = _TestData_()
$httpBackend = _$httpBackend_
afterEach () ->
$httpBackend.verifyNoOutstandingExpectation()
$httpBackend.verifyNoOutstandingRequest()
it 'should have working Redmine service', () ->
expect(Redmine.get).not.to.equal null
###
test for get(auth)
###
describe 'get(auth)', ->
it '1', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
expect(Redmine.get(auth)).exists
it '2', () ->
auth1 = {
url: 'http://github.com1'
id: 'test_id'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
auth2 = {
url: 'http://github.com2'
id: 'test_id'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
expect(Redmine.get(auth1)).exists
expect(Redmine.get(auth2)).exists
###
test for remove(auth)
###
describe 'remove(auth)', ->
it 'remove account', () ->
auth = {
url: 'http://github.com'
id: 'test_id'
pass: 'PI:PASSWORD:<PASSWORD>END_PI'
}
expect(Redmine.get(auth)).exists
Redmine.remove(auth)
expect(Redmine.get(auth)).not.exists
###
test for findUser(success, error)
###
describe 'findUser(success, error)', ->
it 'should load user', (done) ->
$httpBackend
.expectGET(_auth.url + '/users/current.json?include=memberships')
.respond(TestData.user)
success = (data) ->
expect(data.user).to.exist
done()
error = () ->
expect(false).to.be.true
done()
Redmine.get(_auth).findUser(success, error)
$httpBackend.flush()
###
test for loadQueries(params)
###
describe 'loadQueries(params)', ->
it 'should load queries', (done) ->
$httpBackend
.expectGET(_auth.url + '/queries.json?limit=25&page=0')
.respond(TestData.queries)
Redmine.get(_auth).loadQueries(page: 0, limit: 25).then(
(data) -> expect(data.queries).to.exist; done()
, () -> expect(false).to.be.true; done())
$httpBackend.flush()
###
test for loadTimeEntries(params)
###
describe 'loadTimeEntries(params)', ->
it 'should load time entries', (done) ->
$httpBackend
.expectGET(_auth.url + '/time_entries.json?limit=100')
.respond(TestData.time_entries)
Redmine.get(_auth).loadTimeEntries().then(
(data) -> expect(data.time_entries).to.exist; done()
, () -> expect(false).to.be.true; done())
$httpBackend.flush()
|
[
{
"context": "onsole.log \"終バス通知:#{today}\"\n to = toList[0] # \"minakusa\"\n dayIndex = getDayOfWeek(today)\n envelope ",
"end": 2422,
"score": 0.875831127166748,
"start": 2414,
"tag": "USERNAME",
"value": "minakusa"
},
{
"context": " room: config.cron_post_room\n try\... | src/ritsbus.coffee | programmerMOT/hubot-ritsbus | 3 | # Description:
# 立命館大学に関連する近江鉄道バスの時刻表通知
#
# Commands:
# hubot bus <n分後> <S|P|か|笠|西> - to南草津駅from立命館
# hubot kbus <n分後> - to草津駅from立命館
# hubot rbus <n分後> <S|P|か|笠|西> - to立命館from草津駅
#
Buffer = require('buffer').Buffer
cron = require('cron').CronJob
request = require('request')
cheerio = require('cheerio')
iconv = require('iconv')
PublicHoliday = require('japanese-public-holiday')
SHOW_MAX_BUS = 7
viaShuttle = ["S", "直", "shuttle", "シャトル","直行"]
viaPanaEast = ["P", "パナ東"]
viaKagayaki = ["か", "かがやき"]
viaKasayama = ["笠", "笠山"]
viaPanaWest = ["西", "パナ西"]
viaRitsumei = ["立"]
viaList = [viaShuttle, viaPanaEast, viaKagayaki, viaKasayama, viaPanaWest, viaRitsumei]
toList = ["minakusa", "kusatsu", "ritsumei"]
allDay = ["ordinary", "saturday", "holiday"]
allDayName = ["平日", "土曜日", "日曜・祝日"]
urlToMinakusa = ["http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=1&eigCd=7&teicd=1050&KaiKbn=NOW&pole=2", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=2&eigCd=7&teicd=1050&KaiKbn=NOW&pole=2", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=3&eigCd=7&teicd=1050&KaiKbn=NOW&pole=2"]
urlToKusatsu = ["http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=1&eigCd=7&teicd=1050&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=2&eigCd=7&teicd=1050&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=3&eigCd=7&teicd=1050&KaiKbn=NOW&pole=1"]
urlToRitsumei = ["http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=1&eigCd=7&teicd=1250&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=2&eigCd=7&teicd=1250&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=3&eigCd=7&teicd=1250&KaiKbn=NOW&pole=1"]
urls = {
"minakusa":
urlToMinakusa
"kusatsu":
urlToKusatsu
"ritsumei":
urlToRitsumei
}
config =
cron_post_room: process.env.HUBOT_RITSBUS_DEFAULT_ROOM
module.exports = (robot) ->
# 毎日午前4時に時刻表データ自動更新
new cron('1 4 * * *', () ->
now = new Date
console.log "自動更新:#{now}"
envelope =
room: config.cron_post_room
try
for day, index in allDay
for to in toList
brainBusSchedule(to, day, urls[to][index], robot)
catch error
console.log error
robot.send envelope, error.toString() if envelope.room?
).start()
# 毎日19時に終バス通知
new cron('1 19 * * *', () ->
today = new Date
console.log "終バス通知:#{today}"
to = toList[0] # "minakusa"
dayIndex = getDayOfWeek(today)
envelope =
room: config.cron_post_room
try
key = "#{to}_#{allDay[dayIndex]}_last"
console.log "本日の終バスです\n#{robot.brain.data[key]}"
robot.send envelope, robot.brain.data[key] if envelope.room?
catch error
console.log error
robot.send envelope, error.toString() if envelope.room?
).start()
# 立命館から南草津行き
robot.respond /(bus|mbus|バス)(.*)/i, (msg) ->
now = new Date
to = toList[0] # "minakusa"
toName = "南草津駅"
options = msg.match[2].replace(/^\s+/,"").split(/\s/)
# バスを検索する時間を指定(デフォルトの場合7分後で検索)
searchDate = getSearchDate(now, options, 7)
# バスの経由地判定
viaBusStop = getViaBusStop(options)
replyMessage = "\n#{toName}行き(#{searchDate.getHours()}:#{searchDate.getMinutes()}以降のバス) \n"
replyMessage += getBusList(to, viaBusStop, searchDate, robot)
msg.reply replyMessage
# 立命館から草津行き
robot.respond /(kbus)(.*)/i, (msg) ->
now = new Date
to = toList[1] # "kusatsu"
toName = "草津駅"
options = msg.match[2].replace(/^\s+/,"").split(/\s/)
# バスを検索する時間を指定(デフォルトの場合7分後で検索)
searchDate = getSearchDate(now, options, 7)
replyMessage = "\n#{toName}行き(#{searchDate.getHours()}:#{searchDate.getMinutes()}以降のバス) \n"
replyMessage += getBusList(to, "", searchDate, robot)
msg.reply replyMessage
# 南草津から立命館行き
robot.respond /(rbus)(.*)/i, (msg) ->
now = new Date
to = toList[2] # "ritsumei"
toName = "立命館大学"
options = msg.match[2].replace(/^\s+/,"").split(/\s/)
# バスを検索する時間を指定(デフォルトの場合0分後で検索)
searchDate = getSearchDate(now, options, 0)
# バスの経由地判定
viaBusStop = getViaBusStop(options)
replyMessage = "\n#{toName}行き(#{searchDate.getHours()}:#{searchDate.getMinutes()}以降のバス) \n"
replyMessage += getBusList(to, viaBusStop, searchDate, robot)
msg.reply replyMessage
# 終バス
robot.respond /(last bus|終バス)(.*)/i, (msg) ->
today = new Date
to = toList[0] # "minakusa"
toName = "南草津駅"
dayIndex = getDayOfWeek(today)
key = "#{to}_#{allDay[dayIndex]}_last"
replyMessage = "\n#{toName}行き最後のバス) \n"
replyMessage += robot.brain.data[key]
msg.reply replyMessage
# コマンドから全てのバスの時刻表を取得
robot.respond /update/i, (msg) ->
now = new Date
for day, index in allDay
for to in toList
brainBusSchedule(to, day, urls[to][index], robot)
# 時刻表のbodyを取得する
brainBusSchedule = (to, day, url, robot) ->
options =
url: url
timeout: 50000
headers: {'user-agent': 'node title fetcher'}
encoding: 'binary'
request options, (error, response, body) ->
conv = new iconv.Iconv('CP932', 'UTF-8//TRANSLIT//IGNORE')
body = new Buffer(body, 'binary');
body = conv.convert(body).toString();
busSchedule = parseBody(to, day, body)
beforeValue = null
for key, value of busSchedule
robot.brain.data[key] = value
if value == null and beforeValue != null
lastKey = "#{to}_#{day}_last"
time = beforeKey.match(/\d{2}/g)
robot.brain.data[lastKey] = "#{time}時: #{beforeValue}"
beforeKey = key
beforeValue = value
robot.brain.save()
# 時刻表のbodyからデータを加工し,hubotに記憶させる
parseBody = (to, day, body) ->
busSchedule = {}
$ = cheerio.load(body)
$('tr').each ->
time = parseInt($(this).children('td').eq(0).find('b').text(), 10)
#lastBus = {S: null, P: null, C: null}
if time in [5..24]
bus = $(this).children('td').eq(1).find('a').text()
bus = bus.match(/[P|か|笠|西|立]?\d{2}/g)
key = "#{to}_#{day}_time#{time}"
busSchedule[key] = bus
return busSchedule
# コマンドのオプションから検索するバスの時間を返す
getSearchDate = (date, options, extensionMinutes) ->
searchDate = new Date(date.getTime() + extensionMinutes*60*1000)
for opt in options
if extensionMinutes = opt.match(/^\d+$/)
min = parseInt(extensionMinutes, 10)
searchDate = new Date(date.getTime() + min*60*1000) if min <= 120
if hhmm = opt.match(/\d+:\d+/)
time = hhmm.toString().split(":")
hour = parseInt(time[0], 10)
minutes = parseInt(time[1], 10)
hour = 24 if hour > 24
minutes = 59 if minutes > 59
searchDate.setHours(hour)
searchDate.setMinutes(minutes)
searchDate.setSeconds(0)
return searchDate
# 経由地判定
getViaBusStop = (options) ->
viaBusStop = ""
for opt in options
for via in viaList
viaBusStop = via[0] if opt in via
return viaBusStop
# バスの一覧文字列を返す
getBusList = (to, viaBusStop, searchDate, robot) ->
dayIndex = getDayOfWeek(searchDate)
hour = searchDate.getHours()
min = searchDate.getMinutes()
if hour in [0..4]
hour = 5
min = 0
busCounter = 0
busHour = hour
busList = ""
# 3時間以内にあるバスを7件まで次のバスとして表示する
while busCounter < SHOW_MAX_BUS and hour+3 > busHour
nextBus = []
key = "#{to}_#{allDay[dayIndex]}_time#{busHour}"
while robot.brain.data[key] is null and busHour <= 24
busHour++
key = "#{to}_#{allDay[dayIndex]}_time#{busHour}"
if busHour > 24
busList += "最後のバスです"
break
for value, index in robot.brain.data[key]
parseTime = parseInt(value.match(/\d{2}/))
# シャトルバスの場合の判定
if not parseBus = value.match(/\D/)
parseBus = viaShuttle[0]
# 現在の時刻より後のバスをnextBusに追加
if (busHour > hour and ///#{viaBusStop}///.test(parseBus)) or (parseTime > min and ///#{viaBusStop}///.test(parseBus))
nextBus.push(value)
busCounter++
break if busCounter >= SHOW_MAX_BUS
busList += "#{busHour}時:#{nextBus.join()}\n"
busHour++
return busList
# 曜日の要素取得
getDayOfWeek = (now) ->
dayIndex = 0
if PublicHoliday.isPublicHoliday(now) or now.getDay() is 0
dayIndex = 2
else if now.getDay() is 6
dayIndex = 1
return dayIndex
| 42842 | # Description:
# 立命館大学に関連する近江鉄道バスの時刻表通知
#
# Commands:
# hubot bus <n分後> <S|P|か|笠|西> - to南草津駅from立命館
# hubot kbus <n分後> - to草津駅from立命館
# hubot rbus <n分後> <S|P|か|笠|西> - to立命館from草津駅
#
Buffer = require('buffer').Buffer
cron = require('cron').CronJob
request = require('request')
cheerio = require('cheerio')
iconv = require('iconv')
PublicHoliday = require('japanese-public-holiday')
SHOW_MAX_BUS = 7
viaShuttle = ["S", "直", "shuttle", "シャトル","直行"]
viaPanaEast = ["P", "パナ東"]
viaKagayaki = ["か", "かがやき"]
viaKasayama = ["笠", "笠山"]
viaPanaWest = ["西", "パナ西"]
viaRitsumei = ["立"]
viaList = [viaShuttle, viaPanaEast, viaKagayaki, viaKasayama, viaPanaWest, viaRitsumei]
toList = ["minakusa", "kusatsu", "ritsumei"]
allDay = ["ordinary", "saturday", "holiday"]
allDayName = ["平日", "土曜日", "日曜・祝日"]
urlToMinakusa = ["http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=1&eigCd=7&teicd=1050&KaiKbn=NOW&pole=2", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=2&eigCd=7&teicd=1050&KaiKbn=NOW&pole=2", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=3&eigCd=7&teicd=1050&KaiKbn=NOW&pole=2"]
urlToKusatsu = ["http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=1&eigCd=7&teicd=1050&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=2&eigCd=7&teicd=1050&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=3&eigCd=7&teicd=1050&KaiKbn=NOW&pole=1"]
urlToRitsumei = ["http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=1&eigCd=7&teicd=1250&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=2&eigCd=7&teicd=1250&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=3&eigCd=7&teicd=1250&KaiKbn=NOW&pole=1"]
urls = {
"minakusa":
urlToMinakusa
"kusatsu":
urlToKusatsu
"ritsumei":
urlToRitsumei
}
config =
cron_post_room: process.env.HUBOT_RITSBUS_DEFAULT_ROOM
module.exports = (robot) ->
# 毎日午前4時に時刻表データ自動更新
new cron('1 4 * * *', () ->
now = new Date
console.log "自動更新:#{now}"
envelope =
room: config.cron_post_room
try
for day, index in allDay
for to in toList
brainBusSchedule(to, day, urls[to][index], robot)
catch error
console.log error
robot.send envelope, error.toString() if envelope.room?
).start()
# 毎日19時に終バス通知
new cron('1 19 * * *', () ->
today = new Date
console.log "終バス通知:#{today}"
to = toList[0] # "minakusa"
dayIndex = getDayOfWeek(today)
envelope =
room: config.cron_post_room
try
key = <KEY>[<KEY>Index]}_<KEY>"
console.log "本日の終バスです\n#{robot.brain.data[key]}"
robot.send envelope, robot.brain.data[key] if envelope.room?
catch error
console.log error
robot.send envelope, error.toString() if envelope.room?
).start()
# 立命館から南草津行き
robot.respond /(bus|mbus|バス)(.*)/i, (msg) ->
now = new Date
to = toList[0] # "minakusa"
toName = "南草津駅"
options = msg.match[2].replace(/^\s+/,"").split(/\s/)
# バスを検索する時間を指定(デフォルトの場合7分後で検索)
searchDate = getSearchDate(now, options, 7)
# バスの経由地判定
viaBusStop = getViaBusStop(options)
replyMessage = "\n#{toName}行き(#{searchDate.getHours()}:#{searchDate.getMinutes()}以降のバス) \n"
replyMessage += getBusList(to, viaBusStop, searchDate, robot)
msg.reply replyMessage
# 立命館から草津行き
robot.respond /(kbus)(.*)/i, (msg) ->
now = new Date
to = toList[1] # "kusatsu"
toName = "<NAME>"
options = msg.match[2].replace(/^\s+/,"").split(/\s/)
# バスを検索する時間を指定(デフォルトの場合7分後で検索)
searchDate = getSearchDate(now, options, 7)
replyMessage = "\n#{toName}行き(#{searchDate.getHours()}:#{searchDate.getMinutes()}以降のバス) \n"
replyMessage += getBusList(to, "", searchDate, robot)
msg.reply replyMessage
# 南草津から立命館行き
robot.respond /(rbus)(.*)/i, (msg) ->
now = new Date
to = toList[2] # "ritsumei"
toName = "立命館大学"
options = msg.match[2].replace(/^\s+/,"").split(/\s/)
# バスを検索する時間を指定(デフォルトの場合0分後で検索)
searchDate = getSearchDate(now, options, 0)
# バスの経由地判定
viaBusStop = getViaBusStop(options)
replyMessage = "\n#{toName}行き(#{searchDate.getHours()}:#{searchDate.getMinutes()}以降のバス) \n"
replyMessage += getBusList(to, viaBusStop, searchDate, robot)
msg.reply replyMessage
# 終バス
robot.respond /(last bus|終バス)(.*)/i, (msg) ->
today = new Date
to = toList[0] # "minakusa"
toName = "<NAME>"
dayIndex = getDayOfWeek(today)
key = <KEY>all<KEY>[dayIndex]}_<KEY>"
replyMessage = "\n#{toName}行き最後のバス) \n"
replyMessage += robot.brain.data[key]
msg.reply replyMessage
# コマンドから全てのバスの時刻表を取得
robot.respond /update/i, (msg) ->
now = new Date
for day, index in allDay
for to in toList
brainBusSchedule(to, day, urls[to][index], robot)
# 時刻表のbodyを取得する
brainBusSchedule = (to, day, url, robot) ->
options =
url: url
timeout: 50000
headers: {'user-agent': 'node title fetcher'}
encoding: 'binary'
request options, (error, response, body) ->
conv = new iconv.Iconv('CP932', 'UTF-8//TRANSLIT//IGNORE')
body = new Buffer(body, 'binary');
body = conv.convert(body).toString();
busSchedule = parseBody(to, day, body)
beforeValue = null
for key, value of busSchedule
robot.brain.data[key] = value
if value == null and beforeValue != null
lastKey = <KEY>
time = beforeKey.match(/\<KEY>{2}/g)
robot.brain.data[lastKey] = "#{time}時: #{beforeValue}"
beforeKey = key
beforeValue = value
robot.brain.save()
# Parse timetable HTML into {"<to>_<day>_<hour>": [departures] | null}.
# Only hours 5..24 are kept; a departure looks like "P05" or "05"
# (optional via code + 2-digit minute).  null means no matching buses.
parseBody = (to, day, body) ->
  busSchedule = {}
  $ = cheerio.load(body)
  $('tr').each ->
    time = parseInt($(this).children('td').eq(0).find('b').text(), 10)
    if time in [5..24]
      bus = $(this).children('td').eq(1).find('a').text()
      # NOTE(review): '|' inside the character class is a literal pipe —
      # probably unintended but harmless; kept to preserve behavior.
      bus = bus.match(/[P|か|笠|西|立]?\d{2}/g)
      # NOTE(review): key format reconstructed from redacted source —
      # must match the keys read by getBusList.
      key = "#{to}_#{day}_#{time}"
      busSchedule[key] = bus
  return busSchedule
# Resolve the time to search buses from.  Starts at `date` plus the default
# offset `extensionMinutes`; an "<n>" option overrides the offset (ignored
# above 120 minutes) and an "HH:MM" option pins an absolute clock time
# (capped at 24:59, seconds zeroed).
getSearchDate = (date, options, extensionMinutes) ->
  result = new Date(date.getTime() + extensionMinutes * 60 * 1000)
  for opt in options
    if minuteMatch = opt.match(/^\d+$/)
      delta = parseInt(minuteMatch, 10)
      result = new Date(date.getTime() + delta * 60 * 1000) if delta <= 120
    if clockMatch = opt.match(/\d+:\d+/)
      parts = clockMatch.toString().split(":")
      h = parseInt(parts[0], 10)
      m = parseInt(parts[1], 10)
      h = 24 if h > 24
      m = 59 if m > 59
      result.setHours(h)
      result.setMinutes(m)
      result.setSeconds(0)
  return result
# Map any recognized option token to its canonical via-stop code
# (first element of its alias group); returns "" when nothing matches.
# The last matching token wins, as in the original.
getViaBusStop = (options) ->
  code = ""
  for opt in options
    for group in viaList
      code = group[0] if opt in group
  return code
# Build the reply text listing up to SHOW_MAX_BUS departures within the next
# 3 hours, optionally filtered by a via-stop code.
getBusList = (to, viaBusStop, searchDate, robot) ->
  dayIndex = getDayOfWeek(searchDate)
  hour = searchDate.getHours()
  min = searchDate.getMinutes()
  # No service before 05:00 — search from the first bus of the day.
  if hour in [0..4]
    hour = 5
    min = 0
  busCounter = 0
  busHour = hour
  busList = ""
  while busCounter < SHOW_MAX_BUS and hour + 3 > busHour
    nextBus = []
    # NOTE(review): key format reconstructed from redacted source —
    # must match the keys written by brainBusSchedule/parseBody.
    key = "#{to}_#{allDay[dayIndex]}_#{busHour}"
    # Skip hours stored as null (no departures that hour).
    while robot.brain.data[key] is null and busHour <= 24
      busHour++
      key = "#{to}_#{allDay[dayIndex]}_#{busHour}"
    if busHour > 24
      busList += "最後のバスです"
      break
    # NOTE(review): assumes every hour 5..24 has a brain entry; an
    # undefined entry would make this loop throw — confirm upstream.
    for value, index in robot.brain.data[key]
      parseTime = parseInt(value.match(/\d{2}/))
      # Departures without a via letter are shuttle ("S") buses.
      if not parseBus = value.match(/\D/)
        parseBus = viaShuttle[0]
      # Keep departures after the requested time that match the via filter.
      if (busHour > hour and ///#{viaBusStop}///.test(parseBus)) or (parseTime > min and ///#{viaBusStop}///.test(parseBus))
        nextBus.push(value)
        busCounter++
        break if busCounter >= SHOW_MAX_BUS
    busList += "#{busHour}時:#{nextBus.join()}\n"
    busHour++
  return busList
# Classify a date into the timetable's day-type index:
# 0 = weekday, 1 = Saturday, 2 = Sunday or public holiday.
getDayOfWeek = (now) ->
  if PublicHoliday.isPublicHoliday(now) or now.getDay() is 0
    return 2
  if now.getDay() is 6
    return 1
  return 0
# Description:
# 立命館大学に関連する近江鉄道バスの時刻表通知
#
# Commands:
# hubot bus <n分後> <S|P|か|笠|西> - to南草津駅from立命館
# hubot kbus <n分後> - to草津駅from立命館
# hubot rbus <n分後> <S|P|か|笠|西> - to立命館from草津駅
#
Buffer = require('buffer').Buffer
cron = require('cron').CronJob
request = require('request')
cheerio = require('cheerio')
iconv = require('iconv')
PublicHoliday = require('japanese-public-holiday')
# Maximum number of departures shown in one reply.
SHOW_MAX_BUS = 7
# Via-stop alias groups; the first element of each group is the canonical code.
viaShuttle = ["S", "直", "shuttle", "シャトル","直行"]
viaPanaEast = ["P", "パナ東"]
viaKagayaki = ["か", "かがやき"]
viaKasayama = ["笠", "笠山"]
viaPanaWest = ["西", "パナ西"]
viaRitsumei = ["立"]
viaList = [viaShuttle, viaPanaEast, viaKagayaki, viaKasayama, viaPanaWest, viaRitsumei]
# Destination slugs and day-type slugs used to build robot.brain keys.
toList = ["minakusa", "kusatsu", "ritsumei"]
allDay = ["ordinary", "saturday", "holiday"]
# Human-readable day-type names (weekday / Saturday / Sunday & holiday).
allDayName = ["平日", "土曜日", "日曜・祝日"]
# Ohmi Railway timetable URLs per destination: [weekday, Saturday, holiday].
urlToMinakusa = ["http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=1&eigCd=7&teicd=1050&KaiKbn=NOW&pole=2", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=2&eigCd=7&teicd=1050&KaiKbn=NOW&pole=2", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=3&eigCd=7&teicd=1050&KaiKbn=NOW&pole=2"]
urlToKusatsu = ["http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=1&eigCd=7&teicd=1050&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=2&eigCd=7&teicd=1050&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=3&eigCd=7&teicd=1050&KaiKbn=NOW&pole=1"]
urlToRitsumei = ["http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=1&eigCd=7&teicd=1250&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=2&eigCd=7&teicd=1250&KaiKbn=NOW&pole=1", "http://time.khobho.co.jp/ohmi_bus/tim_dsp.asp?projCd=3&eigCd=7&teicd=1250&KaiKbn=NOW&pole=1"]
urls = {
"minakusa":
urlToMinakusa
"kusatsu":
urlToKusatsu
"ritsumei":
urlToRitsumei
}
# Room for scheduled (cron) announcements; when unset, cron jobs only log.
config =
cron_post_room: process.env.HUBOT_RITSBUS_DEFAULT_ROOM
module.exports = (robot) ->
  # Refresh all timetable data automatically at 04:01 every day.
  new cron('1 4 * * *', () ->
    now = new Date
    console.log "自動更新:#{now}"
    envelope =
      room: config.cron_post_room
    try
      for day, index in allDay
        for to in toList
          brainBusSchedule(to, day, urls[to][index], robot)
    catch error
      console.log error
      robot.send envelope, error.toString() if envelope.room?
  ).start()

  # Announce the day's last bus (toward Minami-Kusatsu) at 19:01 every day.
  new cron('1 19 * * *', () ->
    today = new Date
    console.log "終バス通知:#{today}"
    to = toList[0] # "minakusa"
    dayIndex = getDayOfWeek(today)
    envelope =
      room: config.cron_post_room
    try
      # Brain key written by brainBusSchedule ("<to>_<day>_last").
      # NOTE(review): reconstructed from redacted source — confirm.
      key = "#{to}_#{allDay[dayIndex]}_last"
      console.log "本日の終バスです\n#{robot.brain.data[key]}"
      robot.send envelope, robot.brain.data[key] if envelope.room?
    catch error
      console.log error
      robot.send envelope, error.toString() if envelope.room?
  ).start()

  # "bus"/"mbus"/"バス": buses from Ritsumeikan to Minami-Kusatsu station.
  robot.respond /(bus|mbus|バス)(.*)/i, (msg) ->
    now = new Date
    to = toList[0] # "minakusa"
    toName = "南草津駅"
    options = msg.match[2].replace(/^\s+/,"").split(/\s/)
    # Default search time: 7 minutes from now (walking margin).
    searchDate = getSearchDate(now, options, 7)
    # Optional via-stop filter (S/P/か/笠/西).
    viaBusStop = getViaBusStop(options)
    replyMessage = "\n#{toName}行き(#{searchDate.getHours()}:#{searchDate.getMinutes()}以降のバス) \n"
    replyMessage += getBusList(to, viaBusStop, searchDate, robot)
    msg.reply replyMessage

  # "kbus": buses from Ritsumeikan to Kusatsu station.
  robot.respond /(kbus)(.*)/i, (msg) ->
    now = new Date
    to = toList[1] # "kusatsu"
    # NOTE(review): label was redacted in source; "草津駅" matches the
    # kusatsu route — confirm.
    toName = "草津駅"
    options = msg.match[2].replace(/^\s+/,"").split(/\s/)
    # Default search time: 7 minutes from now.
    searchDate = getSearchDate(now, options, 7)
    replyMessage = "\n#{toName}行き(#{searchDate.getHours()}:#{searchDate.getMinutes()}以降のバス) \n"
    replyMessage += getBusList(to, "", searchDate, robot)
    msg.reply replyMessage

  # "rbus": buses from Minami-Kusatsu toward Ritsumeikan University.
  robot.respond /(rbus)(.*)/i, (msg) ->
    now = new Date
    to = toList[2] # "ritsumei"
    toName = "立命館大学"
    options = msg.match[2].replace(/^\s+/,"").split(/\s/)
    # Default search time: now (0-minute offset).
    searchDate = getSearchDate(now, options, 0)
    viaBusStop = getViaBusStop(options)
    replyMessage = "\n#{toName}行き(#{searchDate.getHours()}:#{searchDate.getMinutes()}以降のバス) \n"
    replyMessage += getBusList(to, viaBusStop, searchDate, robot)
    msg.reply replyMessage

  # "last bus"/"終バス": reply with the stored last bus toward Minami-Kusatsu.
  robot.respond /(last bus|終バス)(.*)/i, (msg) ->
    today = new Date
    to = toList[0] # "minakusa"
    # NOTE(review): label was redacted in source; "南草津駅" matches the
    # minakusa route — confirm.
    toName = "南草津駅"
    dayIndex = getDayOfWeek(today)
    # NOTE(review): key reconstructed from redacted source — confirm.
    key = "#{to}_#{allDay[dayIndex]}_last"
    replyMessage = "\n#{toName}行き最後のバス) \n"
    replyMessage += robot.brain.data[key]
    msg.reply replyMessage

  # "update": re-fetch every timetable on demand.
  robot.respond /update/i, (msg) ->
    now = new Date
    for day, index in allDay
      for to in toList
        brainBusSchedule(to, day, urls[to][index], robot)
# Fetch one timetable page (served as CP932/Shift_JIS) and cache the parsed
# departures in robot.brain under keys "<to>_<day>_<hour>".  When an hour with
# no buses follows one that had buses, the previous hour's entry is also
# recorded as the day's last bus under "<to>_<day>_last".
brainBusSchedule = (to, day, url, robot) ->
  options =
    url: url
    timeout: 50000
    headers: {'user-agent': 'node title fetcher'}
    encoding: 'binary'
  request options, (error, response, body) ->
    # Bail out on transport errors; previously `body` would be undefined
    # and the Buffer() call below would throw.
    if error
      console.log error
      return
    # Convert the Shift_JIS payload to UTF-8 before parsing.
    conv = new iconv.Iconv('CP932', 'UTF-8//TRANSLIT//IGNORE')
    body = new Buffer(body, 'binary');
    body = conv.convert(body).toString();
    busSchedule = parseBody(to, day, body)
    beforeValue = null
    for key, value of busSchedule
      robot.brain.data[key] = value
      if value == null and beforeValue != null
        # NOTE(review): key format reconstructed from redacted source —
        # must match the "_last" keys read by the last-bus handlers.
        lastKey = "#{to}_#{day}_last"
        time = beforeKey.match(/\d{2}/g)
        robot.brain.data[lastKey] = "#{time}時: #{beforeValue}"
      beforeKey = key
      beforeValue = value
    # NOTE(review): if the final hour (24) still has buses, no "_last"
    # entry is written — looks like a latent gap; confirm before fixing.
    robot.brain.save()
# Parse timetable HTML into {"<to>_<day>_<hour>": [departures] | null}.
# Only hours 5..24 are kept; a departure looks like "P05" or "05"
# (optional via code + 2-digit minute).  null means no matching buses.
parseBody = (to, day, body) ->
  busSchedule = {}
  $ = cheerio.load(body)
  $('tr').each ->
    time = parseInt($(this).children('td').eq(0).find('b').text(), 10)
    if time in [5..24]
      bus = $(this).children('td').eq(1).find('a').text()
      # NOTE(review): '|' inside the character class is a literal pipe —
      # probably unintended but harmless; kept to preserve behavior.
      bus = bus.match(/[P|か|笠|西|立]?\d{2}/g)
      # NOTE(review): key format reconstructed from redacted source —
      # must match the keys read by getBusList.
      key = "#{to}_#{day}_#{time}"
      busSchedule[key] = bus
  return busSchedule
# Determine the time to search buses from, based on the command options.
# Starts from `date` shifted by the default `extensionMinutes`; a bare
# "<n>" option replaces the offset (ignored above 120), and an "HH:MM"
# option sets an absolute clock time (capped at 24:59, seconds cleared).
getSearchDate = (date, options, extensionMinutes) ->
  searchDate = new Date(date.getTime() + extensionMinutes * 60 * 1000)
  for opt in options
    offsetMatch = opt.match(/^\d+$/)
    if offsetMatch
      offset = parseInt(offsetMatch, 10)
      searchDate = new Date(date.getTime() + offset * 60 * 1000) if offset <= 120
    clockMatch = opt.match(/\d+:\d+/)
    if clockMatch
      [hourPart, minutePart] = clockMatch.toString().split(":")
      hour = Math.min(parseInt(hourPart, 10), 24)
      minutes = Math.min(parseInt(minutePart, 10), 59)
      searchDate.setHours(hour)
      searchDate.setMinutes(minutes)
      searchDate.setSeconds(0)
  searchDate
# Return the canonical via-stop code for any recognized option token.
# Scans every option against every alias group in viaList; the last
# matching token wins (same as the original); "" when nothing matches.
getViaBusStop = (options) ->
  matched = ""
  for opt in options
    for group in viaList when opt in group
      matched = group[0]
  matched
# Build the reply text listing up to SHOW_MAX_BUS departures within the next
# 3 hours, optionally filtered by a via-stop code.
getBusList = (to, viaBusStop, searchDate, robot) ->
  dayIndex = getDayOfWeek(searchDate)
  hour = searchDate.getHours()
  min = searchDate.getMinutes()
  # No service before 05:00 — search from the first bus of the day.
  if hour in [0..4]
    hour = 5
    min = 0
  busCounter = 0
  busHour = hour
  busList = ""
  while busCounter < SHOW_MAX_BUS and hour + 3 > busHour
    nextBus = []
    # NOTE(review): key format reconstructed from redacted source —
    # must match the keys written by brainBusSchedule/parseBody.
    key = "#{to}_#{allDay[dayIndex]}_#{busHour}"
    # Skip hours stored as null (no departures that hour).
    while robot.brain.data[key] is null and busHour <= 24
      busHour++
      key = "#{to}_#{allDay[dayIndex]}_#{busHour}"
    if busHour > 24
      busList += "最後のバスです"
      break
    # NOTE(review): assumes every hour 5..24 has a brain entry; an
    # undefined entry would make this loop throw — confirm upstream.
    for value, index in robot.brain.data[key]
      parseTime = parseInt(value.match(/\d{2}/))
      # Departures without a via letter are shuttle ("S") buses.
      if not parseBus = value.match(/\D/)
        parseBus = viaShuttle[0]
      # Keep departures after the requested time that match the via filter.
      if (busHour > hour and ///#{viaBusStop}///.test(parseBus)) or (parseTime > min and ///#{viaBusStop}///.test(parseBus))
        nextBus.push(value)
        busCounter++
        break if busCounter >= SHOW_MAX_BUS
    busList += "#{busHour}時:#{nextBus.join()}\n"
    busHour++
  return busList
# Classify a date into the timetable's day-type index:
# 0 = weekday, 1 = Saturday, 2 = Sunday or public holiday.
getDayOfWeek = (now) ->
  return 2 if PublicHoliday.isPublicHoliday(now) or now.getDay() is 0
  return 1 if now.getDay() is 6
  0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.